lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
boa/src/builtins/math/mod.rs
coolreader18/boa
7dd32a68594585950be3eaa62e480b8fec5ba45a
use crate::{ builtins::{ function::NativeFunctionData, value::{from_value, to_value, ResultValue, Value, ValueData}, }, exec::Interpreter, }; use rand::random; use std::f64; #[cfg(test)] mod tests; pub fn abs(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .abs() })) } pub fn acos(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .acos() })) } pub fn acosh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .acosh() })) } pub fn asin(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .asin() })) } pub fn asinh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .asinh() })) } pub fn atan(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .atan() })) } pub fn atanh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") 
.atanh() })) } pub fn atan2(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .atan2(args.get(1).expect("Could not get argument").to_num()) })) } pub fn cbrt(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .cbrt() })) } pub fn ceil(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .ceil() })) } pub fn cos(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .cos() })) } pub fn cosh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .cosh() })) } pub fn exp(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .exp() })) } pub fn floor(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .floor() })) } pub fn log(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN 
} else { let value = from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); if value <= 0.0 { f64::NAN } else { value.log(f64::consts::E) } })) } pub fn log10(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { let value = from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); if value <= 0.0 { f64::NAN } else { value.log10() } })) } pub fn log2(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { let value = from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); if value <= 0.0 { f64::NAN } else { value.log2() } })) } pub fn max(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { let mut max = f64::NEG_INFINITY; for arg in args { let num = arg.to_num(); max = max.max(num); } Ok(to_value(max)) } pub fn min(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { let mut max = f64::INFINITY; for arg in args { let num = arg.to_num(); max = max.min(num); } Ok(to_value(max)) } pub fn pow(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.len() >= 2 { let num: f64 = from_value(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); let power: f64 = from_value(args.get(1).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); num.powf(power) } else { f64::NAN })) } pub fn _random(_: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(random::<f64>())) } pub fn round(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .round() })) } pub 
fn sign(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { let value = from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); if value == 0.0 || value == -0.0 { value } else { value.signum() } })) } pub fn sin(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .sin() })) } pub fn sinh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .sinh() })) } pub fn sqrt(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .sqrt() })) } pub fn tan(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .tan() })) } pub fn tanh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .tanh() })) } pub fn trunc(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .trunc() })) } pub fn create_constructor(global: &Value) -> Value { let math = ValueData::new_obj(Some(global)); 
math.set_field_slice("E", to_value(f64::consts::E)); math.set_field_slice("LN2", to_value(f64::consts::LN_2)); math.set_field_slice("LN10", to_value(f64::consts::LN_10)); math.set_field_slice("LOG2E", to_value(f64::consts::LOG2_E)); math.set_field_slice("LOG10E", to_value(f64::consts::LOG10_E)); math.set_field_slice("SQRT1_2", to_value(0.5_f64.sqrt())); math.set_field_slice("SQRT2", to_value(f64::consts::SQRT_2)); math.set_field_slice("PI", to_value(f64::consts::PI)); make_builtin_fn!(abs, named "abs", with length 1, of math); make_builtin_fn!(acos, named "acos", with length 1, of math); make_builtin_fn!(acosh, named "acosh", with length 1, of math); make_builtin_fn!(asin, named "asin", with length 1, of math); make_builtin_fn!(asinh, named "asinh", with length 1, of math); make_builtin_fn!(atan, named "atan", with length 1, of math); make_builtin_fn!(atanh, named "atanh", with length 1, of math); make_builtin_fn!(atan2, named "atan2", with length 2, of math); make_builtin_fn!(cbrt, named "cbrt", with length 1, of math); make_builtin_fn!(ceil, named "ceil", with length 1, of math); make_builtin_fn!(cos, named "cos", with length 1, of math); make_builtin_fn!(cosh, named "cosh", with length 1, of math); make_builtin_fn!(exp, named "exp", with length 1, of math); make_builtin_fn!(floor, named "floor", with length 1, of math); make_builtin_fn!(log, named "log", with length 1, of math); make_builtin_fn!(log10, named "log10", with length 1, of math); make_builtin_fn!(log2, named "log2", with length 1, of math); make_builtin_fn!(max, named "max", with length 2, of math); make_builtin_fn!(min, named "min", with length 2, of math); make_builtin_fn!(pow, named "pow", with length 2, of math); make_builtin_fn!(_random, named "random", of math); make_builtin_fn!(round, named "round", with length 1, of math); make_builtin_fn!(sign, named "sign", with length 1, of math); make_builtin_fn!(sin, named "sin", with length 1, of math); make_builtin_fn!(sinh, named "sinh", with length 
1, of math); make_builtin_fn!(sqrt, named "sqrt", with length 1, of math); make_builtin_fn!(tan, named "tan", with length 1, of math); make_builtin_fn!(tanh, named "tanh", with length 1, of math); make_builtin_fn!(trunc, named "trunc", with length 1, of math); math }
use crate::{ builtins::{ function::NativeFunctionData, value::{from_value, to_value, ResultValue, Value, ValueData}, }, exec::Interpreter, }; use rand::random; use std::f64; #[cfg(test)] mod tests; pub fn abs(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .abs() })) } pub fn acos(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .acos() })) } pub fn acosh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .acosh() })) } pub fn asin(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .asin() })) } pub fn asinh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .asinh() })) } pub fn atan(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .atan() })) } pub fn atanh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") 
.atanh() })) } pub fn atan2(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .atan2(args.get(1).expect("Could not get argument").to_num()) })) } pub fn cbrt(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .cbrt() })) } pub fn ceil(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .ceil() })) } pub fn cos(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .cos() })) } pub fn cosh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .cosh() })) } pub fn exp(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .exp() })) } pub fn floor(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .floor() })) } pub fn log(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN 
} else { let value = from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); if value <= 0.0 { f64::NAN } else { value.log(f64::consts::E) } })) } pub fn log10(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { let value = from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); if value <= 0.0 { f64::NAN } else { value.log10() } })) } pub fn log2(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { let value = from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); if value <= 0.0 { f64::NAN } else { value.log2() } })) } pub fn max(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { let mut max = f64::NEG_INFINITY; for arg in args { let num = arg.to_num(); max = max.max(num); } Ok(to_value(max)) } pub fn min(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { let mut max = f64::INFINITY; for arg in args { let num = arg.to_num(); max = max.min(num); } Ok(to_value(max)) } pub fn pow(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.len() >= 2 { let num: f64 = from_value(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); let power: f64 = from_value(args.get(1).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); num.powf(power) } else { f64::NAN })) } pub fn _random(_: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(random::<f64>())) } pub fn round(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .round() })) } pub 
fn sign(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { let value = from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64"); if value == 0.0 || value == -0.0 { value } else { value.signum() } })) } pub fn sin(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .sin() })) } pub fn sinh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .sinh() })) } pub fn sqrt(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {
} pub fn tan(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .tan() })) } pub fn tanh(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .tanh() })) } pub fn trunc(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue { Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .trunc() })) } pub fn create_constructor(global: &Value) -> Value { let math = ValueData::new_obj(Some(global)); math.set_field_slice("E", to_value(f64::consts::E)); math.set_field_slice("LN2", to_value(f64::consts::LN_2)); math.set_field_slice("LN10", to_value(f64::consts::LN_10)); math.set_field_slice("LOG2E", to_value(f64::consts::LOG2_E)); math.set_field_slice("LOG10E", to_value(f64::consts::LOG10_E)); math.set_field_slice("SQRT1_2", to_value(0.5_f64.sqrt())); math.set_field_slice("SQRT2", to_value(f64::consts::SQRT_2)); math.set_field_slice("PI", to_value(f64::consts::PI)); make_builtin_fn!(abs, named "abs", with length 1, of math); make_builtin_fn!(acos, named "acos", with length 1, of math); make_builtin_fn!(acosh, named "acosh", with length 1, of math); make_builtin_fn!(asin, named "asin", with length 1, of math); make_builtin_fn!(asinh, named "asinh", with length 1, of math); make_builtin_fn!(atan, named "atan", with length 1, of math); make_builtin_fn!(atanh, named "atanh", with length 1, of math); make_builtin_fn!(atan2, named "atan2", with length 2, of math); make_builtin_fn!(cbrt, named "cbrt", with length 1, of math); make_builtin_fn!(ceil, named "ceil", with length 1, of math); make_builtin_fn!(cos, 
named "cos", with length 1, of math); make_builtin_fn!(cosh, named "cosh", with length 1, of math); make_builtin_fn!(exp, named "exp", with length 1, of math); make_builtin_fn!(floor, named "floor", with length 1, of math); make_builtin_fn!(log, named "log", with length 1, of math); make_builtin_fn!(log10, named "log10", with length 1, of math); make_builtin_fn!(log2, named "log2", with length 1, of math); make_builtin_fn!(max, named "max", with length 2, of math); make_builtin_fn!(min, named "min", with length 2, of math); make_builtin_fn!(pow, named "pow", with length 2, of math); make_builtin_fn!(_random, named "random", of math); make_builtin_fn!(round, named "round", with length 1, of math); make_builtin_fn!(sign, named "sign", with length 1, of math); make_builtin_fn!(sin, named "sin", with length 1, of math); make_builtin_fn!(sinh, named "sinh", with length 1, of math); make_builtin_fn!(sqrt, named "sqrt", with length 1, of math); make_builtin_fn!(tan, named "tan", with length 1, of math); make_builtin_fn!(tanh, named "tanh", with length 1, of math); make_builtin_fn!(trunc, named "trunc", with length 1, of math); math }
Ok(to_value(if args.is_empty() { f64::NAN } else { from_value::<f64>(args.get(0).expect("Could not get argument").clone()) .expect("Could not convert argument to f64") .sqrt() }))
call_expression
[ { "content": "/// Search for a match between this regex and a specified string\n\npub fn test(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let arg_str = get_argument::<String>(args, 0)?;\n\n let mut last_index =\n\n from_value::<usize>(this.get_field_slice(\"lastIndex\")).map_err(to_value)?;\n\n let result = this.with_internal_state_ref(|regex: &RegExp| {\n\n let result = if let Some(m) = regex.matcher.find_at(arg_str.as_str(), last_index) {\n\n if regex.use_last_index {\n\n last_index = m.end();\n\n }\n\n true\n\n } else {\n\n if regex.use_last_index {\n\n last_index = 0;\n\n }\n\n false\n\n };\n\n Ok(Gc::new(ValueData::Boolean(result)))\n\n });\n\n this.set_field_slice(\"lastIndex\", to_value(last_index));\n\n result\n\n}\n\n\n", "file_path": "boa/src/builtins/regexp/mod.rs", "rank": 14, "score": 399127.51803097327 }, { "content": "/// Get the prototype of an object\n\npub fn get_proto_of(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let obj = args.get(0).expect(\"Cannot get object\");\n\n Ok(obj.get_field_slice(INSTANCE_PROTOTYPE))\n\n}\n\n\n", "file_path": "boa/src/builtins/object/mod.rs", "rank": 17, "score": 380151.08338779304 }, { "content": "/// Call new string [[Call]]\n\n/// https://tc39.es/ecma262/#sec-string-constructor-string-value\n\npub fn call_string(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let arg = match args.get(0) {\n\n Some(v) => v.clone(),\n\n None => Gc::new(ValueData::Undefined),\n\n };\n\n\n\n if arg.is_undefined() {\n\n return Ok(to_value(\"\"));\n\n }\n\n\n\n Ok(to_value(arg.to_string()))\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 30, "score": 349691.3695613297 }, { "content": "/// Set the prototype of an object\n\npub fn set_proto_of(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let obj = args.get(0).expect(\"Cannot get object\").clone();\n\n let proto = args.get(1).expect(\"Cannot get object\").clone();\n\n 
obj.set_internal_slot(INSTANCE_PROTOTYPE, proto);\n\n Ok(obj)\n\n}\n\n\n", "file_path": "boa/src/builtins/object/mod.rs", "rank": 31, "score": 349687.5618183804 }, { "content": "/// Define a property in an object\n\npub fn define_prop(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let obj = args.get(0).expect(\"Cannot get object\");\n\n let prop = from_value::<String>(args.get(1).expect(\"Cannot get object\").clone())\n\n .expect(\"Cannot get object\");\n\n let desc = from_value::<Property>(args.get(2).expect(\"Cannot get object\").clone())\n\n .expect(\"Cannot get object\");\n\n obj.set_prop(prop, desc);\n\n Ok(Gc::new(ValueData::Undefined))\n\n}\n\n\n", "file_path": "boa/src/builtins/object/mod.rs", "rank": 32, "score": 349687.5618183804 }, { "content": "/// Return a boolean literal [[Call]]\n\npub fn call_boolean(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n // Get the argument, if any\n\n match args.get(0) {\n\n Some(ref value) => Ok(to_boolean(value)),\n\n None => Ok(to_boolean(&to_value(false))),\n\n }\n\n}\n\n\n", "file_path": "boa/src/builtins/boolean/mod.rs", "rank": 33, "score": 349687.5618183804 }, { "content": "/// Array.prototype.join ( separator )\n\n///\n\n/// The elements of the array are converted to Strings, and these Strings are\n\n/// then concatenated, separated by occurrences of the separator. 
If no\n\n/// separator is provided, a single comma is used as the separator.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.join>\n\npub fn join(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let separator = if args.is_empty() {\n\n String::from(\",\")\n\n } else {\n\n args.get(0).expect(\"Could not get argument\").to_string()\n\n };\n\n\n\n let mut elem_strs: Vec<String> = Vec::new();\n\n let length: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not convert argument to i32\");\n\n for n in 0..length {\n\n let elem_str: String = this.get_field_slice(&n.to_string()).to_string();\n\n elem_strs.push(elem_str);\n\n }\n\n\n\n Ok(to_value(elem_strs.join(&separator)))\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 34, "score": 345029.2238582563 }, { "content": "/// Array.prototype.indexOf ( searchElement[, fromIndex ] )\n\n///\n\n///\n\n/// indexOf compares searchElement to the elements of the array, in ascending order,\n\n/// using the Strict Equality Comparison algorithm, and if found at one or more indices,\n\n/// returns the smallest such index; otherwise, -1 is returned.\n\n///\n\n/// The optional second argument fromIndex defaults to 0 (i.e. the whole array is searched).\n\n/// If it is greater than or equal to the length of the array, -1 is returned,\n\n/// i.e. the array will not be searched. If it is negative, it is used as the offset\n\n/// from the end of the array to compute fromIndex. If the computed index is less than 0,\n\n/// the whole array will be searched.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.indexof>\n\npub fn index_of(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n // If no arguments, return -1. 
Not described in spec, but is what chrome does.\n\n if args.is_empty() {\n\n return Ok(to_value(-1));\n\n }\n\n\n\n let search_element = args[0].clone();\n\n let len: i32 = from_value(this.get_field_slice(\"length\"))\n\n .expect(\"Expected array property \\\"length\\\" is not set.\");\n\n\n\n let mut idx = match args.get(1) {\n\n Some(from_idx_ptr) => {\n\n let from_idx = from_value(from_idx_ptr.clone())\n\n .expect(\"Error parsing \\\"Array.prototype.indexOf - fromIndex\\\" argument\");\n\n\n\n if from_idx < 0 {\n\n len + from_idx\n\n } else {\n\n from_idx\n\n }\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 35, "score": 345028.1136015304 }, { "content": "/// Array.prototype.concat(...arguments)\n\n///\n\n/// When the concat method is called with zero or more arguments, it returns an\n\n/// array containing the array elements of the object followed by the array\n\n/// elements of each argument in order.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.concat>\n\npub fn concat(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n // If concat is called with no arguments, it returns the original array\n\n return Ok(this.clone());\n\n }\n\n\n\n // Make a new array (using this object as the prototype basis for the new\n\n // one)\n\n let mut new_values: Vec<Value> = Vec::new();\n\n\n\n let this_length: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not convert argument to i32\");\n\n for n in 0..this_length {\n\n new_values.push(this.get_field_slice(&n.to_string()));\n\n }\n\n\n\n for concat_array in args {\n\n let concat_length: i32 = from_value(concat_array.get_field_slice(\"length\"))\n\n .expect(\"Could not convert argument to i32\");\n\n for n in 0..concat_length {\n\n new_values.push(concat_array.get_field_slice(&n.to_string()));\n\n }\n\n }\n\n\n\n construct_array(this, &new_values)\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 36, "score": 345027.62457666785 
}, { "content": "/// Array.prototype.unshift ( ...items )\n\n///\n\n/// The arguments are prepended to the start of the array, such that their order\n\n/// within the array is the same as the order in which they appear in the\n\n/// argument list.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.unshift/>\n\npub fn unshift(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let len: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not convert argument to i32\");\n\n let arg_c: i32 = args.len() as i32;\n\n\n\n if arg_c > 0 {\n\n for k in (1..=len).rev() {\n\n let from = (k.wrapping_sub(1)).to_string();\n\n let to = (k.wrapping_add(arg_c).wrapping_sub(1)).to_string();\n\n\n\n let from_value = this.get_field_slice(&from);\n\n if from_value == Gc::new(ValueData::Undefined) {\n\n this.remove_prop(&to);\n\n } else {\n\n this.set_field_slice(&to, from_value);\n\n }\n\n }\n\n for j in 0..arg_c {\n\n this.set_field_slice(\n\n &j.to_string(),\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 37, "score": 345027.25903808273 }, { "content": "/// Array.prototype.fill ( value[, start[, end]] )\n\n///\n\n/// The method fills (modifies) all the elements of an array from start index (default 0)\n\n/// to an end index (default array length) with a static value. 
It returns the modified array\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.fill>\n\npub fn fill(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let len: i32 = from_value(this.get_field_slice(\"length\")).expect(\"Could not get argument\");\n\n let default_value = undefined();\n\n let value = args.get(0).unwrap_or(&default_value);\n\n let relative_start = args.get(1).unwrap_or(&default_value).to_num() as i32;\n\n let relative_end_val = args.get(2).unwrap_or(&default_value);\n\n let relative_end = if relative_end_val.is_undefined() {\n\n len\n\n } else {\n\n relative_end_val.to_num() as i32\n\n };\n\n let start = if relative_start < 0 {\n\n max(len + relative_start, 0)\n\n } else {\n\n min(relative_start, len)\n\n };\n\n let fin = if relative_end < 0 {\n\n max(len + relative_end, 0)\n\n } else {\n\n min(relative_end, len)\n\n };\n\n\n\n for i in start..fin {\n\n this.set_field_slice(&i.to_string(), value.clone());\n\n }\n\n\n\n Ok(this.clone())\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 38, "score": 345026.6849161153 }, { "content": "/// Array.prototype.push ( ...items )\n\n///\n\n/// The arguments are appended to the end of the array, in the order in which\n\n/// they appear. 
The new length of the array is returned as the result of the\n\n/// call.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.push>\n\npub fn push(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let new_array = add_to_array_object(this, args)?;\n\n Ok(new_array.get_field_slice(\"length\"))\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 39, "score": 345026.1191020117 }, { "content": "/// Check if it has a property\n\npub fn has_own_prop(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let prop = if args.is_empty() {\n\n None\n\n } else {\n\n from_value::<String>(args.get(0).expect(\"Cannot get object\").clone()).ok()\n\n };\n\n Ok(to_value(\n\n prop.is_some() && this.get_prop(&prop.expect(\"Cannot get object\")).is_some(),\n\n ))\n\n}\n\n\n", "file_path": "boa/src/builtins/object/mod.rs", "rank": 40, "score": 345022.7123238294 }, { "content": "/// Search for a match between this regex and a specified string\n\npub fn exec(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let arg_str = get_argument::<String>(args, 0)?;\n\n let mut last_index =\n\n from_value::<usize>(this.get_field_slice(\"lastIndex\")).map_err(to_value)?;\n\n let result = this.with_internal_state_ref(|regex: &RegExp| {\n\n let mut locations = regex.matcher.capture_locations();\n\n let result = if let Some(m) =\n\n regex\n\n .matcher\n\n .captures_read_at(&mut locations, arg_str.as_str(), last_index)\n\n {\n\n if regex.use_last_index {\n\n last_index = m.end();\n\n }\n\n let mut result = Vec::with_capacity(locations.len());\n\n for i in 0..locations.len() {\n\n if let Some((start, end)) = locations.get(i) {\n\n result.push(to_value(\n\n arg_str.get(start..end).expect(\"Could not get slice\"),\n\n ));\n", "file_path": "boa/src/builtins/regexp/mod.rs", "rank": 41, "score": 345022.7123238295 }, { "content": "pub fn includes_value(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let 
search_element = args\n\n .get(0)\n\n .cloned()\n\n .unwrap_or_else(|| Gc::new(ValueData::Undefined));\n\n\n\n let length: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not get `length` property.\");\n\n\n\n for idx in 0..length {\n\n let check_element = this.get_field_slice(&idx.to_string()).clone();\n\n\n\n if check_element == search_element {\n\n return Ok(to_value(true));\n\n }\n\n }\n\n\n\n Ok(to_value(false))\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 42, "score": 342941.13746644545 }, { "content": "/// Array.prototype.lastIndexOf ( searchElement[, fromIndex ] )\n\n///\n\n///\n\n/// lastIndexOf compares searchElement to the elements of the array in descending order\n\n/// using the Strict Equality Comparison algorithm, and if found at one or more indices,\n\n/// returns the largest such index; otherwise, -1 is returned.\n\n///\n\n/// The optional second argument fromIndex defaults to the array's length minus one\n\n/// (i.e. the whole array is searched). If it is greater than or equal to the length of the array,\n\n/// the whole array will be searched. If it is negative, it is used as the offset from the end\n\n/// of the array to compute fromIndex. If the computed index is less than 0, -1 is returned.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.lastindexof>\n\npub fn last_index_of(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n // If no arguments, return -1. 
Not described in spec, but is what chrome does.\n\n if args.is_empty() {\n\n return Ok(to_value(-1));\n\n }\n\n\n\n let search_element = args[0].clone();\n\n let len: i32 = from_value(this.get_field_slice(\"length\"))\n\n .expect(\"Expected array property \\\"length\\\" is not set.\");\n\n\n\n let mut idx = match args.get(1) {\n\n Some(from_idx_ptr) => {\n\n let from_idx = from_value(from_idx_ptr.clone())\n\n .expect(\"Error parsing \\\"Array.prototype.indexOf - fromIndex\\\" argument\");\n\n\n\n if from_idx >= 0 {\n\n min(from_idx, len - 1)\n\n } else {\n\n len + from_idx\n\n }\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 43, "score": 339330.5324586644 }, { "content": "/// Create a new `RegExp`\n\npub fn make_regexp(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n return Err(Gc::new(ValueData::Undefined));\n\n }\n\n let mut regex_body = String::new();\n\n let mut regex_flags = String::new();\n\n #[allow(clippy::indexing_slicing)] // length has been checked\n\n match args[0].deref() {\n\n ValueData::String(ref body) => {\n\n // first argument is a string -> use it as regex pattern\n\n regex_body = body.into();\n\n }\n\n ValueData::Object(ref obj) => {\n\n let slots = &*obj.borrow().internal_slots;\n\n if slots.get(\"RegExpMatcher\").is_some() {\n\n // first argument is another `RegExp` object, so copy its pattern and flags\n\n if let Some(body) = slots.get(\"OriginalSource\") {\n\n regex_body =\n\n from_value(body.clone()).expect(\"Could not convert value to String\");\n\n }\n", "file_path": "boa/src/builtins/regexp/mod.rs", "rank": 44, "score": 339329.9965553715 }, { "content": "/// Create new string [[Construct]]\n\n/// <https://searchfox.org/mozilla-central/source/js/src/vm/StringObject.h#19>\n\n// This gets called when a new String() is created, it's called by exec:346\n\npub fn make_string(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n // If we're constructing a string, we 
should set the initial length\n\n // To do this we need to convert the string back to a Rust String, then get the .len()\n\n // let a: String = from_value(args.get(0).expect(\"failed to get argument for String method\").clone()).unwrap();\n\n // this.set_field_slice(\"length\", to_value(a.len() as i32));\n\n\n\n // This value is used by console.log and other routines to match Obexpecty\"failed to parse argument for String method\"pe\n\n // to its Javascript Identifier (global constructor method name)\n\n this.set_kind(ObjectKind::String);\n\n this.set_internal_slot(\n\n \"StringData\",\n\n args.get(0)\n\n .expect(\"failed to get StringData for make_string()\")\n\n .clone(),\n\n );\n\n Ok(this.clone())\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 45, "score": 339328.66180887376 }, { "content": "/// Create a new boolean object - [[Construct]]\n\npub fn construct_boolean(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n this.set_kind(ObjectKind::Boolean);\n\n\n\n // Get the argument, if any\n\n if let Some(ref value) = args.get(0) {\n\n this.set_internal_slot(\"BooleanData\", to_boolean(value));\n\n } else {\n\n this.set_internal_slot(\"BooleanData\", to_boolean(&to_value(false)));\n\n }\n\n\n\n // no need to return `this` as its passed by reference\n\n Ok(this.clone())\n\n}\n\n\n", "file_path": "boa/src/builtins/boolean/mod.rs", "rank": 46, "score": 339325.25080774195 }, { "content": "/// https://tc39.es/ecma262/#sec-symbol-description\n\n/// Creates Symbol instances.\n\n///\n\n/// Symbol instances are ordinary objects that inherit properties from the Symbol prototype object.\n\n/// Symbol instances have a [[SymbolData]] internal slot.\n\n/// The [[SymbolData]] internal slot is the Symbol value represented by this Symbol object.\n\npub fn call_symbol(_: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // From an implementation and specificaition perspective Symbols are similar to Objects.\n\n // They 
have internal slots to hold the SymbolData and Description, they also have methods and a prototype.\n\n // So we start by creating an Object\n\n // TODO: Set prototype to Symbol.prototype (by changing to Object::create(), use interpreter to get Symbol.prototype)\n\n let mut sym_instance = Object::default();\n\n sym_instance.kind = ObjectKind::Symbol;\n\n\n\n // Set description which should either be undefined or a string\n\n let desc_string = match args.get(0) {\n\n Some(value) => to_value(value.to_string()),\n\n None => Gc::new(ValueData::Undefined),\n\n };\n\n\n\n sym_instance.set_internal_slot(\"Description\", desc_string);\n\n sym_instance.set_internal_slot(\"SymbolData\", to_value(random::<i32>()));\n\n\n\n // Set __proto__ internal slot\n\n let proto = ctx\n\n .realm\n\n .global_obj\n\n .get_field_slice(\"Symbol\")\n\n .get_field_slice(PROTOTYPE);\n\n sym_instance.set_internal_slot(INSTANCE_PROTOTYPE, proto);\n\n\n\n Ok(Gc::new(ValueData::Symbol(GcCell::new(sym_instance))))\n\n}\n\n\n", "file_path": "boa/src/builtins/symbol/mod.rs", "rank": 48, "score": 333889.2509257146 }, { "content": "/// https://tc39.es/ecma262/#sec-boolean.prototype.valueof\n\npub fn value_of(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n Ok(this_boolean_value(this))\n\n}\n\n\n", "file_path": "boa/src/builtins/boolean/mod.rs", "rank": 49, "score": 333251.3093564996 }, { "content": "/// Array.prototype.some ( callbackfn [ , thisArg ] )\n\n///\n\n/// The some method tests whether at least one element in the array passes\n\n/// the test implemented by the provided callback function. It returns a Boolean value,\n\n/// true if the callback function returns a truthy value for at least one element\n\n/// in the array. 
Otherwise, false.\n\n///\n\n/// Caution: Calling this method on an empty array returns false for any condition!\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.some/>\n\npub fn some(this: &Value, args: &[Value], interpreter: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n return Err(to_value(\n\n \"missing callback when calling function Array.prototype.some\".to_string(),\n\n ));\n\n }\n\n let callback = &args[0];\n\n let this_arg = if args.len() > 1 {\n\n args[1].clone()\n\n } else {\n\n Gc::new(ValueData::Undefined)\n\n };\n\n let mut i = 0;\n\n let max_len: i32 = from_value(this.get_field_slice(\"length\")).unwrap();\n\n let mut len = max_len;\n\n while i < len {\n\n let element = this.get_field_slice(&i.to_string());\n\n let arguments = vec![element.clone(), to_value(i), this.clone()];\n\n let result = interpreter.call(callback, &this_arg, arguments)?.is_true();\n\n if result {\n\n return Ok(to_value(true));\n\n }\n\n // the length of the array must be updated because the callback can mutate it.\n\n len = min(max_len, from_value(this.get_field_slice(\"length\")).unwrap());\n\n i += 1;\n\n }\n\n Ok(to_value(false))\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 50, "score": 332831.7717291893 }, { "content": "/// Array.prototype.forEach ( callbackFn [ , thisArg ] )\n\n///\n\n/// This method executes the provided callback function for each element in the array.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.foreach>\n\npub fn for_each(this: &Value, args: &[Value], interpreter: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n return Err(to_value(\n\n \"Missing argument for Array.prototype.forEach\".to_string(),\n\n ));\n\n }\n\n\n\n let callback_arg = args.get(0).expect(\"Could not get `callbackFn` argument.\");\n\n let this_arg = args.get(1).cloned().unwrap_or_else(undefined);\n\n\n\n let length: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not get `length` property.\");\n\n\n\n 
for i in 0..length {\n\n let element = this.get_field_slice(&i.to_string());\n\n let arguments = vec![element.clone(), to_value(i), this.clone()];\n\n\n\n interpreter.call(callback_arg, &this_arg, arguments)?;\n\n }\n\n\n\n Ok(Gc::new(ValueData::Undefined))\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 51, "score": 332829.2889921045 }, { "content": "/// Print a javascript value to the standard error stream\n\npub fn error(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let args: Vec<String> = FromIterator::from_iter(\n\n args.iter()\n\n .map(|x| from_value::<String>(x.clone()).expect(\"Could not convert value to String\")),\n\n );\n\n eprintln!(\"{}\", args.join(\" \"));\n\n Ok(Gc::new(ValueData::Undefined))\n\n}\n\n\n", "file_path": "boa/src/builtins/console.rs", "rank": 52, "score": 332419.1342840878 }, { "content": "/// Print a javascript value to the standard output stream\n\n/// <https://console.spec.whatwg.org/#logger>\n\npub fn log(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n // Welcome to console.log! 
The output here is what the developer sees, so its best matching through value types and stringifying to the correct output\n\n // The input is a vector of Values, we generate a vector of strings then\n\n // pass them to println!\n\n let args: Vec<String> =\n\n FromIterator::from_iter(args.iter().map(|x| log_string_from(x.deref(), false)));\n\n\n\n println!(\"{}\", args.join(\" \"));\n\n Ok(Gc::new(ValueData::Undefined))\n\n}\n", "file_path": "boa/src/builtins/console.rs", "rank": 53, "score": 332418.70336493076 }, { "content": "/// Process a Javascript object into a JSON string\n\npub fn stringify(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let obj = args.get(0).expect(\"cannot get argument for JSON.stringify\");\n\n let json = obj.to_json();\n\n Ok(to_value(to_string_pretty(&json).expect(\"\")))\n\n}\n\n\n", "file_path": "boa/src/builtins/json.rs", "rank": 54, "score": 332414.8320568674 }, { "content": "/// Parse a JSON string into a Javascript object\n\n/// <https://tc39.es/ecma262/#sec-json.parse>\n\npub fn parse(_: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n match serde_json::from_str::<JSONValue>(\n\n &args\n\n .get(0)\n\n .expect(\"cannot get argument for JSON.parse\")\n\n .clone()\n\n .to_string(),\n\n ) {\n\n Ok(json) => Ok(to_value(json)),\n\n Err(err) => Err(to_value(err.to_string())),\n\n }\n\n}\n", "file_path": "boa/src/builtins/json.rs", "rank": 55, "score": 332414.8320568674 }, { "content": "/// Create a new object\n\npub fn make_object(_: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n Ok(Gc::new(ValueData::Undefined))\n\n}\n\n\n", "file_path": "boa/src/builtins/object/mod.rs", "rank": 56, "score": 330446.8901466558 }, { "content": "/// RegExp.prototype[Symbol.match]\n\n/// Returns matches of the regular expression against a string\n\npub fn r#match(this: &Value, arg: String, ctx: &mut Interpreter) -> ResultValue {\n\n let (matcher, flags) =\n\n this.with_internal_state_ref(|regex: 
&RegExp| (regex.matcher.clone(), regex.flags.clone()));\n\n if flags.contains('g') {\n\n let mut matches = Vec::new();\n\n for mat in matcher.find_iter(&arg) {\n\n matches.push(to_value(mat.as_str()));\n\n }\n\n if matches.is_empty() {\n\n return Ok(Gc::new(ValueData::Null));\n\n }\n\n Ok(to_value(matches))\n\n } else {\n\n exec(this, &[to_value(arg)], ctx)\n\n }\n\n}\n\n\n", "file_path": "boa/src/builtins/regexp/mod.rs", "rank": 57, "score": 330228.67764216213 }, { "content": "/// Get the string value to a primitive string\n\npub fn to_string(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n // Get String from String Object and send it back as a new value\n\n let primitive_val = this.get_internal_slot(\"StringData\");\n\n Ok(to_value(format!(\"{}\", primitive_val)))\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 58, "score": 329806.4657461756 }, { "content": "/// <https://tc39.es/ecma262/#sec-symbol.prototype.tostring>\n\npub fn to_string(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let s: Value = this.get_internal_slot(\"Description\");\n\n let full_string = format!(r#\"Symbol({})\"#, s.to_string());\n\n Ok(to_value(full_string))\n\n}\n\n\n", "file_path": "boa/src/builtins/symbol/mod.rs", "rank": 59, "score": 329797.57509478927 }, { "content": "/// Array.prototype.shift ( )\n\n///\n\n/// The first element of the array is removed from the array and returned.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.shift/>\n\npub fn shift(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let len: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not convert argument to i32\");\n\n\n\n if len == 0 {\n\n this.set_field_slice(\"length\", to_value(0_i32));\n\n // Since length is 0, this will be an Undefined value\n\n return Ok(this.get_field_slice(&0.to_string()));\n\n }\n\n\n\n let first: Value = this.get_field_slice(&0.to_string());\n\n\n\n for k in 1..len {\n\n let 
from = k.to_string();\n\n let to = (k.wrapping_sub(1)).to_string();\n\n\n\n let from_value = this.get_field_slice(&from);\n\n if from_value == Gc::new(ValueData::Undefined) {\n\n this.remove_prop(&to);\n\n } else {\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 60, "score": 329797.57509478927 }, { "content": "#[allow(clippy::else_if_without_else)]\n\npub fn reverse(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let len: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not convert argument to i32\");\n\n let middle: i32 = len.wrapping_div(2);\n\n\n\n for lower in 0..middle {\n\n let upper = len.wrapping_sub(lower).wrapping_sub(1);\n\n\n\n let upper_exists = this.has_field(&upper.to_string());\n\n let lower_exists = this.has_field(&lower.to_string());\n\n\n\n let upper_value = this.get_field_slice(&upper.to_string());\n\n let lower_value = this.get_field_slice(&lower.to_string());\n\n\n\n if upper_exists && lower_exists {\n\n this.set_field_slice(&upper.to_string(), lower_value);\n\n this.set_field_slice(&lower.to_string(), upper_value);\n\n } else if upper_exists {\n\n this.set_field_slice(&lower.to_string(), upper_value);\n\n this.remove_prop(&upper.to_string());\n\n } else if lower_exists {\n\n this.set_field_slice(&upper.to_string(), lower_value);\n\n this.remove_prop(&lower.to_string());\n\n }\n\n }\n\n\n\n Ok(this.clone())\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 61, "score": 329797.57509478927 }, { "content": "/// Array.prototype.pop ( )\n\n///\n\n/// The last element of the array is removed from the array and returned.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.pop>\n\npub fn pop(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let curr_length: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not convert argument to i32\");\n\n if curr_length < 1 {\n\n return Ok(Gc::new(ValueData::Undefined));\n\n }\n\n let pop_index = 
curr_length.wrapping_sub(1);\n\n let pop_value: Value = this.get_field_slice(&pop_index.to_string());\n\n this.remove_prop(&pop_index.to_string());\n\n this.set_field_slice(\"length\", to_value(pop_index));\n\n Ok(pop_value)\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 62, "score": 329797.57509478927 }, { "content": "/// Return a string representing the regular expression\n\npub fn to_string(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let body = from_value::<String>(this.get_internal_slot(\"OriginalSource\")).map_err(to_value)?;\n\n let flags = this.with_internal_state_ref(|regex: &RegExp| regex.flags.clone());\n\n Ok(to_value(format!(\"/{}/{}\", body, flags)))\n\n}\n\n\n", "file_path": "boa/src/builtins/regexp/mod.rs", "rank": 63, "score": 329797.57509478927 }, { "content": "/// To string\n\npub fn to_string(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n Ok(to_value(this.to_string()))\n\n}\n\n\n", "file_path": "boa/src/builtins/object/mod.rs", "rank": 64, "score": 329797.57509478927 }, { "content": "/// https://tc39.es/ecma262/#sec-boolean.prototype.tostring\n\npub fn to_string(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n let b = this_boolean_value(this);\n\n Ok(to_value(b.to_string()))\n\n}\n\n\n", "file_path": "boa/src/builtins/boolean/mod.rs", "rank": 65, "score": 329797.57509478927 }, { "content": "/// Array.isArray ( arg )\n\n///\n\n/// The isArray function takes one argument arg, and returns the Boolean value true\n\n/// if the argument is an object whose class internal property is \"Array\"; otherwise it returns false.\n\n/// <https://tc39.es/ecma262/#sec-array.isarray>\n\n/// ECMA-262 v5, 15.4.3.2\n\npub fn is_array(_this: &Value, args: &[Value], _interpreter: &mut Interpreter) -> ResultValue {\n\n let value_true = Gc::new(ValueData::Boolean(true));\n\n let value_false = Gc::new(ValueData::Boolean(false));\n\n\n\n match args.get(0) {\n\n Some(arg) => {\n\n match 
*(*arg).clone() {\n\n // 1.\n\n ValueData::Object(ref obj) => {\n\n // 2.\n\n if obj.borrow().kind == ObjectKind::Array {\n\n return Ok(value_true);\n\n }\n\n Ok(value_false)\n\n }\n\n // 3.\n\n _ => Ok(value_false),\n\n }\n\n }\n\n None => Ok(value_false),\n\n }\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 66, "score": 327177.2482267673 }, { "content": "/// Array.prototype.filter ( callback, [ thisArg ] )\n\n///\n\n/// For each element in the array the callback function is called, and a new\n\n/// array is constructed for every value whose callback returned a truthy value\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.filter>\n\npub fn filter(this: &Value, args: &[Value], interpreter: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n return Err(to_value(\n\n \"missing argument 0 when calling function Array.prototype.filter\",\n\n ));\n\n }\n\n\n\n let callback = args.get(0).cloned().unwrap_or_else(undefined);\n\n let this_val = args.get(1).cloned().unwrap_or_else(undefined);\n\n\n\n let length: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not get `length` property.\");\n\n\n\n let new = new_array(&interpreter)?;\n\n\n\n let values = (0..length)\n\n .filter_map(|idx| {\n\n let element = this.get_field_slice(&idx.to_string());\n\n\n\n let args = vec![element.clone(), to_value(idx), new.clone()];\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 67, "score": 327173.29292729753 }, { "content": "/// Array.prototype.find ( callback, [thisArg] )\n\n///\n\n/// The find method executes the callback function once for each index of the array\n\n/// until the callback returns a truthy value. If so, find immediately returns the value\n\n/// of that element. 
Otherwise, find returns undefined.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.find>\n\npub fn find(this: &Value, args: &[Value], interpreter: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n return Err(to_value(\n\n \"missing callback when calling function Array.prototype.find\".to_string(),\n\n ));\n\n }\n\n let callback = &args[0];\n\n let this_arg = if args.len() > 1 {\n\n args[1].clone()\n\n } else {\n\n Gc::new(ValueData::Undefined)\n\n };\n\n let len: i32 = from_value(this.get_field_slice(\"length\")).unwrap();\n\n for i in 0..len {\n\n let element = this.get_field_slice(&i.to_string());\n\n let arguments = vec![element.clone(), to_value(i), this.clone()];\n\n let result = interpreter.call(callback, &this_arg, arguments)?;\n\n if result.is_true() {\n\n return Ok(element);\n\n }\n\n }\n\n Ok(Gc::new(ValueData::Undefined))\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 68, "score": 327172.91853492544 }, { "content": "/// Array.prototype.map ( callback, [ thisArg ] )\n\n///\n\n/// For each element in the array the callback function is called, and a new\n\n/// array is constructed from the return values of these calls.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.map>\n\npub fn map(this: &Value, args: &[Value], interpreter: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n return Err(to_value(\n\n \"missing argument 0 when calling function Array.prototype.map\",\n\n ));\n\n }\n\n\n\n let callback = args.get(0).cloned().unwrap_or_else(undefined);\n\n let this_val = args.get(1).cloned().unwrap_or_else(undefined);\n\n\n\n let length: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not get `length` property.\");\n\n\n\n let new = new_array(&interpreter)?;\n\n\n\n let values = (0..length)\n\n .map(|idx| {\n\n let element = this.get_field_slice(&idx.to_string());\n\n\n\n let args = vec![element, to_value(idx), new.clone()];\n\n\n\n interpreter\n\n .call(&callback, &this_val, 
args)\n\n .unwrap_or_else(|_| undefined())\n\n })\n\n .collect::<Vec<Value>>();\n\n\n\n construct_array(&new, &values)\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 69, "score": 327172.5843995862 }, { "content": "/// Array.prototype.every ( callback, [ thisArg ] )\n\n///\n\n/// The every method executes the provided callback function once for each\n\n/// element present in the array until it finds the one where callback returns\n\n/// a falsy value. It returns `false` if it finds such element, otherwise it\n\n/// returns `true`.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.every/>\n\npub fn every(this: &Value, args: &[Value], interpreter: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n return Err(to_value(\n\n \"missing callback when calling function Array.prototype.every\".to_string(),\n\n ));\n\n }\n\n let callback = &args[0];\n\n let this_arg = if args.len() > 1 {\n\n args[1].clone()\n\n } else {\n\n Gc::new(ValueData::Undefined)\n\n };\n\n let mut i = 0;\n\n let max_len: i32 = from_value(this.get_field_slice(\"length\")).unwrap();\n\n let mut len = max_len;\n\n while i < len {\n\n let element = this.get_field_slice(&i.to_string());\n\n let arguments = vec![element.clone(), to_value(i), this.clone()];\n\n let result = interpreter.call(callback, &this_arg, arguments)?.is_true();\n\n if !result {\n\n return Ok(to_value(false));\n\n }\n\n len = min(max_len, from_value(this.get_field_slice(\"length\")).unwrap());\n\n i += 1;\n\n }\n\n Ok(to_value(true))\n\n}\n\n\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 70, "score": 327171.70381732605 }, { "content": "/// Array.prototype.slice ( [begin[, end]] )\n\n///\n\n/// The slice method takes two arguments, start and end, and returns an array containing the\n\n/// elements of the array from element start up to, but not including, element end (or through the\n\n/// end of the array if end is undefined). 
If start is negative, it is treated as length + start\n\n/// where length is the length of the array. If end is negative, it is treated as length + end where\n\n/// length is the length of the array.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.slice>\n\npub fn slice(this: &Value, args: &[Value], interpreter: &mut Interpreter) -> ResultValue {\n\n let new_array = new_array(interpreter)?;\n\n let len: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not convert argument to i32\");\n\n\n\n let start = match args.get(0) {\n\n Some(v) => from_value(v.clone()).expect(\"failed to parse argument for Array method\"),\n\n None => 0,\n\n };\n\n let end = match args.get(1) {\n\n Some(v) => from_value(v.clone()).expect(\"failed to parse argument for Array method\"),\n\n None => len,\n\n };\n\n\n\n let from = if start < 0 {\n\n max(len.wrapping_add(start), 0)\n\n } else {\n\n min(start, len)\n\n };\n\n let to = if end < 0 {\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 71, "score": 327168.44981272746 }, { "content": "/// Array.prototype.findIndex ( predicate [ , thisArg ] )\n\n///\n\n/// This method executes the provided predicate function for each element of the array.\n\n/// If the predicate function returns `true` for an element, this method returns the index of the element.\n\n/// If all elements return `false`, the value `-1` is returned.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.findindex/>\n\npub fn find_index(this: &Value, args: &[Value], interpreter: &mut Interpreter) -> ResultValue {\n\n if args.is_empty() {\n\n return Err(to_value(\n\n \"Missing argument for Array.prototype.findIndex\".to_string(),\n\n ));\n\n }\n\n\n\n let predicate_arg = args.get(0).expect(\"Could not get `predicate` argument.\");\n\n\n\n let this_arg = args\n\n .get(1)\n\n .cloned()\n\n .unwrap_or_else(|| Gc::new(ValueData::Undefined));\n\n\n\n let length: i32 =\n\n from_value(this.get_field_slice(\"length\")).expect(\"Could not get `length` 
property.\");\n\n\n\n for i in 0..length {\n\n let element = this.get_field_slice(&i.to_string());\n\n let arguments = vec![element.clone(), to_value(i), this.clone()];\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 72, "score": 321761.95738197817 }, { "content": "/// Get the string value to a primitive string\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.valueof>\n\npub fn value_of(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // Use the to_string method because it is specified to do the same thing in this case\n\n to_string(this, args, ctx)\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 73, "score": 319436.03432061104 }, { "content": "/// Number().valueOf()\n\n///\n\n/// https://tc39.es/ecma262/#sec-number.prototype.valueof\n\npub fn value_of(this: &Value, _args: &[Value], _ctx: &mut Interpreter) -> ResultValue {\n\n Ok(to_number(this))\n\n}\n\n\n", "file_path": "boa/src/builtins/number/mod.rs", "rank": 74, "score": 319432.31653777923 }, { "content": "/// Create a new error\n\npub fn make_error(this: &Value, args: &[Value], _: &mut Interpreter) -> ResultValue {\n\n if !args.is_empty() {\n\n this.set_field_slice(\n\n \"message\",\n\n to_value(\n\n args.get(0)\n\n .expect(\"failed getting error message\")\n\n .to_string(),\n\n ),\n\n );\n\n }\n\n // This value is used by console.log and other routines to match Object type\n\n // to its Javascript Identifier (global constructor method name)\n\n this.set_kind(ObjectKind::Error);\n\n Ok(Gc::new(ValueData::Undefined))\n\n}\n", "file_path": "boa/src/builtins/error.rs", "rank": 75, "score": 316802.918548142 }, { "content": "/// Return a String which is a subset of the String value resulting from converting this object to a String.\n\n/// The subset of the string is contained between the start index and the end index.\n\n/// When both the start and end arguments are specified, the smaller one represent the index of the code unit\n\n/// from which the 
returned String will start and the larger one the index of the code unit just after the end.\n\n/// When only the start index is specified, the end index defaults to being the length of the string.\n\n/// When no argument is specified, the returned String is the same as the original\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.substring>\n\npub fn substring(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n // If no args are specified, start is 'undefined', defaults to 0\n\n let start = if args.is_empty() {\n\n 0\n\n } else {\n\n from_value(\n\n args.get(0)\n\n .expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\")\n\n };\n\n let length: i32 = primitive_val.chars().count() as i32;\n\n // If less than 2 args specified, end is the length of the this object converted to a String\n\n let end = if args.len() < 2 {\n\n length\n\n } else {\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 76, "score": 315820.36719495157 }, { "content": "/// Return a String which is a subset of the String value resulting from converting this object to a String.\n\n/// The subset of the string starts at the start index and is at most length code units long, depending if the string is shorter.\n\n/// When only the start index is specified, the length become the length of the string.\n\n/// When the start index is negative, the start index become the number of code units from the end of the string.\n\n/// When no argument is specified, the returned String is the same as the original\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.substr>\n\npub fn substr(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n 
// First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n // If no args are specified, start is 'undefined', defaults to 0\n\n let mut start = if args.is_empty() {\n\n 0\n\n } else {\n\n from_value(\n\n args.get(0)\n\n .expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\")\n\n };\n\n let length: i32 = primitive_val.chars().count() as i32;\n\n // If less than 2 args specified, end is +infinity, the maximum number value.\n\n // Using i32::max_value() should be safe because the final length used is at most\n\n // the number of code units from start to the end of the string,\n\n // which should always be smaller or equals to both +infinity and i32::max_value\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 77, "score": 315819.6580907756 }, { "content": "/// Returns a single element String containing the code unit at index pos within the String value\n\n/// resulting from converting this object to a String. If there is no element at that index, the\n\n/// result is the empty String. 
The result is a String value, not a String object.\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.charat>\n\npub fn char_at(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val = ctx.value_to_rust_string(this);\n\n let pos: i32 = from_value(\n\n args.get(0)\n\n .expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\");\n\n\n\n // Calling .len() on a string would give the wrong result, as they are bytes not the number of\n\n // unicode code points\n\n // Note that this is an O(N) operation (because UTF-8 is complex) while getting the number of\n\n // bytes is an O(1) operation.\n\n let length = primitive_val.chars().count();\n\n\n\n // We should return an empty string is pos is out of range\n\n if pos >= length as i32 || pos < 0 {\n\n return Ok(to_value::<String>(String::new()));\n\n }\n\n\n\n Ok(to_value::<char>(\n\n primitive_val\n\n .chars()\n\n .nth(pos as usize)\n\n .expect(\"failed to get value\"),\n\n ))\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 78, "score": 315819.3311803655 }, { "content": "/// Array.prototype.toString ( separator )\n\n///\n\n/// The toString function is intentionally generic; it does not require that\n\n/// its this value be an Array object. 
Therefore it can be transferred to\n\n/// other kinds of objects for use as a method.\n\n/// <https://tc39.es/ecma262/#sec-array.prototype.tostring>\n\npub fn to_string(this: &Value, _args: &[Value], _ctx: &mut Interpreter) -> ResultValue {\n\n let method_name = \"join\";\n\n let mut arguments = vec![to_value(\",\")];\n\n // 2.\n\n let mut method: Value =\n\n from_value(this.get_field_slice(method_name)).expect(\"failed to get Array.prototype.join\");\n\n // 3.\n\n if !method.is_function() {\n\n method = _ctx\n\n .realm\n\n .global_obj\n\n .get_field_slice(\"Object\")\n\n .get_field_slice(PROTOTYPE)\n\n .get_field_slice(\"toString\");\n\n\n\n method = from_value(method).expect(\"failed to get Object.prototype.toString\");\n\n arguments = vec![];\n\n }\n\n // 4.\n\n let join_result = _ctx.call(&method, this, arguments);\n", "file_path": "boa/src/builtins/array/mod.rs", "rank": 79, "score": 315818.7513703172 }, { "content": "/// Returns a String that is the result of concatenating this String and all strings provided as\n\n/// arguments\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.concat>\n\npub fn concat(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let mut new_str = ctx.value_to_rust_string(this);\n\n\n\n for arg in args {\n\n let concat_str: String = from_value(arg.clone()).expect(\"failed to get argument value\");\n\n new_str.push_str(&concat_str);\n\n }\n\n\n\n Ok(to_value(new_str))\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 80, "score": 315816.2581257281 }, { "content": "/// Returns a String that is the result of repeating this String the number of times given by the\n\n/// first argument\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.repeat>\n\npub fn repeat(this: &Value, args: &[Value], ctx: &mut Interpreter) -> 
ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n\n\n let repeat_times: usize = from_value(\n\n args.get(0)\n\n .expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\");\n\n Ok(to_value(primitive_val.repeat(repeat_times)))\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 81, "score": 315816.1417950635 }, { "content": "/// Returns a Boolean indicating whether searchString appears as a substring of\n\n/// the result of converting this object to a String, at one or more indices\n\n/// that are greater than or equal to position. If position is undefined, 0 is\n\n/// assumed, so as to search all of the String.\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.includes>\n\npub fn includes(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n\n\n // TODO: Should throw TypeError if search_string is regular expression\n\n let search_string: String = from_value(\n\n args.get(0)\n\n .expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\");\n\n\n\n let length: i32 = primitive_val.chars().count() as i32;\n\n\n\n // If less than 2 args specified, position is 'undefined', defaults to 0\n\n let position: i32 = if args.len() < 2 {\n\n 0\n\n } else {\n\n from_value(args.get(1).expect(\"Could not get argument\").clone())\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 82, "score": 315815.57956455695 }, { "content": "/// If 
searchString appears as a substring of the result of converting this\n\n/// object to a String, at one or more indices that are greater than or equal to\n\n/// position, then the smallest such index is returned; otherwise, -1 is\n\n/// returned. If position is undefined, 0 is assumed, so as to search all of the\n\n/// String.\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.includes>\n\npub fn index_of(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n\n\n // TODO: Should throw TypeError if search_string is regular expression\n\n let search_string: String = from_value(\n\n args.get(0)\n\n .expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\");\n\n\n\n let length: i32 = primitive_val.chars().count() as i32;\n\n\n\n // If less than 2 args specified, position is 'undefined', defaults to 0\n\n let position: i32 = if args.len() < 2 {\n\n 0\n\n } else {\n\n from_value(args.get(1).expect(\"Could not get argument\").clone())\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 83, "score": 315815.5395824362 }, { "content": "/// Returns a String which contains the slice of the JS String from character at \"start\" index up\n\n/// to but not including character at \"end\" index\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.slice>\n\npub fn slice(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n\n\n let start: i32 = from_value(\n\n args.get(0)\n\n 
.expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\");\n\n let end: i32 = from_value(\n\n args.get(1)\n\n .expect(\"failed to get argument in slice\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument\");\n\n\n\n // Calling .len() on a string would give the wrong result, as they are bytes not the number of unicode code points\n\n // Note that this is an O(N) operation (because UTF-8 is complex) while getting the number of bytes is an O(1) operation.\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 84, "score": 315812.42485407076 }, { "content": "/// Number().toPrecision(p)\n\n///\n\n/// https://tc39.es/ecma262/#sec-number.prototype.toprecision\n\npub fn to_precision(this: &Value, args: &[Value], _ctx: &mut Interpreter) -> ResultValue {\n\n let this_num = to_number(this);\n\n let _num_str_len = format!(\"{}\", this_num.to_num()).len();\n\n let _precision = match args.get(0) {\n\n Some(n) => match n.to_int() {\n\n x if x > 0 => n.to_int() as usize,\n\n _ => 0,\n\n },\n\n None => 0,\n\n };\n\n // TODO: Implement toPrecision\n\n unimplemented!(\"TODO: Implement toPrecision\");\n\n}\n\n\n", "file_path": "boa/src/builtins/number/mod.rs", "rank": 85, "score": 315812.42485407076 }, { "content": "/// Number().toString()\n\n///\n\n/// https://tc39.es/ecma262/#sec-number.prototype.tostring\n\npub fn to_string(this: &Value, _args: &[Value], _ctx: &mut Interpreter) -> ResultValue {\n\n Ok(to_value(format!(\"{}\", to_number(this).to_num())))\n\n}\n\n\n", "file_path": "boa/src/builtins/number/mod.rs", "rank": 86, "score": 315812.42485407076 }, { "content": "/// Number().toExponential()\n\n///\n\n/// https://tc39.es/ecma262/#sec-number.prototype.toexponential\n\npub fn to_exponential(this: &Value, _args: &[Value], _ctx: &mut Interpreter) -> ResultValue {\n\n let this_num = to_number(this).to_num();\n\n let this_str_num = num_to_exponential(this_num);\n\n 
Ok(to_value(this_str_num))\n\n}\n\n\n", "file_path": "boa/src/builtins/number/mod.rs", "rank": 87, "score": 315812.4248540708 }, { "content": "/// TODO: update this method to return iterator\n\n/// Returns an array* of all results matching a string against a regular expression, including capturing groups\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.matchall>\n\npub fn match_all(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n let re: Value = match args.get(0) {\n\n Some(arg) => {\n\n if arg == &Gc::new(ValueData::Null) {\n\n make_regexp(\n\n &to_value(Object::default()),\n\n &[\n\n to_value(ctx.value_to_rust_string(arg)),\n\n to_value(String::from(\"g\")),\n\n ],\n\n ctx,\n\n )\n\n } else if arg == &Gc::new(ValueData::Undefined) {\n\n make_regexp(\n\n &to_value(Object::default()),\n\n &[Gc::new(ValueData::Undefined), to_value(String::from(\"g\"))],\n\n ctx,\n\n )\n\n } else {\n\n from_value(arg.clone()).map_err(to_value)\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 88, "score": 315812.4248540708 }, { "content": "/// https://tc39.es/ecma262/#sec-number.prototype.tofixed\n\npub fn to_fixed(this: &Value, args: &[Value], _ctx: &mut Interpreter) -> ResultValue {\n\n let this_num = to_number(this).to_num();\n\n let precision = match args.get(0) {\n\n Some(n) => match n.to_int() {\n\n x if x > 0 => n.to_int() as usize,\n\n _ => 0,\n\n },\n\n None => 0,\n\n };\n\n let this_fixed_num = format!(\"{:.*}\", precision, this_num);\n\n Ok(to_value(this_fixed_num))\n\n}\n\n\n", "file_path": "boa/src/builtins/number/mod.rs", "rank": 89, "score": 315812.42485407076 }, { "content": "/// Returns a Boolean indicating whether the sequence of code units of the\n\n/// \"search string\" is the same as the corresponding code units of this string\n\n/// starting at position \"end position\" - length\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.endswith>\n\npub fn ends_with(this: &Value, args: &[Value], ctx: &mut Interpreter) -> 
ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n\n\n // TODO: Should throw TypeError if search_string is regular expression\n\n let search_string: String = from_value(\n\n args.get(0)\n\n .expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\");\n\n\n\n let length: i32 = primitive_val.chars().count() as i32;\n\n let search_length: i32 = search_string.chars().count() as i32;\n\n\n\n // If less than 2 args specified, end_position is 'undefined', defaults to\n\n // length of this\n\n let end_position: i32 = if args.len() < 2 {\n\n length\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 90, "score": 315812.42485407076 }, { "content": "/// Returns a Boolean indicating whether the sequence of code units of the\n\n/// \"search string\" is the same as the corresponding code units of this string\n\n/// starting at index \"position\"\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.startswith>\n\npub fn starts_with(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n\n\n // TODO: Should throw TypeError if pattern is regular expression\n\n let search_string: String = from_value(\n\n args.get(0)\n\n .expect(\"failed to get argument for String method\")\n\n .clone(),\n\n )\n\n .expect(\"failed to parse argument for String method\");\n\n\n\n let length: i32 = primitive_val.chars().count() as i32;\n\n let search_length: i32 = search_string.chars().count() as i32;\n\n\n\n // If less than 2 args specified, position 
is 'undefined', defaults to 0\n\n let position: i32 = if args.len() < 2 {\n\n 0\n\n } else {\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 91, "score": 315812.42485407076 }, { "content": "/// <https://tc39.es/ecma262/#sec-string.prototype.replace>\n\npub fn replace(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // TODO: Support Symbol replacer\n\n let primitive_val: String = ctx.value_to_rust_string(this);\n\n if args.is_empty() {\n\n return Ok(to_value(primitive_val));\n\n }\n\n\n\n let regex_body = get_regex_string(args.get(0).expect(\"Value needed\"));\n\n let re = Regex::new(&regex_body).expect(\"unable to convert regex to regex object\");\n\n let mat = re.find(&primitive_val).expect(\"unable to find value\");\n\n let caps = re\n\n .captures(&primitive_val)\n\n .expect(\"unable to get capture groups from text\");\n\n\n\n let replace_value = if args.len() > 1 {\n\n // replace_object could be a string or function or not exist at all\n\n let replace_object: &Value = args.get(1).expect(\"second argument expected\");\n\n match replace_object.deref() {\n\n ValueData::String(val) => {\n\n // https://tc39.es/ecma262/#table-45\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 92, "score": 315812.42485407076 }, { "content": "pub fn trim(this: &Value, _: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n let this_str: String = ctx.value_to_rust_string(this);\n\n Ok(to_value(this_str.trim_matches(is_trimmable_whitespace)))\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 93, "score": 314251.294579609 }, { "content": "/// Return a String with every code point mapped to its corresponding lowercase equivalent.\n\n/// With the current implementation the string is always copied even if the resulting String is identical\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.tolowercase>\n\npub fn to_lowercase(this: &Value, _: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual 
string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let this_str: String = ctx.value_to_rust_string(this);\n\n // The Rust String is mapped to uppercase using the builtin .to_lowercase().\n\n // There might be corner cases where it does not behave exactly like Javascript expects\n\n Ok(to_value(this_str.to_lowercase()))\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 94, "score": 314251.294579609 }, { "content": "/// Return a String with every code point mapped to its corresponding uppercase equivalent.\n\n/// With the current implementation the string is always copied even if the resulting String is identical\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.touppercase>\n\npub fn to_uppercase(this: &Value, _: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n // First we get it the actual string a private field stored on the object only the engine has access to.\n\n // Then we convert it into a Rust String by wrapping it in from_value\n\n let this_str: String = ctx.value_to_rust_string(this);\n\n // The Rust String is mapped to uppercase using the builtin .to_uppercase().\n\n // There might be corner cases where it does not behave exactly like Javascript expects\n\n Ok(to_value(this_str.to_uppercase()))\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 95, "score": 314251.294579609 }, { "content": "/// Returns an array whose contents is all the results matching the regular expression, if the global (g) flag is present,\n\n/// in its absence, only the first complete match and its related capturing groups is returned,\n\n/// otherwise null is returned if no match is found.\n\n/// <https://tc39.es/ecma262/#sec-string.prototype.match>\n\npub fn r#match(this: &Value, args: &[Value], ctx: &mut Interpreter) -> ResultValue {\n\n let re = make_regexp(&to_value(Object::default()), &[args[0].clone()], ctx)?;\n\n regexp_match(&re, 
ctx.value_to_rust_string(this), ctx)\n\n}\n\n\n", "file_path": "boa/src/builtins/string/mod.rs", "rank": 96, "score": 312949.6775640475 }, { "content": "fn get_global(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n this.with_internal_state_ref(|regex: &RegExp| Ok(to_value(regex.global)))\n\n}\n\n\n", "file_path": "boa/src/builtins/regexp/mod.rs", "rank": 97, "score": 310961.60407080577 }, { "content": "fn get_flags(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n this.with_internal_state_ref(|regex: &RegExp| Ok(to_value(regex.flags.clone())))\n\n}\n\n\n", "file_path": "boa/src/builtins/regexp/mod.rs", "rank": 98, "score": 310961.60407080577 }, { "content": "fn get_dot_all(this: &Value, _: &[Value], _: &mut Interpreter) -> ResultValue {\n\n this.with_internal_state_ref(|regex: &RegExp| Ok(to_value(regex.dot_all)))\n\n}\n\n\n", "file_path": "boa/src/builtins/regexp/mod.rs", "rank": 99, "score": 310961.60407080577 } ]
Rust
src/game.rs
ttempleton/rust-battleship
48d45f8c5d8c73ec399e1b6781418cdb5774fdc0
use crate::direction::Direction; use crate::player::Player; use crate::settings::GameSettings; use rand::{seq::SliceRandom, thread_rng, Rng}; pub struct Game { settings: GameSettings, players: [Player; 2], state: GameState, turn: u8, } impl Game { pub fn new(settings: GameSettings) -> Result<Game, &'static str> { let grid_size = [settings.spaces[0], settings.spaces[1]]; let mut players = [ Player::new(grid_size, settings.ships.len(), false), Player::new(grid_size, settings.ships.len(), true), ]; for player in &mut players { if !player.is_cpu() { player.add_ship([0, 0], Direction::West, settings.ships[0], true)?; } else { let mut rng = thread_rng(); let mut i = 0; while i < settings.ships.len() { let pos = [ rng.gen_range(0, grid_size[0]), rng.gen_range(0, grid_size[1]), ]; if player .add_ship(pos, Direction::random(), settings.ships[i], false) .is_ok() { i += 1; } } } } Ok(Game { settings: settings, players: players, state: GameState::Placement, turn: 0, }) } pub fn settings(&self) -> &GameSettings { &self.settings } pub fn active_player(&self) -> &Player { &self.players[self.turn as usize] } pub fn inactive_player(&self) -> &Player { &self.players[self.not_turn()] } pub fn is_state_placement(&self) -> bool { self.state == GameState::Placement } pub fn is_state_active(&self) -> bool { self.state == GameState::Active } pub fn set_state_active(&mut self) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to set game as active from a state other than placement") } else { self.state = GameState::Active; self.turn = 0; Ok(()) } } pub fn is_state_complete(&self) -> bool { self.state == GameState::Complete } pub fn turn(&self) -> usize { self.turn as usize } pub fn not_turn(&self) -> usize { (self.turn + 1) as usize % 2 } pub fn switch_active_player(&mut self) { self.turn = self.not_turn() as u8; } pub fn active_player_placed_all_ships(&self) -> bool { let ships = self.active_player().ships(); ships.len() == self.settings.ships.len() && 
!ships[ships.len() - 1].is_placement() } pub fn is_player_placing_ship(&self) -> bool { self.state == GameState::Placement && !self.active_player().is_cpu() } pub fn is_player_selecting_space(&self) -> bool { self.state == GameState::Active && !self.active_player().is_cpu() } pub fn get_winner(&self) -> Option<usize> { match self.state { GameState::Complete => Some(self.turn as usize), _ => None, } } pub fn place_ship(&mut self) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to place ship outside of placement game state") } else { let ref mut player = self.players[self.turn as usize]; let ship_count = player.ships().len(); player.place_placement_ship()?; if ship_count < self.settings.ships.len() { player.add_ship( [0, 0], Direction::West, self.settings.ships[ship_count], true, )?; } Ok(()) } } pub fn move_ship(&mut self, direction: Direction) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to move ship outside of placement game state") } else { self.players[self.turn as usize].move_placement_ship(direction)?; Ok(()) } } pub fn rotate_ship(&mut self) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to rotate ship outside of placement game state") } else { self.players[self.turn as usize].rotate_placement_ship()?; Ok(()) } } pub fn set_placement_ship(&mut self, pos: Vec<[u8; 2]>) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to set position of ship outside of placement game state") } else { let ship = self.players[self.turn as usize].placement_ship_mut()?; ship.set_pos(pos)?; Ok(()) } } pub fn select_space(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { let ref mut opponent = self.players[self.not_turn()]; opponent.select_space(pos)?; if opponent.sink_ship_if_all_hit(pos) == Ok(true) { self.state = match opponent.all_ships_sunk() { true => GameState::Complete, false => GameState::Active, }; } Ok(()) } pub fn 
suggested_check(&self) -> [u8; 2] { let mut rng = thread_rng(); let mut positions = self.inactive_player().suggested_checks(); positions.shuffle(&mut rng); positions[0] } pub fn move_grid_cursor(&mut self, direction: Direction) -> Result<(), &'static str> { self.players[self.turn as usize].move_grid_cursor(direction) } pub fn set_grid_cursor(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { self.players[self.turn as usize].set_grid_cursor(pos) } } #[derive(Clone, Copy, PartialEq)] pub enum GameState { Placement, Active, Complete, }
use crate::direction::Direction; use crate::player::Player; use crate::settings::GameSettings; use rand::{seq::SliceRandom, thread_rng, Rng}; pub struct Game { settings: GameSettings, players: [Player; 2], state: GameState, turn: u8, } impl Game { pub fn new(settings: GameSettings) -> Result<Game, &'static str> { let grid_size = [settings.spaces[0], settings.spaces[1]]; let mut players = [ Player::new(grid_size, settings.ships.len(), false), Player::new(grid_size, settings.ships.len(), true), ]; for player in &mut players { if !player.is_cpu() { player.add_ship([0, 0], Direction::West, settings.ships[0], true)?; } else { let mut rng = thread_rng(); let mut i = 0; while i < settings.ships.len() { let pos = [ rng.gen_range(0, grid_size[0]), rng.gen_range(0, grid_size[1]), ]; if player .add_ship(pos, Direction::random(), settings.ships[i], false) .is_ok() { i += 1; } } } } Ok(Game {
pub fn settings(&self) -> &GameSettings { &self.settings } pub fn active_player(&self) -> &Player { &self.players[self.turn as usize] } pub fn inactive_player(&self) -> &Player { &self.players[self.not_turn()] } pub fn is_state_placement(&self) -> bool { self.state == GameState::Placement } pub fn is_state_active(&self) -> bool { self.state == GameState::Active } pub fn set_state_active(&mut self) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to set game as active from a state other than placement") } else { self.state = GameState::Active; self.turn = 0; Ok(()) } } pub fn is_state_complete(&self) -> bool { self.state == GameState::Complete } pub fn turn(&self) -> usize { self.turn as usize } pub fn not_turn(&self) -> usize { (self.turn + 1) as usize % 2 } pub fn switch_active_player(&mut self) { self.turn = self.not_turn() as u8; } pub fn active_player_placed_all_ships(&self) -> bool { let ships = self.active_player().ships(); ships.len() == self.settings.ships.len() && !ships[ships.len() - 1].is_placement() } pub fn is_player_placing_ship(&self) -> bool { self.state == GameState::Placement && !self.active_player().is_cpu() } pub fn is_player_selecting_space(&self) -> bool { self.state == GameState::Active && !self.active_player().is_cpu() } pub fn get_winner(&self) -> Option<usize> { match self.state { GameState::Complete => Some(self.turn as usize), _ => None, } } pub fn place_ship(&mut self) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to place ship outside of placement game state") } else { let ref mut player = self.players[self.turn as usize]; let ship_count = player.ships().len(); player.place_placement_ship()?; if ship_count < self.settings.ships.len() { player.add_ship( [0, 0], Direction::West, self.settings.ships[ship_count], true, )?; } Ok(()) } } pub fn move_ship(&mut self, direction: Direction) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to move 
ship outside of placement game state") } else { self.players[self.turn as usize].move_placement_ship(direction)?; Ok(()) } } pub fn rotate_ship(&mut self) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to rotate ship outside of placement game state") } else { self.players[self.turn as usize].rotate_placement_ship()?; Ok(()) } } pub fn set_placement_ship(&mut self, pos: Vec<[u8; 2]>) -> Result<(), &'static str> { if self.state != GameState::Placement { Err("tried to set position of ship outside of placement game state") } else { let ship = self.players[self.turn as usize].placement_ship_mut()?; ship.set_pos(pos)?; Ok(()) } } pub fn select_space(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { let ref mut opponent = self.players[self.not_turn()]; opponent.select_space(pos)?; if opponent.sink_ship_if_all_hit(pos) == Ok(true) { self.state = match opponent.all_ships_sunk() { true => GameState::Complete, false => GameState::Active, }; } Ok(()) } pub fn suggested_check(&self) -> [u8; 2] { let mut rng = thread_rng(); let mut positions = self.inactive_player().suggested_checks(); positions.shuffle(&mut rng); positions[0] } pub fn move_grid_cursor(&mut self, direction: Direction) -> Result<(), &'static str> { self.players[self.turn as usize].move_grid_cursor(direction) } pub fn set_grid_cursor(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> { self.players[self.turn as usize].set_grid_cursor(pos) } } #[derive(Clone, Copy, PartialEq)] pub enum GameState { Placement, Active, Complete, }
settings: settings, players: players, state: GameState::Placement, turn: 0, }) }
function_block-function_prefix_line
[ { "content": "fn main() {\n\n let settings = settings::AppSettings { space_size: 20 };\n\n let mut app = app::App::new(&settings);\n\n\n\n app.init();\n\n}\n", "file_path": "src/main.rs", "rank": 0, "score": 28101.960460366587 }, { "content": "pub struct AppSettings {\n\n pub space_size: u32,\n\n}\n\n\n\npub struct GameSettings {\n\n pub spaces: [u8; 2],\n\n pub ships: Vec<u8>,\n\n}\n\n\n\nimpl GameSettings {\n\n pub fn defaults() -> GameSettings {\n\n GameSettings {\n\n spaces: [10, 10],\n\n ships: vec![2, 3, 4, 5],\n\n }\n\n }\n\n}\n", "file_path": "src/settings.rs", "rank": 1, "score": 24415.913305041762 }, { "content": " /// Sets the player's grid cursor position.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if no space exists at `pos`.\n\n pub fn set_grid_cursor(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> {\n\n if self.space_index(pos) < self.spaces.len() {\n\n self.grid_cursor = *pos;\n\n\n\n Ok(())\n\n } else {\n\n Err(\"tried to set the grid cursor to a nonexistent space\")\n\n }\n\n }\n\n\n\n pub fn is_cpu(&self) -> bool {\n\n self.is_cpu\n\n }\n\n\n\n pub fn placement_ship(&self) -> Result<&Ship, &'static str> {\n", "file_path": "src/player.rs", "rank": 16, "score": 22888.7719791738 }, { "content": " /// Returns an error if no ship is at the given position, or if the ship\n\n /// state is not active.\n\n pub fn sink_ship_if_all_hit(&mut self, pos: &[u8; 2]) -> Result<bool, &'static str> {\n\n if let Some(index) = self.ships.iter().position(|s| s.pos().contains(pos)) {\n\n let sunk = self.ships[index]\n\n .pos()\n\n .iter()\n\n .all(|p| self.space(p).is_hit());\n\n\n\n if sunk {\n\n self.ships[index].set_sunk()?;\n\n }\n\n\n\n Ok(sunk)\n\n } else {\n\n Err(\"no ship at the given position\")\n\n }\n\n }\n\n\n\n /// Returns whether all of the player's ships have been sunk.\n", "file_path": "src/player.rs", "rank": 17, "score": 22886.782760884256 }, { "content": " /// Returns the coordinates of the player's grid cursor.\n\n pub fn 
grid_cursor(&self) -> &[u8; 2] {\n\n &self.grid_cursor\n\n }\n\n\n\n /// Moves the player's grid cursor in the given `direction`.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if moving the grid cursor in `direction` would move it out of bounds.\n\n pub fn move_grid_cursor(&mut self, direction: Direction) -> Result<(), &'static str> {\n\n if let Some(new_cursor) = self.movement(&self.grid_cursor, direction) {\n\n self.set_grid_cursor(&new_cursor)?;\n\n\n\n Ok(())\n\n } else {\n\n Err(\"tried to move grid cursor out of bounds\")\n\n }\n\n }\n\n\n", "file_path": "src/player.rs", "rank": 18, "score": 22885.989590436842 }, { "content": "\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Rotates the player's placement ship during the ship placement game state.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the player does not have a placement ship.\n\n pub fn rotate_placement_ship(&mut self) -> Result<(), &'static str> {\n\n let index = self.ships.len() - 1;\n\n let ship_len = self.ships[index].len() as u8;\n\n let dir = self.ships[index].dir().rotated();\n\n\n\n // If the current ship position would cause the rotation to position the ship partially out\n\n // of bounds, adjust the position such that the ship will be entirely within bounds.\n\n let old_head = self.ships[index].pos()[0];\n\n let new_head = match dir {\n\n Direction::North => [\n", "file_path": "src/player.rs", "rank": 19, "score": 22885.768767541682 }, { "content": "use crate::{direction::Direction, ship::Ship, space::Space};\n\nuse std::cmp;\n\n\n\npub struct Player {\n\n is_cpu: bool,\n\n spaces: Vec<Space>,\n\n ships: Vec<Ship>,\n\n grid_size: [u8; 2],\n\n grid_cursor: [u8; 2],\n\n}\n\n\n\nimpl Player {\n\n pub fn new(grid_size: [u8; 2], ship_count: usize, is_cpu: bool) -> Player {\n\n Player {\n\n is_cpu: is_cpu,\n\n spaces: Space::all_grid_spaces(&grid_size),\n\n ships: Vec::with_capacity(ship_count),\n\n grid_size: grid_size,\n\n grid_cursor: [0, 0],\n\n }\n", "file_path": 
"src/player.rs", "rank": 20, "score": 22885.296431498824 }, { "content": " let ships_len = self.ships.len();\n\n\n\n if ships_len == 0 {\n\n Err(\"player has no ships\")\n\n } else if !self.ships[ships_len - 1].is_placement() {\n\n Err(\"player has no placement ship\")\n\n } else {\n\n Ok(&self.ships[self.ships.len() - 1])\n\n }\n\n }\n\n\n\n pub fn placement_ship_mut(&mut self) -> Result<&mut Ship, &'static str> {\n\n let ships_len = self.ships.len();\n\n\n\n if ships_len == 0 {\n\n Err(\"player has no ships\")\n\n } else if !self.ships[ships_len - 1].is_placement() {\n\n Err(\"player has no placement ship\")\n\n } else {\n\n Ok(&mut self.ships[ships_len - 1])\n\n }\n\n }\n\n}\n", "file_path": "src/player.rs", "rank": 21, "score": 22884.32169546083 }, { "content": " }\n\n\n\n /// Selects a space.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the space at `pos` was already checked.\n\n pub fn select_space(&mut self, pos: &[u8; 2]) -> Result<(), &'static str> {\n\n let space_index = self.space_index(pos);\n\n let ship_hit = self.ships.iter().position(|s| s.pos().contains(pos));\n\n self.spaces[space_index].set_checked(ship_hit.is_some())?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Sets the ship at the given position as sunk if all spaces it occupies\n\n /// have been checked, and returns whether the ship was sunk.\n\n ///\n\n /// # Errors\n\n ///\n", "file_path": "src/player.rs", "rank": 22, "score": 22884.140580725816 }, { "content": " .get_ship_position(\n\n new_head,\n\n self.ships[index].dir(),\n\n self.ships[index].len() as u8,\n\n )\n\n .ok_or(\"movement not possible without going out of bounds\")?;\n\n\n\n self.ships[index].set_pos(ship_pos)?;\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn place_placement_ship(&mut self) -> Result<(), &'static str> {\n\n let index = self.ships.len() - 1;\n\n\n\n // Ensure the ship doesn't overlap with another ship.\n\n if !self.valid_ship_position(&self.ships[index].pos()) {\n\n Err(\"placement ship overlaps with another 
ship\")\n\n } else {\n\n self.ships[index].set_active()?;\n", "file_path": "src/player.rs", "rank": 23, "score": 22883.546980289786 }, { "content": " pub fn all_ships_sunk(&self) -> bool {\n\n self.ships.iter().all(|ship| ship.is_sunk())\n\n }\n\n\n\n /// Returns suggestions for the best spaces to check based on the player's hit spaces and ships.\n\n ///\n\n /// This is intended for use in cases where the active player is computer-controlled, to\n\n /// determine the space they check. However, it could also be used to suggest a space that a\n\n /// human player could check.\n\n pub fn suggested_checks(&self) -> Vec<[u8; 2]> {\n\n let mut select = vec![];\n\n let directions = Direction::all();\n\n let hit_spaces = self\n\n .spaces\n\n .iter()\n\n .filter(|s| s.is_hit() && self.ship(s.pos()).unwrap().is_active())\n\n .collect::<Vec<&Space>>();\n\n\n\n // Check for a line of hit spaces.\n\n for space in &hit_spaces {\n", "file_path": "src/player.rs", "rank": 24, "score": 22883.32317727435 }, { "content": " direction: Direction,\n\n length: u8,\n\n placement: bool,\n\n ) -> Result<(), &'static str> {\n\n if self.ships.len() == self.ships.capacity() {\n\n Err(\"tried to add ship to a player with all ships already added\")\n\n } else {\n\n let pos = self\n\n .get_ship_position(head, direction, length)\n\n .ok_or(\"tried to place a ship partially out of bounds\")?;\n\n\n\n if !placement && !self.valid_ship_position(&pos) {\n\n Err(\"tried to place a ship in an invalid position\")\n\n } else {\n\n let mut ship = Ship::new(pos)?;\n\n\n\n if !placement {\n\n ship.set_active()?;\n\n }\n\n\n", "file_path": "src/player.rs", "rank": 25, "score": 22883.236440267443 }, { "content": " self.ships.push(ship);\n\n\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n /// Moves the player's placement ship in the given direction.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the player does not have a placement ship, or if the ship could not be\n\n /// moved in `direction` without going 
out of bounds.\n\n pub fn move_placement_ship(&mut self, direction: Direction) -> Result<(), &'static str> {\n\n let index = self.ships.len() - 1;\n\n let old_head = self.ships[index].pos()[0];\n\n let new_head = self\n\n .movement(&old_head, direction)\n\n .ok_or(\"movement not possible without going out of bounds\")?;\n\n let ship_pos = self\n", "file_path": "src/player.rs", "rank": 26, "score": 22882.303812438495 }, { "content": " }\n\n }\n\n\n\n // If no candidates have been found yet, just add any unchecked space.\n\n // TODO: smarter checking for spaces that remaining ships could realistically occupy.\n\n if select.is_empty() {\n\n select = self\n\n .spaces\n\n .iter()\n\n .filter(|space| space.is_unchecked())\n\n .map(|space| *space.pos())\n\n .collect::<Vec<[u8; 2]>>();\n\n }\n\n\n\n select\n\n }\n\n\n\n pub fn add_ship(\n\n &mut self,\n\n head: [u8; 2],\n", "file_path": "src/player.rs", "rank": 27, "score": 22880.12643000605 }, { "content": " /// `direction` refers to the direction the ship is facing, not the\n\n /// direction in which positions are generated.\n\n /// Returns `None` if the resulting ship position would not be contained\n\n /// within the grid.\n\n pub fn get_ship_position(\n\n &self,\n\n head: [u8; 2],\n\n direction: Direction,\n\n length: u8,\n\n ) -> Option<Vec<[u8; 2]>> {\n\n let valid = match direction {\n\n Direction::North => head[1] + length <= self.grid_size[1],\n\n Direction::East => head[0] >= length - 1,\n\n Direction::South => head[1] >= length - 1,\n\n Direction::West => head[0] + length <= self.grid_size[0],\n\n };\n\n\n\n if valid {\n\n let mut ship = Vec::with_capacity(length as usize);\n\n\n", "file_path": "src/player.rs", "rank": 28, "score": 22879.53570336118 }, { "content": " for pos in 0..length {\n\n let pos_u8 = pos as u8;\n\n\n\n ship.push(match direction {\n\n Direction::North => [head[0], head[1] + pos_u8],\n\n Direction::East => [head[0] - pos_u8, head[1]],\n\n Direction::South => [head[0], head[1] - pos_u8],\n\n 
Direction::West => [head[0] + pos_u8, head[1]],\n\n });\n\n }\n\n\n\n Some(ship)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Checks that the given ship position is valid.\n\n ///\n\n /// If the player is CPU-controlled, a ship in a space next to another ship\n", "file_path": "src/player.rs", "rank": 29, "score": 22879.004252525214 }, { "content": "\n\n /// Finds the first non-hit, unchecked space in a `direction` from `pos`.\n\n /// Can also make sure the space is at the end of a `line` of hit spaces.\n\n /// Returns `None` if the first non-hit space has been checked or if a grid\n\n /// boundary is reached.\n\n fn find_unchecked_space(\n\n &self,\n\n pos: &[u8; 2],\n\n direction: Direction,\n\n check_for_line: bool,\n\n ) -> Option<[u8; 2]> {\n\n let mut check_pos = self.movement(pos, direction);\n\n\n\n while let Some(next_pos) = check_pos {\n\n let next_space = self.space(&next_pos);\n\n\n\n match next_space.is_hit() {\n\n true => check_pos = self.movement(&next_pos, direction),\n\n false => {\n\n if !next_space.is_unchecked() {\n", "file_path": "src/player.rs", "rank": 30, "score": 22878.992069400996 }, { "content": " /// Gets a reference to the spaces.\n\n pub fn spaces(&self) -> &[Space] {\n\n &self.spaces\n\n }\n\n\n\n /// Returns whether the given position is valid.\n\n fn valid_space(&self, pos: &[u8; 2]) -> bool {\n\n pos[0] < self.grid_size[0] && pos[1] < self.grid_size[1]\n\n }\n\n\n\n /// Gets a reference to the space with the given position.\n\n pub fn space(&self, pos: &[u8; 2]) -> &Space {\n\n self.spaces.get(self.space_index(pos)).unwrap()\n\n }\n\n\n\n /// Calculates the index of the given position in the spaces vector.\n\n fn space_index(&self, pos: &[u8; 2]) -> usize {\n\n self.grid_size[0] as usize * pos[0] as usize + pos[1] as usize\n\n }\n\n\n", "file_path": "src/player.rs", "rank": 31, "score": 22878.871048506095 }, { "content": " pub fn ship_is_in_space(&self, pos: &[u8; 2]) -> bool {\n\n self.ships\n\n .iter()\n\n .any(|s| 
s.pos().contains(pos) && !s.is_placement())\n\n }\n\n\n\n /// Returns whether there is a ship next to the specified grid coordinates.\n\n fn ship_is_next_to(&self, pos: &[u8; 2]) -> bool {\n\n let &[x, y] = pos;\n\n\n\n // Left\n\n x > 0 && self.ship_is_in_space(&[x - 1, y])\n\n // Right\n\n || x < self.grid_size[0] - 1 && self.ship_is_in_space(&[x + 1, y])\n\n // Above\n\n || y > 0 && self.ship_is_in_space(&[x, y - 1])\n\n // Below\n\n || y < self.grid_size[1] - 1 && self.ship_is_in_space(&[x, y + 1])\n\n }\n\n\n", "file_path": "src/player.rs", "rank": 32, "score": 22878.401416791996 }, { "content": " /// will be considered invalid.\n\n fn valid_ship_position(&self, new_ship: &[[u8; 2]]) -> bool {\n\n new_ship.iter().all(|s| {\n\n self.valid_space(s)\n\n && !self.ship_is_in_space(s)\n\n && !(self.ship_is_next_to(s) && self.is_cpu)\n\n })\n\n }\n\n\n\n /// Gets a reference to the ships.\n\n pub fn ships(&self) -> &[Ship] {\n\n &self.ships\n\n }\n\n\n\n /// Gets a reference to a ship if it is in the given position.\n\n fn ship(&self, pos: &[u8; 2]) -> Option<&Ship> {\n\n self.ships.iter().find(|s| s.pos().contains(pos))\n\n }\n\n\n\n /// Returns whether a ship occupies the specified grid coordinates.\n", "file_path": "src/player.rs", "rank": 33, "score": 22878.02894877796 }, { "content": " /// Returns the coordinates of a movement from `pos` in a `direction`.\n\n /// Returns `None` if the movement is not possible.\n\n fn movement(&self, pos: &[u8; 2], direction: Direction) -> Option<[u8; 2]> {\n\n let valid = match direction {\n\n Direction::North => pos[1] > 0,\n\n Direction::East => pos[0] < self.grid_size[0] - 1,\n\n Direction::South => pos[1] < self.grid_size[1] - 1,\n\n Direction::West => pos[0] > 0,\n\n };\n\n\n\n match valid {\n\n true => Some(match direction {\n\n Direction::North => [pos[0], pos[1] - 1],\n\n Direction::East => [pos[0] + 1, pos[1]],\n\n Direction::South => [pos[0], pos[1] + 1],\n\n Direction::West => [pos[0] - 1, pos[1]],\n\n }),\n\n false 
=> None,\n\n }\n\n }\n", "file_path": "src/player.rs", "rank": 34, "score": 22877.95595089863 }, { "content": " for direction in &directions {\n\n let unchecked = self.find_unchecked_space(space.pos(), *direction, true);\n\n\n\n if let Some(pos) = unchecked {\n\n if !select.contains(&pos) {\n\n select.push(pos);\n\n }\n\n }\n\n }\n\n }\n\n\n\n // If a hit space was found, but no hit spaces next to it, look for unchecked spaces next\n\n // to it.\n\n if hit_spaces.len() > 0 && select.is_empty() {\n\n for direction in &directions {\n\n let unchecked = self.find_unchecked_space(hit_spaces[0].pos(), *direction, false);\n\n\n\n if let Some(pos) = unchecked {\n\n select.push(pos);\n\n }\n", "file_path": "src/player.rs", "rank": 35, "score": 22875.95245563649 }, { "content": " old_head[0],\n\n cmp::min(old_head[1], self.grid_size[1] - ship_len),\n\n ],\n\n Direction::East => [cmp::max(old_head[0], ship_len - 1), old_head[1]],\n\n Direction::South => [old_head[0], cmp::max(old_head[1], ship_len - 1)],\n\n Direction::West => [\n\n cmp::min(old_head[0], self.grid_size[0] - ship_len),\n\n old_head[1],\n\n ],\n\n };\n\n\n\n // get_ship_position() should always return Some(ship_pos) in this situation.\n\n let ship_pos = self.get_ship_position(new_head, dir, ship_len).unwrap();\n\n self.ships[index].set_pos(ship_pos)?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Returns a ship position, given its head position, direction and length.\n\n ///\n", "file_path": "src/player.rs", "rank": 36, "score": 22875.477494518163 }, { "content": " check_pos = None;\n\n }\n\n break;\n\n }\n\n };\n\n }\n\n\n\n if check_for_line && check_pos.is_some() {\n\n let unchecked = check_pos.unwrap();\n\n let opposite_dir = direction.opposite();\n\n let prev_pos = self.movement(&unchecked, opposite_dir).unwrap();\n\n\n\n if &prev_pos == pos {\n\n check_pos = None;\n\n }\n\n }\n\n\n\n check_pos\n\n }\n\n\n", "file_path": "src/player.rs", "rank": 37, "score": 22874.26940562385 }, { "content": 
"#[derive(PartialEq)]\n\nenum ShipState {\n\n Placement,\n\n Active,\n\n Sunk,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn new() {\n\n let hopefully_ship = Ship::new(vec![[0, 0], [0, 1]]);\n\n assert!(hopefully_ship.is_ok());\n\n }\n\n\n\n #[test]\n\n fn pos() {\n\n let pos = vec![[0, 0], [0, 1]];\n\n let ship = Ship::new(pos.clone()).unwrap();\n", "file_path": "src/ship.rs", "rank": 38, "score": 21308.557464101737 }, { "content": "use crate::direction::Direction;\n\nuse crate::game::Game;\n\nuse crate::settings::{AppSettings, GameSettings};\n\nuse piston_window::*;\n\nuse std::{env::current_exe, path::PathBuf};\n\n\n\npub struct App<'a> {\n\n window: PistonWindow,\n\n settings: &'a AppSettings,\n\n game: Game,\n\n turn_active: bool,\n\n turn_end_timer: f64,\n\n cpu_turn_timer: f64,\n\n mouse_cursor: [f64; 2],\n\n grid_area: [u32; 4],\n\n}\n\n\n\nimpl<'a> App<'a> {\n\n pub fn new(settings: &AppSettings) -> App {\n\n let game_settings = GameSettings::defaults();\n", "file_path": "src/app.rs", "rank": 39, "score": 15.369757017497239 }, { "content": "use crate::direction::Direction;\n\n\n\npub struct Ship {\n\n state: ShipState,\n\n position: Vec<[u8; 2]>,\n\n dir: Direction,\n\n}\n\n\n\nimpl Ship {\n\n /// Creates a new `Ship` with the given position.\n\n pub fn new(pos: Vec<[u8; 2]>) -> Result<Ship, &'static str> {\n\n let dir = Direction::from_positions(&pos[1], &pos[0])?;\n\n\n\n Ok(Ship {\n\n state: ShipState::Placement,\n\n position: pos,\n\n dir: dir,\n\n })\n\n }\n\n\n", "file_path": "src/ship.rs", "rank": 40, "score": 14.536096078649429 }, { "content": " /// Returns the ship's position.\n\n pub fn pos(&self) -> &[[u8; 2]] {\n\n &self.position\n\n }\n\n\n\n /// Sets the ship's position.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if `pos` does not form a vertical or horizontal line.\n\n pub fn set_pos(&mut self, pos: Vec<[u8; 2]>) -> Result<(), &'static str> {\n\n if pos.is_empty() {\n\n Err(\"tried to set an 
empty position to a ship\")\n\n } else if pos.len() == 1 {\n\n self.position = pos;\n\n\n\n Ok(())\n\n } else {\n\n let mut valid = true;\n\n let dir = Direction::from_positions(&pos[1], &pos[0])?;\n", "file_path": "src/ship.rs", "rank": 41, "score": 14.104466163052866 }, { "content": "\n\n /// Returns whether the ship is in the placement state.\n\n pub fn is_placement(&self) -> bool {\n\n self.state == ShipState::Placement\n\n }\n\n\n\n /// Returns whether the ship is active.\n\n pub fn is_active(&self) -> bool {\n\n self.state == ShipState::Active\n\n }\n\n\n\n /// Sets the ship as active.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the ship's state is not `ShipState::Placement`.\n\n pub fn set_active(&mut self) -> Result<(), &'static str> {\n\n if self.state != ShipState::Placement {\n\n Err(\"tried to set a ship as active that wasn't in placement state\")\n\n } else {\n", "file_path": "src/ship.rs", "rank": 42, "score": 13.024369120883863 }, { "content": " window: window,\n\n settings: &settings,\n\n game: Game::new(game_settings).unwrap(),\n\n turn_active: true,\n\n turn_end_timer: 0.0,\n\n cpu_turn_timer: 0.0,\n\n mouse_cursor: [0.0; 2],\n\n grid_area: grid_area,\n\n }\n\n }\n\n\n\n pub fn init(&mut self) {\n\n self.window.set_ups(60);\n\n self.window.set_max_fps(60);\n\n\n\n // TODO set textures in a not terrible way\n\n let assets_dir = Self::get_assets_dir(current_exe().unwrap()).unwrap();\n\n let images_dir: PathBuf = assets_dir.join(\"images\");\n\n let mut space_textures = vec![];\n\n\n", "file_path": "src/app.rs", "rank": 43, "score": 12.810439392153533 }, { "content": " 22.0,\n\n ),\n\n g,\n\n );\n\n }\n\n });\n\n }\n\n }\n\n }\n\n\n\n fn update(&mut self, u: &UpdateArgs) {\n\n if self.game.is_state_placement() && self.game.active_player_placed_all_ships() {\n\n self.game.switch_active_player();\n\n\n\n if self.game.active_player_placed_all_ships() {\n\n // All ships have been placed; start the game.\n\n // This will also set player 1 
as active so no need to switch active player.\n\n self.game\n\n .set_state_active()\n\n .expect(\"failed to start the game\");\n", "file_path": "src/app.rs", "rank": 44, "score": 12.709882881493012 }, { "content": " }\n\n } else {\n\n if !self.turn_active {\n\n // Continue/end the end-of-turn delay.\n\n if self.turn_end_timer < 1.5 {\n\n self.turn_end_timer += u.dt;\n\n } else if self.game.is_state_active() {\n\n self.game.switch_active_player();\n\n self.turn_end_timer = 0.0;\n\n self.turn_active = true;\n\n }\n\n }\n\n\n\n // Continue/end the delay when CPU players take their turn.\n\n if self.turn_active && self.game.active_player().is_cpu() {\n\n self.cpu_turn_timer += u.dt;\n\n\n\n if self.cpu_turn_timer >= 1.0 {\n\n let cpu_space = self.game.suggested_check();\n\n self.game\n", "file_path": "src/app.rs", "rank": 45, "score": 12.48185910298897 }, { "content": " self.state = ShipState::Active;\n\n\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Returns whether the ship has sunk.\n\n pub fn is_sunk(&self) -> bool {\n\n self.state == ShipState::Sunk\n\n }\n\n\n\n /// Sets the ship as having been sunk.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the ship's state is not `ShipState::Active`.\n\n pub fn set_sunk(&mut self) -> Result<(), &'static str> {\n\n if self.state != ShipState::Active {\n\n Err(\"tried to sink a ship that was not active\")\n\n } else {\n\n self.state = ShipState::Sunk;\n\n\n\n Ok(())\n\n }\n\n }\n\n}\n\n\n\n#[derive(PartialEq)]\n", "file_path": "src/ship.rs", "rank": 46, "score": 12.443127197771492 }, { "content": " ///\n\n /// # Errors\n\n ///\n\n /// Returns an error if the space's state is not `SpaceState::Unchecked`.\n\n pub fn set_checked(&mut self, hit: bool) -> Result<(), &'static str> {\n\n if self.state != SpaceState::Unchecked {\n\n Err(\"tried to check an already checked space\")\n\n } else {\n\n self.state = SpaceState::Checked(hit);\n\n Ok(())\n\n }\n\n }\n\n\n\n pub fn is_unchecked(&self) -> bool {\n\n self.state == 
SpaceState::Unchecked\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.state == SpaceState::Checked(false)\n\n }\n", "file_path": "src/space.rs", "rank": 47, "score": 12.404220951594057 }, { "content": " if let Some(c) = e.mouse_cursor_args() {\n\n self.mouse_cursor_movement(&c);\n\n }\n\n\n\n if let Some(u) = e.update_args() {\n\n self.update(&u);\n\n }\n\n\n\n if e.render_args().is_some() {\n\n let current_player = self.game.active_player();\n\n let game_state_placement = self.game.is_state_placement();\n\n let game_state_active = self.game.is_state_active();\n\n let game_state_complete = self.game.is_state_complete();\n\n let shown_player = match game_state_placement {\n\n true => current_player,\n\n false => self.game.inactive_player(),\n\n };\n\n\n\n let space_size_u32 = self.settings.space_size as u32;\n\n let grid_area = self.grid_area;\n", "file_path": "src/app.rs", "rank": 48, "score": 12.373353964223305 }, { "content": "\n\n pub fn is_hit(&self) -> bool {\n\n self.state == SpaceState::Checked(true)\n\n }\n\n\n\n pub fn pos(&self) -> &[u8; 2] {\n\n &self.position\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, PartialEq)]\n\npub enum SpaceState {\n\n Unchecked,\n\n Checked(bool),\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/space.rs", "rank": 49, "score": 12.071577325210006 }, { "content": "pub struct Space {\n\n state: SpaceState,\n\n position: [u8; 2],\n\n}\n\n\n\nimpl Space {\n\n pub fn new(pos: [u8; 2]) -> Space {\n\n Space {\n\n state: SpaceState::Unchecked,\n\n position: pos,\n\n }\n\n }\n\n\n\n pub fn all_grid_spaces(grid_size: &[u8; 2]) -> Vec<Space> {\n\n (0..grid_size[0])\n\n .flat_map(|col| (0..grid_size[1]).map(move |row| Space::new([col, row])))\n\n .collect()\n\n }\n\n\n\n /// Sets this space as having been checked, and whether it was hit.\n", "file_path": "src/space.rs", "rank": 50, "score": 11.359017897076722 }, { "content": " .select_space(&cpu_space)\n\n .expect(\"CPU player tried to select a checked 
space\");\n\n self.cpu_turn_timer = 0.0;\n\n self.turn_active = false;\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn primary_action(&mut self, grid_pos: &[u8; 2]) {\n\n if self.game.is_player_placing_ship() && self.game.place_ship().is_err() {\n\n // TODO: more specific error checking.\n\n // For now, just assume it's the overlap error and ignore it.\n\n }\n\n\n\n if self.game.is_player_selecting_space() && self.game.select_space(grid_pos).is_ok() {\n\n self.turn_active = false;\n\n }\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 51, "score": 10.979613299509136 }, { "content": " /// Performs grid movement according to the current program state.\n\n fn movement(&mut self, direction: Direction) {\n\n if self.game.is_player_placing_ship() && self.game.move_ship(direction).is_err() {\n\n // TODO: more specific error checking.\n\n }\n\n\n\n if self.game.is_player_selecting_space()\n\n && self.turn_active\n\n && self.game.move_grid_cursor(direction).is_err()\n\n {\n\n // TODO: might be good to have some visual effect.\n\n }\n\n }\n\n\n\n /// Records the last known mouse cursor position.\n\n fn mouse_cursor_movement(&mut self, c: &[f64; 2]) {\n\n self.mouse_cursor = *c;\n\n\n\n if let Some(grid_pos) = self.mouse_cursor_grid_position() {\n\n if self.game.is_state_placement() {\n", "file_path": "src/app.rs", "rank": 52, "score": 10.392593961756864 }, { "content": "\n\n // During the game, show the player's grid cursor.\n\n if game_state_active && turn_end_timer == 0.0 && !current_player.is_cpu() {\n\n let grid_cursor = current_player.grid_cursor();\n\n let transform = c.transform.trans(\n\n (space_size_u32 * grid_cursor[0] as u32 + grid_area[0]) as f64,\n\n (space_size_u32 * grid_cursor[1] as u32 + grid_area[1]) as f64,\n\n );\n\n image(&grid_cursor_texture, transform, g);\n\n }\n\n\n\n // Current player text image\n\n if game_winner.is_none() {\n\n let turn = game_turn;\n\n let player_text_size = player_text[turn].get_size();\n\n let transform = c\n\n .transform\n\n 
.trans((window_size.width - player_text_size.0 as f64) / 2.0, 2.0);\n\n image(&player_text[turn], transform, g);\n\n }\n", "file_path": "src/app.rs", "rank": 53, "score": 9.669334537798841 }, { "content": " let player = self.game.active_player();\n\n let ship_dir = player\n\n .placement_ship()\n\n .expect(\"failed to get player's placement ship\")\n\n .dir();\n\n // Subtract 1 from the ship count to not consider the placement ship itself.\n\n let ship_count = player.ships().len() - 1;\n\n let ship_len = self.game.settings().ships[ship_count];\n\n\n\n if let Some(ship) = player.get_ship_position(grid_pos, ship_dir, ship_len) {\n\n // `set_pos()` will return an error if the position was invalid.\n\n self.game\n\n .set_placement_ship(ship)\n\n .expect(\"tried to set placement ship to invalid position\");\n\n }\n\n } else if self.game.is_state_active()\n\n && !self.game.active_player().is_cpu()\n\n && self.game.set_grid_cursor(&grid_pos).is_err()\n\n {\n\n // TODO: might be good to have some visual effect.\n", "file_path": "src/app.rs", "rank": 54, "score": 9.580934079759565 }, { "content": "\n\n // During turn transitions / game over, cover the window with\n\n // a black rectangle of increasing opacity.\n\n if !turn_active && turn_end_timer >= 0.75 {\n\n let alpha = match game_state_complete {\n\n true => (turn_end_timer as f32 - 0.75) / 1.125,\n\n false => (turn_end_timer as f32 - 0.75) / 0.75,\n\n };\n\n rectangle(\n\n [0.0, 0.0, 0.0, alpha],\n\n [0.0, 0.0, window_size.width, window_size.height],\n\n c.transform,\n\n g,\n\n );\n\n }\n\n\n\n // Game over content, to appear over the black rectangle.\n\n if turn_end_timer >= 1.5 && game_winner.is_some() {\n\n let winner = game_winner.unwrap();\n\n let game_over_text_size = game_over_text[0].get_size();\n", "file_path": "src/app.rs", "rank": 55, "score": 9.411174534205454 }, { "content": " /// Processes primary button presses according to the current program state.\n\n fn button_primary(&mut self) {\n\n let grid_pos = 
self.game.active_player().grid_cursor().clone();\n\n self.primary_action(&grid_pos);\n\n }\n\n\n\n /// Processes secondary button presses according to the current program state.\n\n fn button_secondary(&mut self) {\n\n if self.game.is_player_placing_ship() {\n\n self.game.rotate_ship().expect(\"failed to rotate ship\");\n\n }\n\n }\n\n\n\n /// Processes left mouse clicks according to the current program state.\n\n fn mouse_left_click(&mut self) {\n\n if let Some(grid_pos) = self.mouse_cursor_grid_position() {\n\n self.primary_action(&grid_pos);\n\n }\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 56, "score": 9.066790970582616 }, { "content": "use rand::{thread_rng, Rng};\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum Direction {\n\n North,\n\n East,\n\n South,\n\n West,\n\n}\n\n\n\nimpl Direction {\n\n pub fn opposite(&self) -> Direction {\n\n match *self {\n\n Direction::North => Direction::South,\n\n Direction::East => Direction::West,\n\n Direction::South => Direction::North,\n\n Direction::West => Direction::East,\n\n }\n\n }\n\n\n", "file_path": "src/direction.rs", "rank": 57, "score": 8.84585510669602 }, { "content": " #[test]\n\n fn set_checked() {\n\n let mut space = Space::new([0, 0]);\n\n assert!(space.set_checked(false).is_ok());\n\n assert!(space.set_checked(false).is_err());\n\n\n\n space = Space::new([0, 0]);\n\n assert!(space.set_checked(true).is_ok());\n\n assert!(space.set_checked(true).is_err());\n\n }\n\n\n\n #[test]\n\n fn is_unchecked() {\n\n let mut space = Space::new([0, 0]);\n\n assert!(space.is_unchecked());\n\n assert!(space.set_checked(false).is_ok());\n\n assert!(!space.is_unchecked());\n\n\n\n space = Space::new([0, 0]);\n\n assert!(space.set_checked(true).is_ok());\n", "file_path": "src/space.rs", "rank": 58, "score": 7.681324079263499 }, { "content": " let window_size = self.window.size();\n\n let turn_end_timer = self.turn_end_timer;\n\n let game_winner = self.game.get_winner();\n\n let game_turn = 
self.game.turn();\n\n let turn_active = self.turn_active;\n\n\n\n self.window.draw_2d(&e, |c, g, _| {\n\n clear([0.6, 0.6, 1.0, 1.0], g);\n\n\n\n // Ship icons above grid\n\n for (i, ship) in shown_player.ships().iter().enumerate() {\n\n if ship.is_active() {\n\n let transform = c.transform.trans(\n\n (space_size_u32 * 2 * i as u32 + grid_area[0] * 2) as f64,\n\n 30.0 as f64,\n\n );\n\n image(&ship_textures[i], transform, g);\n\n }\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 59, "score": 7.658559625427386 }, { "content": " assert!(!space.is_unchecked());\n\n }\n\n\n\n #[test]\n\n fn is_empty() {\n\n let mut space = Space::new([0, 0]);\n\n assert!(!space.is_empty());\n\n assert!(space.set_checked(false).is_ok());\n\n assert!(space.is_empty());\n\n\n\n space = Space::new([0, 0]);\n\n assert!(space.set_checked(true).is_ok());\n\n assert!(!space.is_empty());\n\n }\n\n\n\n #[test]\n\n fn is_hit() {\n\n let mut space = Space::new([0, 0]);\n\n assert!(!space.is_hit());\n\n assert!(space.set_checked(true).is_ok());\n", "file_path": "src/space.rs", "rank": 60, "score": 7.1636860011883865 }, { "content": "mod app;\n\nmod direction;\n\nmod game;\n\nmod player;\n\nmod settings;\n\nmod ship;\n\nmod space;\n\n\n", "file_path": "src/main.rs", "rank": 61, "score": 7.098989544926931 }, { "content": " let grid_area = [\n\n settings.space_size,\n\n settings.space_size * 3,\n\n game_settings.spaces[0] as u32 * settings.space_size,\n\n game_settings.spaces[1] as u32 * settings.space_size,\n\n ];\n\n\n\n let window_size = [\n\n grid_area[2] + settings.space_size * 2,\n\n grid_area[3] + settings.space_size * 4,\n\n ];\n\n\n\n let window_title = \"Battleship\";\n\n let window: PistonWindow = WindowSettings::new(window_title, window_size)\n\n .exit_on_esc(true)\n\n .resizable(false)\n\n .build()\n\n .unwrap();\n\n\n\n App {\n", "file_path": "src/app.rs", "rank": 62, "score": 7.007628171391382 }, { "content": " && self.mouse_cursor[1] >= self.grid_area[1] as f64\n\n && 
self.mouse_cursor[0] < (self.grid_area[0] + self.grid_area[2]) as f64\n\n && self.mouse_cursor[1] < (self.grid_area[1] + self.grid_area[3]) as f64\n\n }\n\n\n\n /// Returns the texture from the file at the given path.\n\n fn get_texture(&mut self, path: PathBuf) -> G2dTexture {\n\n Texture::from_path(\n\n &mut self.window.create_texture_context(),\n\n path,\n\n Flip::None,\n\n &TextureSettings::new(),\n\n )\n\n .unwrap()\n\n }\n\n\n\n /// Returns the assets directory, if it could be found.\n\n fn get_assets_dir(mut dir: PathBuf) -> Result<PathBuf, &'static str> {\n\n let mut result = None;\n\n\n", "file_path": "src/app.rs", "rank": 63, "score": 6.700079071679067 }, { "content": " 0 => Direction::North,\n\n 1 => Direction::East,\n\n 2 => Direction::South,\n\n 3 => Direction::West,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n /// Returns the direction travelled from `pos1` to `pos2` if the positions\n\n /// represent travel in exactly north, south, east or west direction, or\n\n /// returns an error.\n\n pub fn from_positions(pos1: &[u8; 2], pos2: &[u8; 2]) -> Result<Direction, &'static str> {\n\n let x_diff = pos1[0] as i16 - pos2[0] as i16;\n\n let y_diff = pos1[1] as i16 - pos2[1] as i16;\n\n\n\n if x_diff == 0 && y_diff > 0 {\n\n Ok(Direction::North)\n\n } else if x_diff == 0 && y_diff < 0 {\n\n Ok(Direction::South)\n\n } else if x_diff > 0 && y_diff == 0 {\n", "file_path": "src/direction.rs", "rank": 64, "score": 6.642941894419533 }, { "content": "rust-battleship\n\n===============\n\n\n\nA simple Battleship game written in Rust, using the Piston game engine.\n\n\n\nIt is currently a one-player vs CPU opponent game.\n\n\n\nKeyboard controls:\n\n\n\n| Key | Ship Placement | Game |\n\n| ------ | -------------- | ---------------- |\n\n| Arrows | Move ship | Move grid cursor |\n\n| Enter | Place ship | Select space |\n\n| Space | Rotate ship | n/a |\n\n\n\nMouse controls:\n\n\n\n| Button | Ship Placement | Game |\n\n| ------ | -------------- | ------------ |\n\n| 
Left | Place ship | Select space |\n\n| Right | Rotate ship | n/a |\n\n\n", "file_path": "README.md", "rank": 65, "score": 6.181904473946382 }, { "content": " // Grid spaces\n\n for space in shown_player.spaces() {\n\n let space_pos = space.pos();\n\n let transform = c.transform.trans(\n\n (space_size_u32 * space_pos[0] as u32 + grid_area[0]) as f64,\n\n (space_size_u32 * space_pos[1] as u32 + grid_area[1]) as f64,\n\n );\n\n\n\n // Only show ship locations during ship placement or if the\n\n // current player is computer-controlled.\n\n if shown_player.ship_is_in_space(space_pos)\n\n && (game_state_placement\n\n || (space.is_unchecked() && current_player.is_cpu()))\n\n {\n\n image(&space_textures[3], transform, g);\n\n } else {\n\n let space_state = if space.is_unchecked() {\n\n 0\n\n } else if space.is_empty() {\n\n 1\n", "file_path": "src/app.rs", "rank": 66, "score": 6.056033860964641 }, { "content": " let wins_text_size = game_over_text[1].get_size();\n\n let player_text_size = player_text[winner].get_size();\n\n image(\n\n &game_over_text[0],\n\n c.transform\n\n .trans((window_size.width - game_over_text_size.0 as f64) / 2.0, 2.0),\n\n g,\n\n );\n\n image(\n\n &player_text[winner],\n\n c.transform.trans(\n\n (window_size.width - (player_text_size.0 + wins_text_size.0 + 2) as f64) / 2.0,\n\n 22.0,\n\n ),\n\n g,\n\n );\n\n image(\n\n &game_over_text[1],\n\n c.transform.trans(\n\n (window_size.width + (player_text_size.0 - wins_text_size.0 + 2) as f64) / 2.0,\n", "file_path": "src/app.rs", "rank": 67, "score": 5.647092750415187 }, { "content": " /// Processes left button presses according to the current program state.\n\n fn button_left(&mut self) {\n\n self.movement(Direction::West);\n\n }\n\n\n\n /// Processes right button presses according to the current program state.\n\n fn button_right(&mut self) {\n\n self.movement(Direction::East);\n\n }\n\n\n\n /// Processes up button presses according to the current program state.\n\n fn button_up(&mut self) {\n\n 
self.movement(Direction::North);\n\n }\n\n\n\n /// Processes down button presses according to the current program state.\n\n fn button_down(&mut self) {\n\n self.movement(Direction::South);\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 68, "score": 5.5613021999379475 }, { "content": " } else {\n\n 2\n\n };\n\n image(&space_textures[space_state], transform, g);\n\n }\n\n }\n\n\n\n // During ship placement, show the temporary position of the\n\n // next ship to be placed.\n\n if game_state_placement {\n\n if let Ok(ship) = shown_player.placement_ship() {\n\n for pos in ship.pos() {\n\n let transform = c.transform.trans(\n\n (space_size_u32 * pos[0] as u32 + grid_area[0]) as f64,\n\n (space_size_u32 * pos[1] as u32 + grid_area[1]) as f64,\n\n );\n\n image(&space_textures[3], transform, g);\n\n }\n\n }\n\n }\n", "file_path": "src/app.rs", "rank": 69, "score": 5.389513607303277 }, { "content": " for state in 0..3 {\n\n let image_file = format!(\"gridspace-{}.png\", state);\n\n space_textures.push(self.get_texture(images_dir.join(&image_file)));\n\n }\n\n\n\n space_textures.push(self.get_texture(images_dir.join(\"shipspace.png\")));\n\n\n\n let grid_cursor_texture = self.get_texture(images_dir.join(\"grid-cursor.png\"));\n\n\n\n let mut ship_textures = vec![];\n\n for ship_size in 2..6 {\n\n let image_file = format!(\"ship-{}.png\", ship_size);\n\n ship_textures.push(self.get_texture(images_dir.join(&image_file)));\n\n }\n\n\n\n let player_text = [\n\n self.get_texture(images_dir.join(\"player-1.png\")),\n\n self.get_texture(images_dir.join(\"player-2.png\")),\n\n ];\n\n\n", "file_path": "src/app.rs", "rank": 70, "score": 5.29341392607556 }, { "content": " assert!(ship.is_active());\n\n assert!(ship.set_sunk().is_ok());\n\n assert!(!ship.is_active());\n\n }\n\n\n\n #[test]\n\n fn set_active() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(ship.set_active().is_ok());\n\n assert!(ship.set_active().is_err());\n\n 
assert!(ship.set_sunk().is_ok());\n\n assert!(ship.set_active().is_err());\n\n }\n\n\n\n #[test]\n\n fn is_sunk() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(!ship.is_sunk());\n\n assert!(ship.set_active().is_ok());\n\n assert!(!ship.is_sunk());\n", "file_path": "src/ship.rs", "rank": 71, "score": 5.080001374418035 }, { "content": " assert!(ship.set_sunk().is_ok());\n\n assert!(ship.is_sunk());\n\n }\n\n\n\n #[test]\n\n fn set_sunk() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(ship.set_sunk().is_err());\n\n assert!(ship.set_active().is_ok());\n\n assert!(ship.set_sunk().is_ok());\n\n assert!(ship.set_sunk().is_err());\n\n }\n\n}\n", "file_path": "src/ship.rs", "rank": 72, "score": 4.901277606399667 }, { "content": " pub fn rotated(&self) -> Direction {\n\n match *self {\n\n Direction::North => Direction::East,\n\n Direction::East => Direction::South,\n\n Direction::South => Direction::West,\n\n Direction::West => Direction::North,\n\n }\n\n }\n\n\n\n pub fn all() -> [Direction; 4] {\n\n [\n\n Direction::North,\n\n Direction::East,\n\n Direction::South,\n\n Direction::West,\n\n ]\n\n }\n\n\n\n pub fn random() -> Direction {\n\n match thread_rng().gen_range(0, 4) {\n", "file_path": "src/direction.rs", "rank": 73, "score": 4.652503830411979 }, { "content": " let pos = vec![[0, 0], [0, 1]];\n\n let ship = Ship::new(pos.clone()).unwrap();\n\n assert_eq!(ship.len(), pos.len());\n\n }\n\n\n\n #[test]\n\n fn is_placement() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(ship.is_placement());\n\n assert!(ship.set_active().is_ok());\n\n assert!(!ship.is_placement());\n\n assert!(ship.set_sunk().is_ok());\n\n assert!(!ship.is_placement());\n\n }\n\n\n\n #[test]\n\n fn is_active() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(!ship.is_active());\n\n assert!(ship.set_active().is_ok());\n", "file_path": "src/ship.rs", "rank": 74, "score": 4.47883500433732 }, { 
"content": " assert_eq!(ship.pos(), pos.as_slice());\n\n }\n\n\n\n #[test]\n\n fn set_pos() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert!(ship.set_pos(vec![[1, 0], [0, 0]]).is_ok());\n\n assert_eq!(ship.dir(), Direction::East);\n\n\n\n assert!(ship.set_pos(vec![[0, 1], [0, 0]]).is_ok());\n\n assert_eq!(ship.dir(), Direction::South);\n\n\n\n assert!(ship.set_pos(vec![[0, 0], [1, 0]]).is_ok());\n\n assert_eq!(ship.dir(), Direction::West);\n\n\n\n assert!(ship.set_pos(vec![[0, 0], [0, 1]]).is_ok());\n\n assert_eq!(ship.dir(), Direction::North);\n\n\n\n assert!(ship.set_pos(vec![[0, 0], [0, 0]]).is_err());\n\n assert!(ship.set_pos(vec![[0, 0], [0, 2]]).is_err());\n", "file_path": "src/ship.rs", "rank": 75, "score": 4.128189314006262 }, { "content": " assert!(ship.set_pos(vec![]).is_err());\n\n }\n\n\n\n #[test]\n\n fn dir() {\n\n let mut ship = Ship::new(vec![[0, 0], [0, 1]]).unwrap();\n\n assert_eq!(ship.dir(), Direction::North);\n\n\n\n ship = Ship::new(vec![[0, 1], [0, 0]]).unwrap();\n\n assert_eq!(ship.dir(), Direction::South);\n\n\n\n ship = Ship::new(vec![[0, 0], [1, 0]]).unwrap();\n\n assert_eq!(ship.dir(), Direction::West);\n\n\n\n ship = Ship::new(vec![[1, 0], [0, 0]]).unwrap();\n\n assert_eq!(ship.dir(), Direction::East);\n\n }\n\n\n\n #[test]\n\n fn len() {\n", "file_path": "src/ship.rs", "rank": 76, "score": 2.8530196708358972 }, { "content": " }\n\n }\n\n }\n\n\n\n /// Returns the grid coordinates of the mouse cursor position.\n\n fn mouse_cursor_grid_position(&self) -> Option<[u8; 2]> {\n\n if self.mouse_over_grid() {\n\n let grid_area_f64 = [self.grid_area[0] as f64, self.grid_area[1] as f64];\n\n\n\n Some([\n\n ((self.mouse_cursor[0] - grid_area_f64[0]) / grid_area_f64[0]) as u8,\n\n ((self.mouse_cursor[1] - grid_area_f64[1]) / grid_area_f64[0]) as u8,\n\n ])\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn mouse_over_grid(&self) -> bool {\n\n self.mouse_cursor[0] >= self.grid_area[0] as f64\n", "file_path": "src/app.rs", 
"rank": 77, "score": 2.6495321720397076 }, { "content": " } else {\n\n let dir = Direction::from_positions(&pos[1], &pos[0])?;\n\n\n\n self.position = pos;\n\n self.dir = dir;\n\n\n\n Ok(())\n\n }\n\n }\n\n }\n\n\n\n /// Returns the ship's direction.\n\n pub fn dir(&self) -> Direction {\n\n self.dir\n\n }\n\n\n\n /// Returns the ship's length.\n\n pub fn len(&self) -> usize {\n\n self.position.len()\n\n }\n", "file_path": "src/ship.rs", "rank": 78, "score": 2.614421294040567 }, { "content": "\n\n for i in 0..pos.len() - 1 {\n\n let x_diff = (pos[i][0] as i16 - pos[i + 1][0] as i16).abs() as u8;\n\n let y_diff = (pos[i][1] as i16 - pos[i + 1][1] as i16).abs() as u8;\n\n\n\n if x_diff + y_diff != 1 {\n\n valid = false;\n\n break;\n\n }\n\n\n\n let next_dir = Direction::from_positions(&pos[i + 1], &pos[i])?;\n\n\n\n if next_dir != dir {\n\n valid = false;\n\n break;\n\n }\n\n }\n\n\n\n if !valid {\n\n Err(\"ship position does not form a continuous line\")\n", "file_path": "src/ship.rs", "rank": 79, "score": 2.434981232399571 }, { "content": " assert!(space.is_hit());\n\n\n\n space = Space::new([0, 0]);\n\n assert!(space.set_checked(false).is_ok());\n\n assert!(!space.is_hit());\n\n }\n\n\n\n #[test]\n\n fn pos() {\n\n let space = Space::new([0, 0]);\n\n assert_eq!(space.pos(), &[0, 0]);\n\n }\n\n}\n", "file_path": "src/space.rs", "rank": 80, "score": 2.0223792606465754 }, { "content": " let game_over_text = [\n\n self.get_texture(images_dir.join(\"game-over.png\")),\n\n self.get_texture(images_dir.join(\"wins.png\")),\n\n ];\n\n\n\n while let Some(e) = self.window.next() {\n\n if let Some(p) = e.press_args() {\n\n match p {\n\n Button::Mouse(mouse::MouseButton::Left) => self.mouse_left_click(),\n\n Button::Mouse(mouse::MouseButton::Right) => self.button_secondary(),\n\n Button::Keyboard(keyboard::Key::Left) => self.button_left(),\n\n Button::Keyboard(keyboard::Key::Right) => self.button_right(),\n\n Button::Keyboard(keyboard::Key::Up) => self.button_up(),\n\n 
Button::Keyboard(keyboard::Key::Down) => self.button_down(),\n\n Button::Keyboard(keyboard::Key::Return) => self.button_primary(),\n\n Button::Keyboard(keyboard::Key::Space) => self.button_secondary(),\n\n _ => {}\n\n }\n\n }\n\n\n", "file_path": "src/app.rs", "rank": 81, "score": 1.9027902906553167 }, { "content": " Ok(Direction::West)\n\n } else if x_diff < 0 && y_diff == 0 {\n\n Ok(Direction::East)\n\n } else {\n\n Err(\"positions do not represent a supported direction\")\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn opposite() {\n\n assert_eq!(Direction::North.opposite(), Direction::South);\n\n assert_eq!(Direction::South.opposite(), Direction::North);\n\n assert_eq!(Direction::East.opposite(), Direction::West);\n\n assert_eq!(Direction::West.opposite(), Direction::East);\n\n }\n", "file_path": "src/direction.rs", "rank": 82, "score": 1.7151535254045567 } ]
Rust
src/kmain.rs
hobo0xcc/citron
46fb564f0f7e360fa4e210f5963b121eb8f98594
use crate::arch::riscv64::virtio::gpu_device; use crate::arch::riscv64::virtio::keyboard_device; use crate::arch::riscv64::virtio::mouse_device; use crate::arch::target::interrupt; use crate::arch::target::virtio::virtio_input::*; use crate::graphics::layer_manager; use crate::graphics::*; use crate::process::*; use crate::*; pub unsafe extern "C" fn kproc() { let pm = process_manager(); let mouse = mouse_device(); let keyboard = keyboard_device(); let gpu = gpu_device(); mouse.lock().init_input_event(); keyboard.lock().init_input_event(); gpu.lock().init_display(); graphics::init(); let lm = layer_manager(); loop { let queue = &mut mouse.lock().event_queue; if queue.is_empty() { pm.event_wait(pm.running, ProcessEvent::MouseEvent) .expect("process"); continue; } let mut event = queue.pop_front(); while let Some(ev) = event { match EventType::from(ev.type_) { EventType::EV_REL => { if ev.code == EV_REL::REL_X as u16 { lm.move_rel(MOUSE_LAYER_ID, ev.value as i32, 0); } else if ev.code == EV_REL::REL_Y as u16 { lm.move_rel(MOUSE_LAYER_ID, 0, ev.value as i32); } } EventType::EV_KEY => { if ev.code == EV_KEY::BTN_LEFT as u16 && ev.value == 1 { let x = lm.get_layer_x(MOUSE_LAYER_ID); let y = lm.get_layer_y(MOUSE_LAYER_ID); lm.on_event(ObjectEvent::MouseLeftPress(x, y), MOUSE_LAYER_ID); } else if ev.code == EV_KEY::BTN_LEFT as u16 && ev.value == 0 { let x = lm.get_layer_x(MOUSE_LAYER_ID); let y = lm.get_layer_y(MOUSE_LAYER_ID); lm.on_event(ObjectEvent::MouseLeftRelease(x, y), MOUSE_LAYER_ID); } } EventType::EV_SYN => { lm.update(MOUSE_LAYER_ID); } _ => {} } event = queue.pop_front(); } } } pub unsafe extern "C" fn fs_proc() { fs::fat::init(); fs::init(); let pm = process_manager(); let pid = pm.create_process("user", 1, true).expect("process"); pm.load_program(pid, "/bin/main").expect("process"); pm.ready(pid).expect("process"); let running = pm.running; get_process_mut!(pm.ptable_lock_mut(), running) .expect("process") .state = State::Free; 
pm.schedule().expect("process"); loop { pm.schedule().expect("process"); } } #[no_mangle] pub extern "C" fn kmain() { let mut hart_id: usize; unsafe { asm!("mv {}, tp", out(reg)hart_id); } if hart_id != 0 { loop {} } unsafe { init::init_all(); } println!("Initialization done"); println!("Hello, citron!"); let pm = unsafe { process::process_manager() }; interrupt::timer_interrupt_off(); interrupt::interrupt_on(); pm.defer_schedule(DeferCommand::Start).expect("process"); let pid = pm .create_kernel_process("fs", 1, fs_proc as usize) .expect("process"); pm.ready(pid).expect("process"); let pid = pm .create_kernel_process("kproc", 2, kproc as usize) .expect("process"); pm.ready(pid).expect("process"); pm.defer_schedule(DeferCommand::Stop).expect("process"); interrupt::timer_interrupt_on(); loop { pm.schedule().expect("process"); } }
use crate::arch::riscv64::virtio::gpu_device; use crate::arch::riscv64::virtio::keyboard_device; use crate::arch::riscv64::virtio::mouse_device; use crate::arch::target::interrupt; use crate::arch::target::virtio::virtio_input::*; use crate::graphics::layer_manager; use crate::graphics::*; use crate::process::*; use crate::*; pub unsafe extern "C" fn kproc() { let pm = process_manager(); let mouse = mouse_device(); let keyboard = keyboard_device(); let gpu = gpu_device(); mouse.lock().init_input_event(); keyboard.lock().init_input_event(); gpu.lock().init_display(); graphics::init(); let lm = layer_manager(); loop { let queue = &mut mouse.lock().event_queue; if queue.is_empty() { pm.event_wait(pm.running, ProcessEvent::MouseEvent) .expect("process"); continue; } let mut event = queue.pop_front(); while let Some(ev) = event { match EventType::from(ev.type_) { EventType::EV_REL => { if ev.code == EV_REL::REL_X as u16 { lm.move_rel(MOUSE_LAYER_ID, ev.value as i32, 0); } else if ev.code == EV_REL::REL_Y as u16 { lm.move_rel(MOUSE_LAYER_ID, 0, ev.value as i32); } } EventType::EV_KEY => {
} EventType::EV_SYN => { lm.update(MOUSE_LAYER_ID); } _ => {} } event = queue.pop_front(); } } } pub unsafe extern "C" fn fs_proc() { fs::fat::init(); fs::init(); let pm = process_manager(); let pid = pm.create_process("user", 1, true).expect("process"); pm.load_program(pid, "/bin/main").expect("process"); pm.ready(pid).expect("process"); let running = pm.running; get_process_mut!(pm.ptable_lock_mut(), running) .expect("process") .state = State::Free; pm.schedule().expect("process"); loop { pm.schedule().expect("process"); } } #[no_mangle] pub extern "C" fn kmain() { let mut hart_id: usize; unsafe { asm!("mv {}, tp", out(reg)hart_id); } if hart_id != 0 { loop {} } unsafe { init::init_all(); } println!("Initialization done"); println!("Hello, citron!"); let pm = unsafe { process::process_manager() }; interrupt::timer_interrupt_off(); interrupt::interrupt_on(); pm.defer_schedule(DeferCommand::Start).expect("process"); let pid = pm .create_kernel_process("fs", 1, fs_proc as usize) .expect("process"); pm.ready(pid).expect("process"); let pid = pm .create_kernel_process("kproc", 2, kproc as usize) .expect("process"); pm.ready(pid).expect("process"); pm.defer_schedule(DeferCommand::Stop).expect("process"); interrupt::timer_interrupt_on(); loop { pm.schedule().expect("process"); } }
if ev.code == EV_KEY::BTN_LEFT as u16 && ev.value == 1 { let x = lm.get_layer_x(MOUSE_LAYER_ID); let y = lm.get_layer_y(MOUSE_LAYER_ID); lm.on_event(ObjectEvent::MouseLeftPress(x, y), MOUSE_LAYER_ID); } else if ev.code == EV_KEY::BTN_LEFT as u16 && ev.value == 0 { let x = lm.get_layer_x(MOUSE_LAYER_ID); let y = lm.get_layer_y(MOUSE_LAYER_ID); lm.on_event(ObjectEvent::MouseLeftRelease(x, y), MOUSE_LAYER_ID); }
if_condition
[ { "content": "pub fn unmap(root: &mut Table) {\n\n let page_layout = Layout::from_size_align(0x1000, 0x1000).unwrap();\n\n #[cfg(target_pointer_width = \"32\")]\n\n for lv2 in 0..Table::len() {\n\n let ref entry_lv2 = root.entries[lv2];\n\n if entry_lv2.is_valid() && entry_lv2.is_branch() {\n\n let memaddr_lv1 = (entry_lv2.get_entry() & !0x3ff) << 2;\n\n let table_lv1 = unsafe { (memaddr_lv1 as *mut Table).as_mut().unwrap() };\n\n for lv1 in 0..Table::len() {\n\n let ref entry_lv1 = table_lv1.entries[lv1];\n\n if entry_lv1.is_valid() && entry_lv1.is_branch() {\n\n let memaddr_lv0 = (entry_lv1.get_entry() & !0x3ff) << 2;\n\n unsafe {\n\n dealloc(memaddr_lv0 as *mut u8, page_layout);\n\n }\n\n }\n\n }\n\n unsafe {\n\n dealloc(memaddr_lv1 as *mut u8, page_layout);\n\n }\n", "file_path": "src/arch/riscv64/paging.rs", "rank": 0, "score": 149223.28375026296 }, { "content": "pub fn init(base: usize) -> VirtioGpu {\n\n let mut gpu = VirtioGpu::new(base);\n\n gpu.init();\n\n gpu\n\n}\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 1, "score": 131094.8513543954 }, { "content": "pub fn init() {\n\n let pm = ProcessManager::new();\n\n unsafe {\n\n PM = Some(pm);\n\n let pm = process_manager();\n\n pm.init().expect(\"process\");\n\n }\n\n}\n", "file_path": "src/process.rs", "rank": 2, "score": 117221.93529712412 }, { "content": "pub fn init() {\n\n let dev = unsafe { fat::fat32() };\n\n let fs = FileSystem::new(dev.get_mut());\n\n unsafe {\n\n FS = MaybeUninit::new(Mutex::new(fs));\n\n }\n\n}\n", "file_path": "src/fs.rs", "rank": 3, "score": 117221.93529712412 }, { "content": "pub fn init() {\n\n let arena = ObjectArena::new();\n\n unsafe {\n\n OBJECT_ARENA = Some(arena);\n\n }\n\n\n\n let arena = unsafe { object_arena() };\n\n\n\n let display = unsafe { gpu_device() };\n\n let width = display.lock().width;\n\n let height = display.lock().height;\n\n let mut lm = LayerManager::new(display);\n\n let wm = WindowManager::new();\n\n let mouse_transparent_color = 
0xff00ff00;\n\n let mouse = Mouse::new(mouse_transparent_color);\n\n let mouse_id = arena.alloc(Box::new(mouse));\n\n let desktop = Desktop::new(0xffffffff, width, height);\n\n let desktop_id = arena.alloc(Box::new(desktop));\n\n unsafe {\n\n MOUSE_LAYER_ID = lm.create_layer(mouse_id, 0, 0, 16, 23);\n", "file_path": "src/graphics.rs", "rank": 4, "score": 117221.93529712412 }, { "content": "pub fn load_exe(path: &str, page_table: &mut Table) -> Result<ExecutableInfo, Error> {\n\n let fs = unsafe { file_system() };\n\n let fd = fs.lock().open_file(path)?;\n\n let size = fs.lock().get_file_size(fd)?;\n\n\n\n let layout = Layout::from_size_align(size, 0x1000).unwrap();\n\n let bin_data = unsafe { alloc_zeroed(layout) };\n\n let bin_slice = unsafe { slice::from_raw_parts_mut(bin_data, size) };\n\n fs.lock().read(fd, bin_slice)?;\n\n\n\n let obj = Object::parse(bin_slice).unwrap();\n\n let elf = match obj {\n\n Object::Elf(elf) => elf,\n\n _ => return Err(Error::Msg(format!(\"{} is not an elf file\", path))),\n\n };\n\n\n\n let mut segment_buffers = Vec::new();\n\n\n\n for ph in elf.program_headers.iter() {\n\n let vm_range = ph.vm_range();\n", "file_path": "src/arch/riscv64/loader.rs", "rank": 5, "score": 117094.97504136909 }, { "content": "pub fn id_map_range(root: &mut Table, start: usize, end: usize, bits: usize) {\n\n let mut memaddr = start & !(PAGE_SIZE - 1);\n\n let num_kb_pages = (align_val(end, 12) - memaddr) / PAGE_SIZE;\n\n for _ in 0..num_kb_pages {\n\n map(root, memaddr, memaddr, bits, 0);\n\n memaddr += PAGE_SIZE;\n\n }\n\n}\n\n\n\npub extern \"C\" fn init() {\n\n // println!();\n\n // println!(\"SECTION _text_start\\t: {:#010x}\", _text_start as usize);\n\n // println!(\"SECTION _text_end\\t: {:#010x}\", _text_end as usize);\n\n // println!(\"SECTION _rodata_start\\t: {:#010x}\", _rodata_start as usize);\n\n // println!(\"SECTION _rodata_end\\t: {:#010x}\", _rodata_end as usize);\n\n // println!(\"SECTION _data_start\\t: {:#010x}\", _data_start as 
usize);\n\n // println!(\"SECTION _data_end\\t: {:#010x}\", _data_end as usize);\n\n // println!(\"SECTION _bss_start\\t: {:#010x}\", _bss_start as usize);\n\n // println!(\"SECTION _bss_end\\t: {:#010x}\", _bss_end as usize);\n\n // println!(\"SECTION _stack_start\\t: {:#010x}\", _stack_start as usize);\n", "file_path": "src/arch/riscv64/paging.rs", "rank": 6, "score": 115089.44320216321 }, { "content": "pub fn init() {\n\n #[cfg(target_arch = \"riscv64\")]\n\n let dev = unsafe { block_device() };\n\n let mut fat32 = Fat32::<VirtioBlk>::new(dev.get_mut());\n\n unsafe {\n\n fat32.init();\n\n FAT32_FS = MaybeUninit::new(Mutex::new(fat32));\n\n }\n\n}\n", "file_path": "src/fs/fat.rs", "rank": 7, "score": 114787.59419585037 }, { "content": "pub fn interrupt_off() {\n\n let mut sstatus = Csr::Sstatus.read();\n\n sstatus &= !(1 << 1); // unset SSTATUS.SIE\n\n Csr::Sstatus.write(sstatus);\n\n}\n\n\n", "file_path": "src/arch/riscv64/interrupt.rs", "rank": 8, "score": 112510.00695606622 }, { "content": "#[allow(unaligned_references)]\n\npub fn init() {\n\n let virtio_base = layout::_virtio_start as usize;\n\n // let virtio_end = layout::_virtio_end as usize;\n\n\n\n for i in 0..4 {\n\n let offset = i * 0x1000;\n\n let ptr = virtio_base + offset;\n\n if read_reg32(ptr, VirtioReg::MagicValue.val()) != 0x74726976 {\n\n continue;\n\n }\n\n\n\n match read_reg32(ptr, VirtioReg::DeviceId.val()) {\n\n 2 => {\n\n let blk = virtio_blk::init(ptr);\n\n println!(\"virtio_blk: {:#018x}\", ptr);\n\n unsafe {\n\n BLOCK_DEVICE = MaybeUninit::new(Mutex::new(blk));\n\n }\n\n }\n\n 16 => {\n", "file_path": "src/arch/riscv64/virtio.rs", "rank": 9, "score": 112510.00695606622 }, { "content": "pub fn init() {\n\n let ptr = layout::_uart0_start as *mut u8;\n\n unsafe {\n\n // First, set the word length, which\n\n // are bits 0 and 1 of the line control register (LCR)\n\n // which is at base_address + 3\n\n // We can easily write the value 3 here or 0b11, but I'm\n\n // extending it so that it is 
clear we're setting two individual\n\n // fields\n\n // Word 0 Word 1\n\n // ~~~~~~ ~~~~~~\n\n ptr.add(3).write_volatile((1 << 0) | (1 << 1));\n\n\n\n // Now, enable the FIFO, which is bit index 0 of the FIFO\n\n // control register (FCR at offset 2).\n\n // Again, we can just write 1 here, but when we use left shift,\n\n // it's easier to see that we're trying to write bit index #0.\n\n ptr.add(2).write_volatile(1 << 0);\n\n\n\n // Enable receiver buffer interrupts, which is at bit index\n", "file_path": "src/arch/riscv64/uart.rs", "rank": 10, "score": 112510.00695606622 }, { "content": "pub fn init_all() {\n\n serial::init();\n\n}", "file_path": "src/arch/aarch64/init.rs", "rank": 11, "score": 112510.00695606622 }, { "content": "pub fn init() {\n\n uart::init();\n\n}\n", "file_path": "src/arch/riscv64/serial.rs", "rank": 12, "score": 112510.00695606622 }, { "content": "pub fn init() {\n\n let mut dev = uart::Uart::new();\n\n dev.init();\n\n}\n", "file_path": "src/arch/aarch64/serial.rs", "rank": 13, "score": 112510.00695606622 }, { "content": "pub fn interrupt_on() {\n\n let mut sstatus = Csr::Sstatus.read();\n\n sstatus |= 1 << 1; // set SSTATUS.SIE\n\n Csr::Sstatus.write(sstatus);\n\n}\n\n\n", "file_path": "src/arch/riscv64/interrupt.rs", "rank": 14, "score": 112510.00695606622 }, { "content": "pub fn init_all() {\n\n serial::init();\n\n plic::init();\n\n paging::init();\n\n virtio::init();\n\n}\n", "file_path": "src/arch/riscv64/init.rs", "rank": 15, "score": 112510.00695606622 }, { "content": "pub fn map(root: &mut Table, vaddr: usize, paddr: usize, bits: usize, level: usize) {\n\n assert!(bits & 0xe != 0);\n\n\n\n #[cfg(target_pointer_width = \"32\")]\n\n let vpn = [(vaddr >> 12) & 0x3ff, (vaddr >> 22) & 0x3ff];\n\n #[cfg(target_pointer_width = \"64\")]\n\n let vpn = [\n\n (vaddr >> 12) & 0x1ff,\n\n (vaddr >> 21) & 0x1ff,\n\n (vaddr >> 30) & 0x1ff,\n\n ];\n\n\n\n #[cfg(target_pointer_width = \"32\")]\n\n let ppn = [(paddr >> 12) & 0x3ff, (paddr >> 22) & 
0xfff];\n\n #[cfg(target_pointer_width = \"64\")]\n\n let ppn = [\n\n (paddr >> 12) & 0x1ff,\n\n (paddr >> 21) & 0x1ff,\n\n (paddr >> 30) & 0x3ffffff,\n\n ];\n", "file_path": "src/arch/riscv64/paging.rs", "rank": 16, "score": 111616.474624961 }, { "content": "pub fn timer_interrupt_on() {\n\n // enable supervisor software interrupt\n\n let mut sie_val = Csr::Sie.read();\n\n sie_val |= 1 << 1;\n\n Csr::Sie.write(sie_val);\n\n}\n\n\n", "file_path": "src/arch/riscv64/interrupt.rs", "rank": 17, "score": 110374.50513540438 }, { "content": "pub fn timer_interrupt_off() {\n\n // disable supervisor software interrupt\n\n let mut sie_val = Csr::Sie.read();\n\n sie_val &= !(1 << 1);\n\n Csr::Sie.write(sie_val);\n\n}\n\n\n", "file_path": "src/arch/riscv64/interrupt.rs", "rank": 18, "score": 110374.50513540438 }, { "content": "pub fn map_range(root: &mut Table, vaddr: usize, paddr: usize, size: usize, bits: usize) {\n\n let mut memaddr = paddr & !(PAGE_SIZE - 1);\n\n let mut memaddr_v = vaddr & !(PAGE_SIZE - 1);\n\n let num_kb_pages = (align_val(memaddr + size, 12) - memaddr) / PAGE_SIZE;\n\n for _ in 0..num_kb_pages {\n\n map(root, memaddr_v, memaddr, bits, 0);\n\n memaddr += PAGE_SIZE;\n\n memaddr_v += PAGE_SIZE;\n\n }\n\n}\n\n\n", "file_path": "src/arch/riscv64/paging.rs", "rank": 19, "score": 109845.49845058 }, { "content": "pub fn claim() -> u32 {\n\n read_reg32(PlicReg::Sclaim.val())\n\n}\n\n\n", "file_path": "src/arch/riscv64/plic.rs", "rank": 20, "score": 106811.56709728678 }, { "content": "pub fn interrupt_disable() -> usize {\n\n let mask = Csr::Sstatus.read() & (1 << 1);\n\n interrupt_off();\n\n return mask;\n\n}\n\n\n", "file_path": "src/arch/riscv64/interrupt.rs", "rank": 21, "score": 104805.25705130614 }, { "content": "pub fn is_interrupt_enable() -> bool {\n\n let sstatus = Csr::Sstatus.read();\n\n let enable = (sstatus & 1 << 1) >> 1;\n\n enable != 0\n\n}\n", "file_path": "src/arch/riscv64/interrupt.rs", "rank": 22, "score": 104805.25705130614 }, { "content": 
"pub fn interrupt(irq: u32) {\n\n let index = irq as usize - plic::Irq::VirtioFirstIrq.val();\n\n match index {\n\n 0 => {\n\n let blk = unsafe { block_device() };\n\n blk.get_mut().pending();\n\n }\n\n 1 => {\n\n let gpu = unsafe { gpu_device() };\n\n gpu.get_mut().pending();\n\n }\n\n 2 => {\n\n let mouse = unsafe { mouse_device() };\n\n mouse.get_mut().pending();\n\n }\n\n 3 => {\n\n let keyboard = unsafe { keyboard_device() };\n\n keyboard.get_mut().pending();\n\n }\n\n _ => panic!(\"unknown virtio device: {}\", index),\n\n }\n\n}\n\n\n", "file_path": "src/arch/riscv64/virtio.rs", "rank": 23, "score": 101775.5902644079 }, { "content": "pub fn complete(irq: u32) {\n\n write_reg32(PlicReg::Sclaim.val(), irq);\n\n}\n\n\n\npub extern \"C\" fn init() {\n\n // uart priority\n\n write_reg32(Irq::UartIrq.val() * 4, 1);\n\n // virtio priority\n\n // TODO: add more virtio devices\n\n write_reg32(Irq::VirtioFirstIrq.val() * 4, 1);\n\n write_reg32((Irq::VirtioFirstIrq.val() + 1) * 4, 1);\n\n write_reg32((Irq::VirtioFirstIrq.val() + 2) * 4, 1);\n\n write_reg32((Irq::VirtioFirstIrq.val() + 3) * 4, 1);\n\n\n\n let senable = (1 << Irq::UartIrq.val())\n\n | (1 << Irq::VirtioFirstIrq.val())\n\n | (1 << (Irq::VirtioFirstIrq.val() + 1))\n\n | (1 << (Irq::VirtioFirstIrq.val() + 2))\n\n | (1 << (Irq::VirtioFirstIrq.val() + 3));\n\n write_reg32(PlicReg::Senable.val(), senable);\n\n\n\n write_reg32(PlicReg::Spriority.val(), 0);\n\n}\n", "file_path": "src/arch/riscv64/plic.rs", "rank": 24, "score": 101775.5902644079 }, { "content": "pub fn interrupt_restore(mask: usize) {\n\n Csr::Sstatus.write(Csr::Sstatus.read() | mask);\n\n}\n\n\n", "file_path": "src/arch/riscv64/interrupt.rs", "rank": 25, "score": 99887.09260214915 }, { "content": "pub fn test_runner(tests: &[&dyn Testable]) {\n\n println!(\"Running {} tests\", tests.len());\n\n for test in tests {\n\n test.run();\n\n }\n\n debug::exit_qemu(0, debug::Status::Pass);\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! 
print {\n\n ($($args:tt)+) => {{\n\n use core::fmt::Write;\n\n let _ = write!(crate::arch::target::serial::get_serial(), $($args)+);\n\n }};\n\n}\n\n#[macro_export]\n\nmacro_rules! println\n\n{\n\n\t() => ({\n\n\t\tprint!(\"\\r\\n\")\n", "file_path": "src/lib.rs", "rank": 26, "score": 99167.80704474059 }, { "content": "pub fn read_reg32(offset: usize) -> u32 {\n\n let base = layout::_plic_start as usize;\n\n let ptr = (base + offset) as *mut u32;\n\n unsafe { ptr.read_volatile() }\n\n}\n\n\n", "file_path": "src/arch/riscv64/plic.rs", "rank": 27, "score": 95498.54301110067 }, { "content": "pub fn exit_qemu(code: u32, status: Status) -> ! {\n\n #[cfg(target_arch = \"riscv64\")]\n\n {\n\n let exit_status = match status {\n\n Status::Pass => debug::QemuExitStatus::Pass,\n\n Status::Fail => debug::QemuExitStatus::Fail,\n\n };\n\n debug::exit_qemu(code, exit_status);\n\n }\n\n\n\n loop {}\n\n}\n", "file_path": "src/debug.rs", "rank": 28, "score": 95011.02439696994 }, { "content": "pub fn init(base: usize) -> VirtioBlk {\n\n let mut blk = VirtioBlk::new(base);\n\n blk.init();\n\n blk\n\n}\n", "file_path": "src/arch/riscv64/virtio/virtio_blk.rs", "rank": 29, "score": 92225.32027351904 }, { "content": "pub fn get_serial() -> serial::Serial<uart::Uart> {\n\n serial::Serial::<uart::Uart>::new()\n\n}\n\n\n", "file_path": "src/arch/aarch64/serial.rs", "rank": 30, "score": 91548.26012825151 }, { "content": "pub fn write_reg32(offset: usize, val: u32) {\n\n let base = layout::_plic_start as usize;\n\n let ptr = (base + offset) as *mut u32;\n\n unsafe {\n\n ptr.write_volatile(val);\n\n }\n\n}\n\n\n", "file_path": "src/arch/riscv64/plic.rs", "rank": 31, "score": 91548.26012825151 }, { "content": "pub fn get_serial() -> serial::Serial<uart::Uart> {\n\n serial::Serial::<uart::Uart>::new()\n\n}\n\n\n", "file_path": "src/arch/riscv64/serial.rs", "rank": 32, "score": 91548.26012825151 }, { "content": "pub fn exit_qemu(code: u32, status: QemuExitStatus) {\n\n unsafe {\n\n let virt_test = 
QEMU_VIRT_TEST as *mut u32;\n\n let val = status as u32 | code << 16;\n\n virt_test.write_volatile(val);\n\n }\n\n}\n", "file_path": "src/arch/riscv64/debug.rs", "rank": 33, "score": 88449.42839701989 }, { "content": "pub fn read_reg64(base: usize, offset: usize) -> u64 {\n\n let ptr = (base + offset) as *mut u64;\n\n unsafe { ptr.read_volatile() }\n\n}\n\n\n", "file_path": "src/arch/riscv64/virtio.rs", "rank": 34, "score": 87966.00314918754 }, { "content": "pub fn read_reg32(base: usize, offset: usize) -> u32 {\n\n let ptr = (base + offset) as *mut u32;\n\n unsafe { ptr.read_volatile() }\n\n}\n\n\n", "file_path": "src/arch/riscv64/virtio.rs", "rank": 35, "score": 87966.00314918754 }, { "content": "pub fn virt_to_phys(root: &Table, vaddr: usize) -> Option<usize> {\n\n #[cfg(target_pointer_width = \"32\")]\n\n let vpn = [(vaddr >> 12) & 0x3ff, (vaddr >> 22) & 0x3ff];\n\n #[cfg(target_pointer_width = \"64\")]\n\n let vpn = [\n\n (vaddr >> 12) & 0x1ff,\n\n (vaddr >> 21) & 0x1ff,\n\n (vaddr >> 30) & 0x1ff,\n\n ];\n\n\n\n #[cfg(target_pointer_width = \"32\")]\n\n let mut v = &root.entries[vpn[1]];\n\n #[cfg(target_pointer_width = \"64\")]\n\n let mut v = &root.entries[vpn[2]];\n\n\n\n for i in (0..=(LEVELS - 1)).rev() {\n\n if v.is_invalid() {\n\n break;\n\n } else if v.is_leaf() {\n\n #[cfg(target_pointer_width = \"32\")]\n", "file_path": "src/arch/riscv64/paging.rs", "rank": 36, "score": 84696.71170829152 }, { "content": "pub fn write_reg64(base: usize, offset: usize, val: u64) {\n\n let ptr = (base + offset) as *mut u64;\n\n unsafe {\n\n ptr.write_volatile(val);\n\n }\n\n}\n\n\n\npub unsafe fn block_device() -> &'static mut Mutex<virtio_blk::VirtioBlk> {\n\n BLOCK_DEVICE.assume_init_mut()\n\n // match *BLOCK_DEVICE.lock() {\n\n // Some(ref mut blk) => blk,\n\n // None => panic!(\"block device is uninitialized\"),\n\n // }\n\n}\n\n\n\npub unsafe fn gpu_device() -> &'static mut Mutex<virtio_gpu::VirtioGpu> {\n\n GPU_DEVICE.assume_init_mut()\n\n // match 
*GPU_DEVICE.lock() {\n\n // Some(ref mut gpu) => gpu,\n\n // None => panic!(\"gpu device is uninitialized\"),\n", "file_path": "src/arch/riscv64/virtio.rs", "rank": 37, "score": 84696.71170829152 }, { "content": "pub fn write_reg32(base: usize, offset: usize, val: u32) {\n\n let ptr = (base + offset) as *mut u32;\n\n unsafe {\n\n ptr.write_volatile(val);\n\n }\n\n}\n\n\n", "file_path": "src/arch/riscv64/virtio.rs", "rank": 38, "score": 84696.71170829152 }, { "content": "pub fn init(base: usize, device_type: DeviceType) -> VirtioInput {\n\n let mut input = VirtioInput::new(base, device_type);\n\n input.init();\n\n input\n\n}\n", "file_path": "src/arch/riscv64/virtio/virtio_input.rs", "rank": 39, "score": 82376.18794172592 }, { "content": "#[test_case]\n\nfn test_print() {\n\n println!(\"Hello, world!\");\n\n assert_eq!(2, 2);\n\n}\n", "file_path": "tests/print.rs", "rank": 40, "score": 61162.48050313849 }, { "content": "pub trait Disk {\n\n fn read_sector(&mut self, sector: usize, buffer: &mut [u8]);\n\n fn write_sector(&mut self, sector: usize, buffer: &mut [u8]);\n\n fn sector_size(&self) -> usize;\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n Msg(String),\n\n FileNotOpen,\n\n FileNotExist,\n\n UnknownOption,\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 41, "score": 58218.24203330094 }, { "content": "pub trait Testable {\n\n fn run(&self) -> ();\n\n}\n\n\n\nimpl<T> Testable for T\n\nwhere\n\n T: Fn(),\n\n{\n\n fn run(&self) {\n\n println!(\"{}...\\t\", core::any::type_name::<T>());\n\n self();\n\n println!(\"[ok]\");\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 42, "score": 58218.24203330094 }, { "content": "pub trait Painter {\n\n fn draw_at(&mut self, x: u32, y: u32, pixel: u32);\n\n fn copy_buf(&mut self, src: *mut u32, src_offset: usize, dst_offset: usize, size: usize);\n\n fn flush(&mut self);\n\n fn get_width(&self) -> u32;\n\n fn get_height(&self) -> u32;\n\n}\n\n\n\nimpl<T: Painter> Painter for Mutex<T> {\n\n fn draw_at(&mut self, x: u32, 
y: u32, pixel: u32) {\n\n self.lock().draw_at(x, y, pixel);\n\n }\n\n\n\n fn copy_buf(&mut self, src: *mut u32, src_offset: usize, dst_offset: usize, size: usize) {\n\n self.lock().copy_buf(src, src_offset, dst_offset, size);\n\n }\n\n\n\n fn flush(&mut self) {\n\n self.lock().flush();\n\n }\n", "file_path": "src/graphics.rs", "rank": 43, "score": 58218.24203330094 }, { "content": "pub trait Paging {\n\n fn map(vaddr: usize, paddr: usize, perm: usize);\n\n fn unmap(vaddr: usize);\n\n}\n", "file_path": "src/arch/paging.rs", "rank": 44, "score": 57002.59100738721 }, { "content": "pub trait Object: AToAny {\n\n fn draw_to(&mut self, buffer: &mut FrameBuffer, x: u32, y: u32);\n\n fn on_event(&mut self, event: ObjectEvent, layer_id: LayerId);\n\n fn get_width(&self) -> u32;\n\n fn get_height(&self) -> u32;\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct FrameBuffer {\n\n pub buffer: *mut u32,\n\n pub width: u32,\n\n pub height: u32,\n\n}\n\n\n\nimpl Painter for FrameBuffer {\n\n fn draw_at(&mut self, x: u32, y: u32, pixel: u32) {\n\n let index = (y * self.width + x) as usize;\n\n unsafe {\n\n self.buffer.add(index).write(pixel);\n\n }\n", "file_path": "src/graphics.rs", "rank": 45, "score": 56464.8456167854 }, { "content": "pub trait AToAny: 'static {\n\n fn as_any(&self) -> &dyn Any;\n\n fn as_mut_any(&mut self) -> &mut dyn Any;\n\n}\n\n\n\nimpl<T: 'static> AToAny for T {\n\n fn as_any(&self) -> &dyn Any {\n\n self\n\n }\n\n\n\n fn as_mut_any(&mut self) -> &mut dyn Any {\n\n self\n\n }\n\n}\n\n\n", "file_path": "src/graphics.rs", "rank": 46, "score": 56464.8456167854 }, { "content": "#[alloc_error_handler]\n\nfn on_oom(_layout: Layout) -> ! 
{\n\n loop {}\n\n}\n", "file_path": "src/allocator.rs", "rank": 47, "score": 55975.461566861115 }, { "content": "pub trait SerialIO {\n\n fn put(&mut self, c: u8);\n\n fn get(&mut self) -> Option<u8>;\n\n}\n\n\n", "file_path": "src/arch/serial.rs", "rank": 48, "score": 55865.2190658779 }, { "content": "pub trait BackingFileSystem {\n\n fn read_at(&mut self, buffer: &mut [u8], path: &str, offset: usize) -> Result<usize, Error>;\n\n fn file_size(&mut self, path: &str) -> Result<usize, Error>;\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\n#[repr(u32)]\n\npub enum SeekWhence {\n\n SEEK_SET = 0,\n\n SEEK_CUR = 1,\n\n SEEK_END = 2,\n\n}\n\n\n", "file_path": "src/fs.rs", "rank": 49, "score": 55865.2190658779 }, { "content": "pub trait SerialInit {\n\n fn init() -> Self;\n\n}\n\n\n\npub struct Serial<T>\n\nwhere\n\n T: SerialIO,\n\n{\n\n pub dev: T,\n\n}\n\n\n\nimpl<T> Serial<T>\n\nwhere\n\n T: SerialIO,\n\n Serial<T>: SerialInit,\n\n{\n\n pub fn new() -> Self {\n\n Self::init()\n\n }\n\n}\n", "file_path": "src/arch/serial.rs", "rank": 50, "score": 55865.2190658779 }, { "content": "pub trait SysCallInfo {\n\n fn get_arg_raw(&self, idx: usize) -> usize;\n\n fn get_arg_ptr<T>(&self, idx: usize) -> *mut T;\n\n}\n", "file_path": "src/arch/syscall.rs", "rank": 51, "score": 54798.80114368538 }, { "content": "#[panic_handler]\n\nfn panic(info: &core::panic::PanicInfo) -> ! 
{\n\n print!(\"Aborting: \");\n\n if let Some(p) = info.location() {\n\n println!(\n\n \"line {}, file {}: {}\",\n\n p.line(),\n\n p.file(),\n\n info.message().unwrap()\n\n );\n\n } else {\n\n println!(\"no information available.\");\n\n }\n\n\n\n debug::exit_qemu(1, debug::Status::Fail);\n\n}\n", "file_path": "src/main.rs", "rank": 52, "score": 49898.612190896485 }, { "content": "fn align_val(val: usize, align: usize) -> usize {\n\n let t = (1usize << align) - 1;\n\n (val + t) & !t\n\n}\n\n\n", "file_path": "src/arch/riscv64/paging.rs", "rank": 53, "score": 46067.62650167805 }, { "content": " pub framebuffer: *mut u8,\n\n virtqueue: [NonNull<Virtqueue>; 2],\n\n curr_queue: VirtioGpuQueue,\n\n free_desc: [bool; VIRTIO_RING_SIZE], // true if the desc is free\n\n desc_indexes: Option<Vec<u16>>,\n\n ack_used_index: u16,\n\n resource_id: u32,\n\n pub width: u32,\n\n pub height: u32,\n\n sid: usize, // Semaphore id\n\n pid: usize,\n\n}\n\n\n\nimpl VirtioGpu {\n\n pub fn new(base: usize) -> Self {\n\n let pm = unsafe { process_manager() };\n\n VirtioGpu {\n\n base,\n\n framebuffer: null_mut(),\n\n virtqueue: [NonNull::dangling(); 2],\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 54, "score": 34311.56421242435 }, { "content": " panic!(\"free desc exhausted\");\n\n }\n\n\n\n pub fn allocate_desc(&mut self, n: usize, indexes: &mut Vec<u16>) {\n\n for _ in 0..n {\n\n let index = self.find_free_desc();\n\n indexes.push(index);\n\n }\n\n }\n\n\n\n pub fn deallocate_desc(&mut self, indexes: &Vec<u16>) {\n\n for i in indexes.iter() {\n\n self.free_desc[*i as usize] = true;\n\n }\n\n }\n\n\n\n pub fn write_desc(&mut self, i: usize, queue: VirtioGpuQueue, desc: VirtqDesc) {\n\n unsafe {\n\n let desc_ptr = self.virtqueue[queue as usize].as_mut().desc.add(i);\n\n *desc_ptr = desc;\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 55, "score": 34307.99156372303 }, { "content": " }\n\n }\n\n\n\n pub fn send_desc(&mut self, queue: VirtioGpuQueue, 
desc_indexes: Vec<u16>) {\n\n unsafe {\n\n let virtqueue = self.virtqueue[queue as usize].as_mut();\n\n self.write_reg64(VirtioReg::QueueDescLow.val(), virtqueue.desc as u64);\n\n self.write_reg64(VirtioReg::QueueDriverLow.val(), virtqueue.avail as u64);\n\n self.write_reg64(VirtioReg::QueueDeviceLow.val(), virtqueue.used as u64);\n\n self.curr_queue = queue;\n\n\n\n let pm = process_manager();\n\n\n\n let mut avail = virtqueue.avail.as_mut().unwrap();\n\n let index = avail.idx as usize;\n\n avail.ring[index % VIRTIO_RING_SIZE] = desc_indexes[0];\n\n asm!(\"fence iorw, iorw\");\n\n avail.idx = avail.idx.wrapping_add(1);\n\n asm!(\"fence iorw, iorw\");\n\n self.desc_indexes = Some(desc_indexes);\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 56, "score": 34307.21376747523 }, { "content": "\n\n let response_type = unsafe { (*req).response.type_ };\n\n\n\n if response_type != VirtioGpuCtrlType::RespOkNodata.val() {\n\n // panic!(\"virtio_gpu: resource_flush error {:?}\", response_type);\n\n }\n\n }\n\n\n\n #[allow(unaligned_references)]\n\n pub fn pending(&mut self) {\n\n // println!(\"virtio_gpu pending start\");\n\n let mask = interrupt_disable();\n\n let interrupt_status = self.read_reg32(VirtioReg::InterruptStatus.val());\n\n self.write_reg32(VirtioReg::InterruptACK.val(), interrupt_status & 0x3);\n\n let virtqueue = unsafe { self.virtqueue[self.curr_queue as usize].as_mut() };\n\n let desc = virtqueue.desc;\n\n let used = unsafe { virtqueue.used.as_mut().unwrap() };\n\n let mut freed_desc = BTreeSet::new();\n\n\n\n while self.ack_used_index != used.idx {\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 57, "score": 34306.35203036639 }, { "content": " pub fn write_reg32(&mut self, offset: usize, val: u32) {\n\n virtio::write_reg32(self.base, offset, val)\n\n }\n\n\n\n pub fn write_reg64(&mut self, offset: usize, val: u64) {\n\n virtio::write_reg64(self.base, offset, val)\n\n }\n\n\n\n pub fn init_virtq(&mut self, queue: 
VirtioGpuQueue) {\n\n assert_eq!(size_of::<VirtqDesc>(), 16);\n\n let desc_layout = Layout::from_size_align(16 * VIRTIO_RING_SIZE, 16).unwrap();\n\n let desc = unsafe { alloc_zeroed(desc_layout) } as *mut VirtqDesc;\n\n\n\n assert_eq!(size_of::<VirtqAvail>(), 6 + 2 * VIRTIO_RING_SIZE);\n\n let avail_layout = Layout::from_size_align(6 + 2 * VIRTIO_RING_SIZE, 2).unwrap();\n\n let avail = unsafe { alloc_zeroed(avail_layout) } as *mut VirtqAvail;\n\n\n\n assert_eq!(size_of::<VirtqUsed>(), 6 + 8 * VIRTIO_RING_SIZE);\n\n let used_layout = Layout::from_size_align(6 + 8 * VIRTIO_RING_SIZE, 2).unwrap();\n\n let used = unsafe { alloc_zeroed(used_layout) } as *mut VirtqUsed;\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 58, "score": 34306.00832295068 }, { "content": " curr_queue: VirtioGpuQueue::Controlq,\n\n free_desc: [true; VIRTIO_RING_SIZE],\n\n desc_indexes: None,\n\n ack_used_index: 0,\n\n resource_id: 1,\n\n width: 0,\n\n height: 0,\n\n sid: pm.create_semaphore(1),\n\n pid: 0,\n\n }\n\n }\n\n\n\n pub fn read_reg32(&mut self, offset: usize) -> u32 {\n\n virtio::read_reg32(self.base, offset)\n\n }\n\n\n\n pub fn read_reg64(&mut self, offset: usize) -> u64 {\n\n virtio::read_reg64(self.base, offset)\n\n }\n\n\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 59, "score": 34305.70315951759 }, { "content": "\n\n self.write_reg32(VirtioReg::QueueNum.val(), VIRTIO_RING_SIZE as u32);\n\n\n\n let virtqueue = unsafe { self.virtqueue[0].as_mut() };\n\n self.write_reg64(VirtioReg::QueueDescLow.val(), virtqueue.desc as u64);\n\n self.write_reg64(VirtioReg::QueueDriverLow.val(), virtqueue.avail as u64);\n\n self.write_reg64(VirtioReg::QueueDeviceLow.val(), virtqueue.used as u64);\n\n\n\n self.write_reg32(VirtioReg::QueueReady.val(), 1);\n\n\n\n status_bits |= VirtioDeviceStatus::DriverOk.val();\n\n self.write_reg32(VirtioReg::Status.val(), status_bits);\n\n\n\n pm.signal_semaphore(self.sid).expect(\"process\");\n\n }\n\n\n\n pub fn 
init_display(&mut self) {\n\n let pm = unsafe { process_manager() };\n\n pm.wait_semaphore(self.sid).expect(\"process\");\n\n self.pid = pm.running;\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 60, "score": 34304.856761568466 }, { "content": "use crate::arch::riscv64::interrupt::interrupt_disable;\n\nuse crate::arch::riscv64::interrupt::interrupt_restore;\n\nuse crate::process::process_manager;\n\nuse alloc::alloc::{alloc, alloc_zeroed, dealloc};\n\nuse alloc::collections::BTreeSet;\n\nuse alloc::vec::Vec;\n\nuse core::alloc::Layout;\n\nuse core::mem::size_of;\n\nuse core::ptr::{null_mut, NonNull};\n\n\n\nuse super::super::virtio;\n\nuse super::*;\n\n\n\n#[derive(Copy, Clone)]\n\npub enum VirtioGpuFeature {\n\n VirtioGpuFVirgl = 0,\n\n VirtioGpuFEdid = 1,\n\n}\n\n\n\nimpl VirtioGpuFeature {\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 61, "score": 34304.72806188782 }, { "content": " pm.io_wait(self.pid).expect(\"process\");\n\n self.write_reg32(VirtioReg::QueueNotify.val(), queue as u32);\n\n pm.schedule().expect(\"process\");\n\n }\n\n }\n\n\n\n pub fn update_range(&mut self, x: u32, y: u32, width: u32, height: u32) {\n\n self.transfer_to_host_2d(x, y, width, height, self.resource_id);\n\n self.resource_flush(x, y, width, height, self.resource_id);\n\n }\n\n\n\n pub fn update_display(&mut self) {\n\n self.transfer_to_host_2d(0, 0, self.width, self.height, self.resource_id);\n\n self.resource_flush(0, 0, self.width, self.height, self.resource_id);\n\n }\n\n\n\n pub fn get_display_info(&mut self) -> VirtioGpuRespDisplayInfo {\n\n let req = Request::<VirtioGpuCtrlHdr, VirtioGpuRespDisplayInfo>::new(VirtioGpuCtrlHdr {\n\n type_: VirtioGpuCtrlType::CmdGetDisplayInfo.val(),\n\n flags: 0,\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 62, "score": 34304.12978719419 }, { "content": " let index = self.ack_used_index % VIRTIO_RING_SIZE as u16;\n\n let elem = used.ring[index as usize];\n\n\n\n self.ack_used_index = 
self.ack_used_index.wrapping_add(1);\n\n unsafe {\n\n if freed_desc.contains(&elem.id) {\n\n continue;\n\n }\n\n freed_desc.insert(elem.id);\n\n let desc = desc.add(elem.id as usize).as_mut().unwrap();\n\n let req_layout = Layout::from_size_align(desc.len as usize, 8).unwrap();\n\n let req = desc.addr as *mut u8;\n\n dealloc(req, req_layout);\n\n }\n\n }\n\n\n\n let desc_indexes = match self.desc_indexes {\n\n Some(ref indexes) => indexes.clone(),\n\n None => panic!(\"desc_indexes must be saved\"),\n\n };\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 63, "score": 34303.567615917695 }, { "content": "\n\n assert_eq!(size_of::<Virtqueue>(), 24);\n\n let virtqueue_layout = Layout::from_size_align(size_of::<Virtqueue>(), 8).unwrap();\n\n let virtqueue = unsafe { alloc(virtqueue_layout) } as *mut Virtqueue;\n\n unsafe {\n\n *virtqueue = Virtqueue::new(desc, avail, used);\n\n }\n\n\n\n self.virtqueue[queue as usize] = NonNull::new(virtqueue).unwrap();\n\n }\n\n\n\n pub fn init(&mut self) {\n\n let pm = unsafe { process_manager() };\n\n\n\n pm.wait_semaphore(self.sid).expect(\"process\");\n\n\n\n let magic_value = self.read_reg32(VirtioReg::MagicValue.val());\n\n let version = self.read_reg32(VirtioReg::Version.val());\n\n let device_id = self.read_reg32(VirtioReg::DeviceId.val());\n\n if magic_value != 0x74726976 || version != 2 || device_id != 16 {\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 64, "score": 34303.4388023763 }, { "content": " self.init_virtq(VirtioGpuQueue::Cursorq);\n\n\n\n // assert_eq!(size_of::<VirtqDesc>(), 16);\n\n // let desc_layout = Layout::from_size_align(16 * VIRTIO_RING_SIZE, 16).unwrap();\n\n // let desc = unsafe { alloc_zeroed(desc_layout) } as *mut VirtqDesc;\n\n\n\n // assert_eq!(size_of::<VirtqAvail>(), 6 + 2 * VIRTIO_RING_SIZE);\n\n // let avail_layout = Layout::from_size_align(6 + 2 * VIRTIO_RING_SIZE, 2).unwrap();\n\n // let avail = unsafe { alloc_zeroed(avail_layout) } as *mut VirtqAvail;\n\n\n\n 
// assert_eq!(size_of::<VirtqUsed>(), 6 + 8 * VIRTIO_RING_SIZE);\n\n // let used_layout = Layout::from_size_align(6 + 8 * VIRTIO_RING_SIZE, 2).unwrap();\n\n // let used = unsafe { alloc_zeroed(used_layout) } as *mut VirtqUsed;\n\n\n\n // assert_eq!(size_of::<Virtqueue>(), 24);\n\n // let virtqueue_layout = Layout::from_size_align(size_of::<Virtqueue>(), 8).unwrap();\n\n // let virtqueue = unsafe { alloc(virtqueue_layout) } as *mut Virtqueue;\n\n // unsafe {\n\n // *virtqueue = Virtqueue::new(desc, avail, used);\n\n // }\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 65, "score": 34302.91328335006 }, { "content": "\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).response as *const VirtioGpuCtrlHdr as u64 },\n\n size_of::<VirtioGpuCtrlHdr>() as u32,\n\n VirtqDescFlag::VirtqDescFWrite.val(),\n\n 0,\n\n );\n\n self.write_desc(desc_indexes[1] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n self.send_desc(VirtioGpuQueue::Controlq, desc_indexes);\n\n\n\n let response_type = unsafe { (*req).response.type_ };\n\n\n\n if response_type != VirtioGpuCtrlType::RespOkNodata.val() {\n\n // panic!(\"virtio_gpu: transfer_to_host_2d error {:?}\", response_type);\n\n }\n\n }\n\n\n\n pub fn resource_flush(&mut self, x: u32, y: u32, width: u32, height: u32, resource_id: u32) {\n\n let req =\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 66, "score": 34302.20112666815 }, { "content": " unsafe {\n\n (*ptr).request = request;\n\n (*ptr).mementries = meminfo;\n\n }\n\n ptr\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\nconst VIRTIO_GPU_FLAG_FENCE: usize = 1 << 0;\n\nconst PIXEL_SIZE: u32 = 4;\n\n\n\n#[derive(Copy, Clone)]\n\npub enum VirtioGpuQueue {\n\n Controlq = 0,\n\n Cursorq = 1,\n\n}\n\n\n\npub struct VirtioGpu {\n\n base: usize,\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 67, "score": 34302.08563242231 }, { "content": "\n\n pub fn get_framebuffer(&mut self) -> *mut u8 {\n\n self.framebuffer\n\n }\n\n\n\n pub fn 
init_framebuffer(&mut self, width: u32, height: u32, pixel_size: u32) {\n\n let size = width * pixel_size * height;\n\n let layout = Layout::from_size_align(size as usize, 0x1000).unwrap();\n\n let framebuffer = unsafe { alloc_zeroed(layout) };\n\n self.framebuffer = framebuffer;\n\n }\n\n\n\n pub fn find_free_desc(&mut self) -> u16 {\n\n for (i, is_free) in self.free_desc.iter_mut().enumerate() {\n\n if *is_free {\n\n *is_free = false;\n\n return i as u16;\n\n }\n\n }\n\n\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 68, "score": 34301.8128951973 }, { "content": " self.write_desc(desc_indexes[2] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n self.send_desc(VirtioGpuQueue::Controlq, desc_indexes);\n\n\n\n let response_type = unsafe { (*req).response.type_ };\n\n\n\n if response_type != VirtioGpuCtrlType::RespOkNodata.val() {\n\n panic!(\n\n \"virtio_gpu: resource_attach_backing error {:?}\",\n\n response_type\n\n );\n\n }\n\n }\n\n\n\n pub fn set_scanout(&mut self, width: u32, height: u32, resource_id: u32) {\n\n let req = Request::<VirtioGpuSetScanout, VirtioGpuCtrlHdr>::new(VirtioGpuSetScanout {\n\n hdr: VirtioGpuCtrlHdr {\n\n type_: VirtioGpuCtrlType::CmdSetScanout.val(),\n\n flags: 0,\n\n fence_id: 0,\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 69, "score": 34301.472772043286 }, { "content": "}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuResourceFlush {\n\n hdr: VirtioGpuCtrlHdr,\n\n r: VirtioGpuRect,\n\n resource_id: u32,\n\n padding: u32,\n\n}\n\n\n\n#[repr(C, packed)]\n\npub struct Request<RqT, RpT> {\n\n request: RqT,\n\n response: RpT,\n\n}\n\n\n\nimpl<RqT, RpT> Request<RqT, RpT> {\n\n pub fn new(request: RqT) -> *mut Self {\n\n let size = size_of::<RqT>() + size_of::<RpT>();\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 70, "score": 34300.92870673707 }, { "content": " pub width: u32,\n\n pub height: u32,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, 
packed)]\n\npub struct VirtioGpuDisplayOne {\n\n pub r: VirtioGpuRect,\n\n pub enabled: u32,\n\n pub flags: u32,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuRespDisplayInfo {\n\n pub hdr: VirtioGpuCtrlHdr,\n\n pub pmodes: [VirtioGpuDisplayOne; VIRTIO_GPU_MAX_SCANOUTS],\n\n}\n\n\n\n#[derive(Copy, Clone)]\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 71, "score": 34300.84646200719 }, { "content": " fence_id: 0,\n\n ctx_id: 0,\n\n padding: 0,\n\n });\n\n\n\n let mut desc_indexes = Vec::new();\n\n self.allocate_desc(2, &mut desc_indexes);\n\n\n\n // request desc\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).request as *const VirtioGpuCtrlHdr as u64 },\n\n size_of::<VirtioGpuCtrlHdr>() as u32,\n\n VirtqDescFlag::VirtqDescFNext.val(),\n\n desc_indexes[1],\n\n );\n\n self.write_desc(desc_indexes[0] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n // response desc\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).response as *const VirtioGpuRespDisplayInfo as u64 },\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 72, "score": 34300.50692125438 }, { "content": " y: y,\n\n width: width,\n\n height: height,\n\n },\n\n offset: 0,\n\n resource_id: resource_id,\n\n padding: 0,\n\n },\n\n );\n\n\n\n let mut desc_indexes = Vec::new();\n\n self.allocate_desc(2, &mut desc_indexes);\n\n\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).request as *const VirtioGpuTransferToHost2d as u64 },\n\n size_of::<VirtioGpuTransferToHost2d>() as u32,\n\n VirtqDescFlag::VirtqDescFNext.val(),\n\n desc_indexes[1],\n\n );\n\n self.write_desc(desc_indexes[0] as usize, VirtioGpuQueue::Controlq, desc);\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 73, "score": 34299.99645043143 }, { "content": "\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).request as *const VirtioGpuResourceCreate2d as u64 },\n\n size_of::<VirtioGpuResourceCreate2d>() as u32,\n\n VirtqDescFlag::VirtqDescFNext.val(),\n\n 
desc_indexes[1],\n\n );\n\n self.write_desc(desc_indexes[0] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).response as *const VirtioGpuCtrlHdr as u64 },\n\n size_of::<VirtioGpuCtrlHdr>() as u32,\n\n VirtqDescFlag::VirtqDescFWrite.val(),\n\n 0,\n\n );\n\n self.write_desc(desc_indexes[1] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n self.send_desc(VirtioGpuQueue::Controlq, desc_indexes);\n\n\n\n let response_type = unsafe { (*req).response.type_ };\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 74, "score": 34299.69975230563 }, { "content": "\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).request as *const VirtioGpuResourceFlush as u64 },\n\n size_of::<VirtioGpuResourceFlush>() as u32,\n\n VirtqDescFlag::VirtqDescFNext.val(),\n\n desc_indexes[1],\n\n );\n\n self.write_desc(desc_indexes[0] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).response as *const VirtioGpuCtrlHdr as u64 },\n\n size_of::<VirtioGpuCtrlHdr>() as u32,\n\n VirtqDescFlag::VirtqDescFWrite.val(),\n\n 0,\n\n );\n\n self.write_desc(desc_indexes[1] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n self.send_desc(VirtioGpuQueue::Controlq, desc_indexes);\n\n\n\n // println!(\"resource_flush type: {}\", unsafe { (*req).response.type_ });\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 75, "score": 34299.6629522272 }, { "content": " padding: u32,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuSetScanout {\n\n hdr: VirtioGpuCtrlHdr,\n\n r: VirtioGpuRect,\n\n scanout_id: u32,\n\n resource_id: u32,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuTransferToHost2d {\n\n hdr: VirtioGpuCtrlHdr,\n\n r: VirtioGpuRect,\n\n offset: u64,\n\n resource_id: u32,\n\n padding: u32,\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 76, "score": 34299.65931622133 }, { "content": " 
desc_indexes[1],\n\n );\n\n self.write_desc(desc_indexes[0] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).response as *const VirtioGpuCtrlHdr as u64 },\n\n size_of::<VirtioGpuCtrlHdr>() as u32,\n\n VirtqDescFlag::VirtqDescFWrite.val(),\n\n 0,\n\n );\n\n self.write_desc(desc_indexes[1] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n self.send_desc(VirtioGpuQueue::Controlq, desc_indexes);\n\n\n\n let response_type = unsafe { (*req).response.type_ };\n\n\n\n if response_type != VirtioGpuCtrlType::RespOkNodata.val() {\n\n panic!(\"virtio_gpu: set_scanout error {:?}\", response_type);\n\n }\n\n }\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 77, "score": 34299.45054497756 }, { "content": " hdr: VirtioGpuCtrlHdr,\n\n resource_id: u32,\n\n format: u32,\n\n width: u32,\n\n height: u32,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuResourceAttachBacking {\n\n hdr: VirtioGpuCtrlHdr,\n\n resource_id: u32,\n\n nr_entries: u32,\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuMemEntry {\n\n addr: u64,\n\n length: u32,\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 78, "score": 34299.311329570504 }, { "content": " self.deallocate_desc(&desc_indexes);\n\n\n\n let pm = unsafe { process_manager() };\n\n pm.io_signal(self.pid).expect(\"process\");\n\n pm.signal_semaphore(self.sid).expect(\"process\");\n\n\n\n interrupt_restore(mask);\n\n // println!(\"virtio_gpu pending end\");\n\n }\n\n}\n\n\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 79, "score": 34299.13099942818 }, { "content": " size_of::<VirtioGpuRespDisplayInfo>() as u32,\n\n VirtqDescFlag::VirtqDescFWrite.val(),\n\n 0,\n\n );\n\n self.write_desc(desc_indexes[1] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n // send an request and wait for response\n\n self.send_desc(VirtioGpuQueue::Controlq, desc_indexes);\n\n\n\n let response_type = unsafe { 
(*req).response.hdr.type_ };\n\n\n\n if response_type != VirtioGpuCtrlType::RespOkDisplayInfo.val() {\n\n panic!(\"virtio_gpu: get_display_info error {:?}\", response_type);\n\n }\n\n\n\n unsafe { (*req).response }\n\n }\n\n\n\n // VIRTIO_GPU_CMD_RESOURCE_CREATE_2D\n\n // returns an resource id\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 80, "score": 34298.92541827531 }, { "content": " pub fn val(&self) -> u32 {\n\n *self as u32\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuCtrlHdr {\n\n pub type_: u32,\n\n pub flags: u32,\n\n pub fence_id: u64,\n\n pub ctx_id: u32,\n\n pub padding: u32,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum VirtioGpuCtrlType {\n\n /* 2d commands */\n\n CmdGetDisplayInfo = 0x0100,\n\n CmdResourceCreate2d,\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 81, "score": 34298.89871536534 }, { "content": " RespErrOutOfMemory,\n\n RespErrInvalidScanoutId,\n\n RespErrInvalidResourceId,\n\n RespErrInvalidContextId,\n\n RespErrInvalidParameter,\n\n}\n\n\n\nimpl VirtioGpuCtrlType {\n\n pub fn val(&self) -> u32 {\n\n *self as u32\n\n }\n\n}\n\n\n\nconst VIRTIO_GPU_MAX_SCANOUTS: usize = 16;\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuRect {\n\n pub x: u32,\n\n pub y: u32,\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 82, "score": 34298.845216981535 }, { "content": " size_of::<VirtioGpuResourceAttachBacking>() as u32,\n\n VirtqDescFlag::VirtqDescFNext.val(),\n\n desc_indexes[1],\n\n );\n\n self.write_desc(desc_indexes[0] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).mementries as *const VirtioGpuMemEntry as u64 },\n\n size_of::<VirtioGpuMemEntry>() as u32,\n\n VirtqDescFlag::VirtqDescFNext.val(),\n\n desc_indexes[2],\n\n );\n\n self.write_desc(desc_indexes[1] as usize, VirtioGpuQueue::Controlq, desc);\n\n\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).response 
as *const VirtioGpuCtrlHdr as u64 },\n\n size_of::<VirtioGpuCtrlHdr>() as u32,\n\n VirtqDescFlag::VirtqDescFWrite.val(),\n\n 0,\n\n );\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 83, "score": 34298.40589118252 }, { "content": " pub fn resource_create_2d(&mut self, width: u32, height: u32, format: VirtioGpuFormats) -> u32 {\n\n let req = Request::<VirtioGpuResourceCreate2d, VirtioGpuCtrlHdr>::new(\n\n VirtioGpuResourceCreate2d {\n\n hdr: VirtioGpuCtrlHdr {\n\n type_: VirtioGpuCtrlType::CmdResourceCreate2d.val(),\n\n flags: 0,\n\n fence_id: 0,\n\n ctx_id: 0,\n\n padding: 0,\n\n },\n\n resource_id: self.resource_id,\n\n format: format.val(),\n\n width,\n\n height,\n\n },\n\n );\n\n let res = self.resource_id;\n\n\n\n let mut desc_indexes = Vec::new();\n\n self.allocate_desc(2, &mut desc_indexes);\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 84, "score": 34298.21329944808 }, { "content": " self.base\n\n );\n\n }\n\n\n\n self.write_reg32(VirtioReg::QueueSel.val(), 0);\n\n\n\n if self.read_reg32(VirtioReg::QueueReady.val()) != 0 {\n\n panic!(\"queue is already in use\");\n\n }\n\n\n\n let queue_num_max = self.read_reg32(VirtioReg::QueueNumMax.val());\n\n if queue_num_max == 0 {\n\n panic!(\"queue is not available\");\n\n } else if queue_num_max < (VIRTIO_RING_SIZE as u32) {\n\n panic!(\"QueueNumMax too short\");\n\n }\n\n\n\n self.write_reg32(VirtioReg::QueueNum.val(), VIRTIO_RING_SIZE as u32);\n\n\n\n self.init_virtq(VirtioGpuQueue::Controlq);\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 85, "score": 34297.943815566134 }, { "content": " ctx_id: 0,\n\n padding: 0,\n\n },\n\n r: VirtioGpuRect {\n\n x: 0,\n\n y: 0,\n\n width: width,\n\n height: height,\n\n },\n\n resource_id: resource_id,\n\n scanout_id: 0,\n\n });\n\n\n\n let mut desc_indexes = Vec::new();\n\n self.allocate_desc(2, &mut desc_indexes);\n\n\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).request as *const VirtioGpuSetScanout as u64 },\n\n 
size_of::<VirtioGpuSetScanout>() as u32,\n\n VirtqDescFlag::VirtqDescFNext.val(),\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 86, "score": 34297.93991491751 }, { "content": "pub enum VirtioGpuFormats {\n\n B8G8R8A8Unorm = 1,\n\n B8G8R8X8Unorm = 2,\n\n A8R8G8B8Unorm = 3,\n\n X8R8G8B8Unorm = 4,\n\n R8G8B8A8Unorm = 67,\n\n X8B8G8R8Unorm = 68,\n\n A8B8G8R8Unorm = 121,\n\n R8G8B8X8Unorm = 134,\n\n}\n\n\n\nimpl VirtioGpuFormats {\n\n pub fn val(&self) -> u32 {\n\n *self as u32\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\n#[repr(C, packed)]\n\npub struct VirtioGpuResourceCreate2d {\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 87, "score": 34297.86117999624 }, { "content": " // virtio_gpu settings\n\n let display_info = self.get_display_info();\n\n self.width = display_info.pmodes[0].r.width;\n\n self.height = display_info.pmodes[0].r.height;\n\n\n\n let resource_id =\n\n self.resource_create_2d(self.width, self.height, VirtioGpuFormats::R8G8B8A8Unorm);\n\n\n\n self.init_framebuffer(self.width, self.height, PIXEL_SIZE);\n\n\n\n self.resource_attach_backing(self.width, self.height, PIXEL_SIZE, resource_id);\n\n\n\n self.set_scanout(self.width, self.height, resource_id);\n\n\n\n pm.signal_semaphore(self.sid).expect(\"process\");\n\n }\n\n\n\n pub fn get_pixel_size(&mut self) -> u32 {\n\n PIXEL_SIZE\n\n }\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 88, "score": 34297.72926234249 }, { "content": "\n\n pub fn transfer_to_host_2d(\n\n &mut self,\n\n x: u32,\n\n y: u32,\n\n width: u32,\n\n height: u32,\n\n resource_id: u32,\n\n ) {\n\n let req = Request::<VirtioGpuTransferToHost2d, VirtioGpuCtrlHdr>::new(\n\n VirtioGpuTransferToHost2d {\n\n hdr: VirtioGpuCtrlHdr {\n\n type_: VirtioGpuCtrlType::CmdTransferToHost2d.val(),\n\n flags: 0,\n\n fence_id: 0,\n\n ctx_id: 0,\n\n padding: 0,\n\n },\n\n r: VirtioGpuRect {\n\n x: x,\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 89, "score": 34297.68023486885 
}, { "content": " flags: 0,\n\n fence_id: 0,\n\n ctx_id: 0,\n\n padding: 0,\n\n },\n\n resource_id: resource_id,\n\n nr_entries: 1,\n\n },\n\n VirtioGpuMemEntry {\n\n addr: self.framebuffer as u64,\n\n length: width * pixel_size * height,\n\n padding: 0,\n\n },\n\n );\n\n\n\n let mut desc_indexes = Vec::new();\n\n self.allocate_desc(3, &mut desc_indexes);\n\n\n\n let desc = VirtqDesc::new(\n\n unsafe { &(*req).request as *const VirtioGpuResourceAttachBacking as u64 },\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 90, "score": 34297.64547947766 }, { "content": " let layout = Layout::from_size_align(size, 8).unwrap();\n\n let ptr = unsafe { alloc(layout) } as *mut Self;\n\n unsafe {\n\n (*ptr).request = request;\n\n }\n\n ptr\n\n }\n\n}\n\n\n\npub struct Request3<RqT, RmT, RpT> {\n\n request: RqT,\n\n mementries: RmT,\n\n response: RpT,\n\n}\n\n\n\nimpl<RqT, RmT, RpT> Request3<RqT, RmT, RpT> {\n\n pub fn new(request: RqT, meminfo: RmT) -> *mut Self {\n\n let size = size_of::<RqT>() + size_of::<RmT>() + size_of::<RpT>();\n\n let layout = Layout::from_size_align(size, 8).unwrap();\n\n let ptr = unsafe { alloc(layout) } as *mut Self;\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 91, "score": 34297.60616219451 }, { "content": "\n\n if response_type != VirtioGpuCtrlType::RespOkNodata.val() {\n\n panic!(\"virtio_gpu: resource_create_2d error {:?}\", response_type);\n\n }\n\n\n\n res\n\n }\n\n\n\n pub fn resource_attach_backing(\n\n &mut self,\n\n width: u32,\n\n height: u32,\n\n pixel_size: u32,\n\n resource_id: u32,\n\n ) {\n\n let req =\n\n Request3::<VirtioGpuResourceAttachBacking, VirtioGpuMemEntry, VirtioGpuCtrlHdr>::new(\n\n VirtioGpuResourceAttachBacking {\n\n hdr: VirtioGpuCtrlHdr {\n\n type_: VirtioGpuCtrlType::CmdResourceAttachBacking.val(),\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 92, "score": 34297.06782899939 }, { "content": " Request::<VirtioGpuResourceFlush, 
VirtioGpuCtrlHdr>::new(VirtioGpuResourceFlush {\n\n hdr: VirtioGpuCtrlHdr {\n\n type_: VirtioGpuCtrlType::CmdResourceFlush.val(),\n\n flags: 0,\n\n fence_id: 0,\n\n ctx_id: 0,\n\n padding: 0,\n\n },\n\n r: VirtioGpuRect {\n\n x: x,\n\n y: y,\n\n width: width,\n\n height: height,\n\n },\n\n resource_id: resource_id,\n\n padding: 0,\n\n });\n\n\n\n let mut desc_indexes = Vec::new();\n\n self.allocate_desc(2, &mut desc_indexes);\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 93, "score": 34296.191661355886 }, { "content": " panic!(\"unrecognized virtio device: {:#018x}\", self.base);\n\n }\n\n\n\n let mut status_bits: u32 = 0;\n\n status_bits |= VirtioDeviceStatus::Acknowoledge.val();\n\n self.write_reg32(VirtioReg::Status.val(), status_bits);\n\n\n\n status_bits |= VirtioDeviceStatus::Driver.val();\n\n self.write_reg32(VirtioReg::Status.val(), status_bits);\n\n\n\n let features = self.read_reg32(VirtioReg::DeviceFeatures.val());\n\n self.write_reg32(VirtioReg::DeviceFeatures.val(), features);\n\n\n\n status_bits |= VirtioDeviceStatus::FeaturesOk.val();\n\n self.write_reg32(VirtioReg::Status.val(), status_bits);\n\n\n\n if self.read_reg32(VirtioReg::Status.val()) & VirtioDeviceStatus::FeaturesOk.val() == 0 {\n\n self.write_reg32(VirtioReg::Status.val(), VirtioDeviceStatus::Failed.val());\n\n panic!(\n\n \"virtio-blk({:#018x}) does not support the required features\",\n", "file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 94, "score": 34289.91316361349 }, { "content": " CmdResourceUnref,\n\n CmdSetScanout,\n\n CmdResourceFlush,\n\n CmdTransferToHost2d,\n\n CmdResourceAttachBacking,\n\n CmdResourceDetachBacking,\n\n CmdGetCapsetInfo,\n\n CmdGetCapset,\n\n CmdGetEdid,\n\n /* cursor commands */\n\n CmdUpdateCursor = 0x0300,\n\n CmdMoveCursor,\n\n /* success responses */\n\n RespOkNodata = 0x1100,\n\n RespOkDisplayInfo,\n\n RespOkCapsetInfo,\n\n RespOkCapset,\n\n RespOkEdid,\n\n /* error responses */\n\n RespErrUnspec = 0x1200,\n", 
"file_path": "src/arch/riscv64/virtio/virtio_gpu.rs", "rank": 95, "score": 34288.31922169788 }, { "content": "\n\n self.repopulate_event(elem.id as usize);\n\n\n\n self.event_ack_used_index = self.event_ack_used_index.wrapping_add(1);\n\n unsafe {\n\n let desc = desc.add(elem.id as usize).as_mut().unwrap();\n\n let event = (desc.addr as *mut VirtioInputEvent).as_mut().unwrap();\n\n // println!(\"event: {:?}\", event);\n\n self.event_queue.push_back(*event);\n\n }\n\n }\n\n\n\n let pm = unsafe { process_manager() };\n\n match self.device_type {\n\n DeviceType::Mouse => pm.event_signal(ProcessEvent::MouseEvent).expect(\"process\"),\n\n DeviceType::Keyboard => pm\n\n .event_signal(ProcessEvent::KeyboardEvent)\n\n .expect(\"process\"),\n\n }\n\n\n\n interrupt_restore(mask);\n\n }\n\n}\n\n\n", "file_path": "src/arch/riscv64/virtio/virtio_input.rs", "rank": 98, "score": 28.742269614283902 }, { "content": "use super::init::*;\n\nuse crate::*;\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn start() {\n\n init_all();\n\n println!(\"Hello, world!\");\n\n loop {}\n\n}", "file_path": "src/arch/aarch64/start.rs", "rank": 99, "score": 25.6482598567085 } ]
Rust
src/main.rs
Shirataki2/atcoder-discord-bot
236023c7d2a29d09c62785249230eddea92ceeae
#![warn(clippy::all)] mod commands; mod tasks; mod models; mod database; mod utils; mod data; mod event_handler; mod error; mod http; #[macro_use] extern crate log; #[macro_use] extern crate sqlx; use crate::{ utils::send_error, data::{DatabasePool, ShardManagerContainer, SubmissionQueue}, event_handler::Handler, models::{account::Account as DiscordAccount, submission::Submission} }; use commands::{ help::*, general::{invite::*, source::*}, account::{register::*, unregister::*, subscribe::*, unsubscribe::*}, settings::{start::*, stop::*}, stat::{streak::*, problem_count::*, point_sum::*}, }; use std::{collections::*, env, sync::Arc}; use serenity::{ prelude::*, http::Http, framework::{ StandardFramework, standard::{ macros::{group, hook}, DispatchError, CommandResult } }, model::{ channel::Message, }, }; use dotenv::dotenv; #[hook] async fn before(_: &Context, msg: &Message, command_name: &str) -> bool { info!("Get command `{}` by user {}({})", command_name, msg.author.name, msg.author.id); true } #[hook] async fn after(ctx: &Context, msg: &Message, command_name: &str, error: CommandResult) { if let Err(why) = &error { error!("Error while running command {}", &command_name); error!("{:?}", &error); if send_error(ctx, msg, "Error!", format!("{}", why).as_str()).await.is_err() { error!( "Unable to send messages on channel id {}", &msg.channel_id.0 ); }; } } #[hook] #[allow(clippy::useless_format)] async fn on_dispatch_error(ctx: &Context, msg: &Message, error: DispatchError) { match error { DispatchError::NotEnoughArguments { min, given } => { let description = match (min, given) { (1, 0) => format!("This command requires one argument"), (m, 0) => format!("This command requires at least {} arguments to run", m), (m, g) => format!("This command requires at least {} arguments, but you give {} arguments", m, g), }; let _ = send_error(ctx, msg, "Not Enough Arguments!", &description).await; } DispatchError::OnlyForGuilds => { let description = format!("This command does 
not work on DM."); let _ = send_error(ctx, msg, "Only For Guilds!", &description).await; } DispatchError::Ratelimited(dur) => { let description = format!("You cannot run this command for {} seconds.", dur.as_secs()); let _ = send_error(ctx, msg, "Rate Limited!", &description).await; } DispatchError::LackingPermissions(perms) => { let description = format!("This command requires `{}` permission(s).", perms); let _ = send_error(ctx, msg, "Permission Error!", &description).await; } _ => { error!("Unhandled dispatch error: {:?}", error); } } } #[group] #[commands(invite, source)] struct General; #[group] #[commands(register, unregister, subscribe, unsubscribe)] struct Account; #[group] #[commands(start, stop)] struct Settings; #[group] #[commands(streak, problem, point)] struct Ranking; #[tokio::main(flavor = "multi_thread", worker_threads = 8)] async fn main() { if let Err(e) = dotenv() { warn!("Failed to load .env: {:?}", e); } env_logger::init(); let token = env::var("DISCORD_TOKEN") .expect("Expected a token in the environment (DISCORD_TOKEN)"); let http = Http::new_with_token(&token); let owners = http.get_current_application_info() .await .map(|info| { let mut map = HashSet::new(); map.insert(info.owner.id); map }) .unwrap(); let framework = StandardFramework::new() .configure(|c| c.owners(owners).prefix("^")) .bucket("basic", |b| b.delay(1).time_span(5).limit(5)).await .bucket("account", |b| b.delay(3).time_span(10).limit(2)).await .before(before) .after(after) .on_dispatch_error(on_dispatch_error) .help(&MY_HELP) .group(&GENERAL_GROUP) .group(&SETTINGS_GROUP) .group(&ACCOUNT_GROUP) .group(&RANKING_GROUP); let mut client = Client::builder(&token) .framework(framework) .event_handler(Handler::new()) .await .expect("Failed to create client."); { let mut data = client.data.write().await; data.insert::<ShardManagerContainer>(client.shard_manager.clone()); let pg_pool = database::create_pgpool().await.expect("Failed to connect database"); 
data.insert::<DatabasePool>(pg_pool); let map = Arc::new(Mutex::new(HashMap::<i64, VecDeque<(DiscordAccount, Submission)>>::new())); data.insert::<SubmissionQueue>(map); } let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::ctrl_c().await.expect("Failed to register SIGINT handler"); shard_manager.lock().await.shutdown_all().await; }); if let Err(why) = client.start().await { error!("Client error: {:?}", why); } }
#![warn(clippy::all)] mod commands; mod tasks; mod models; mod database; mod utils; mod data; mod event_handler; mod error; mod http; #[macro_use] extern crate log; #[macro_use] extern crate sqlx; use crate::{ utils::send_error, data::{DatabasePool, ShardManagerContainer, SubmissionQueue}, event_handler::Handler, models::{account::Account as DiscordAccount, submission::Submission} }; use commands::{ help::*, general::{invite::*, source::*}, account::{register::*, unregister::*, subscribe::*, unsubscribe::*}, settings::{start::*, stop::*}, stat::{streak::*, problem_count::*, point_sum::*}, }; use std::{collections::*, env, sync::Arc}; use serenity::{ prelude::*, http::Http, framework::{ StandardFramework, standard::{ macros::{group, hook}, DispatchError, CommandResult } }, model::{ channel::Message, }, }; use dotenv::dotenv; #[hook] async fn before(_: &Context, msg: &Message, command_name: &str) -> bool { info!("Get command `{}` by user {}({})", command_name, msg.author.name, msg.author.id); true } #[hook] async fn after(ctx: &Context, msg: &Message, command_name: &str, error: CommandResult) { if let Err(why) = &error { error!("Error while running command {}", &command_name); error!("{:?}", &error); if send_error(ctx, msg, "Error!", format!("{}", why).as_str()).await.is_err() { error!( "Unable to send messages on channel id {}", &msg.channel_id.0 ); }; } } #[hook] #[allow(clippy::useless_format)] async fn on_dispatch_error(ctx: &Context, msg: &Message, error: DispatchError) { match error { DispatchError::NotEnoughArguments { min, given } => { let description = match (min, given) { (1, 0) => format!("This command requires one argument"), (
#[group] #[commands(invite, source)] struct General; #[group] #[commands(register, unregister, subscribe, unsubscribe)] struct Account; #[group] #[commands(start, stop)] struct Settings; #[group] #[commands(streak, problem, point)] struct Ranking; #[tokio::main(flavor = "multi_thread", worker_threads = 8)] async fn main() { if let Err(e) = dotenv() { warn!("Failed to load .env: {:?}", e); } env_logger::init(); let token = env::var("DISCORD_TOKEN") .expect("Expected a token in the environment (DISCORD_TOKEN)"); let http = Http::new_with_token(&token); let owners = http.get_current_application_info() .await .map(|info| { let mut map = HashSet::new(); map.insert(info.owner.id); map }) .unwrap(); let framework = StandardFramework::new() .configure(|c| c.owners(owners).prefix("^")) .bucket("basic", |b| b.delay(1).time_span(5).limit(5)).await .bucket("account", |b| b.delay(3).time_span(10).limit(2)).await .before(before) .after(after) .on_dispatch_error(on_dispatch_error) .help(&MY_HELP) .group(&GENERAL_GROUP) .group(&SETTINGS_GROUP) .group(&ACCOUNT_GROUP) .group(&RANKING_GROUP); let mut client = Client::builder(&token) .framework(framework) .event_handler(Handler::new()) .await .expect("Failed to create client."); { let mut data = client.data.write().await; data.insert::<ShardManagerContainer>(client.shard_manager.clone()); let pg_pool = database::create_pgpool().await.expect("Failed to connect database"); data.insert::<DatabasePool>(pg_pool); let map = Arc::new(Mutex::new(HashMap::<i64, VecDeque<(DiscordAccount, Submission)>>::new())); data.insert::<SubmissionQueue>(map); } let shard_manager = client.shard_manager.clone(); tokio::spawn(async move { tokio::signal::ctrl_c().await.expect("Failed to register SIGINT handler"); shard_manager.lock().await.shutdown_all().await; }); if let Err(why) = client.start().await { error!("Client error: {:?}", why); } }
m, 0) => format!("This command requires at least {} arguments to run", m), (m, g) => format!("This command requires at least {} arguments, but you give {} arguments", m, g), }; let _ = send_error(ctx, msg, "Not Enough Arguments!", &description).await; } DispatchError::OnlyForGuilds => { let description = format!("This command does not work on DM."); let _ = send_error(ctx, msg, "Only For Guilds!", &description).await; } DispatchError::Ratelimited(dur) => { let description = format!("You cannot run this command for {} seconds.", dur.as_secs()); let _ = send_error(ctx, msg, "Rate Limited!", &description).await; } DispatchError::LackingPermissions(perms) => { let description = format!("This command requires `{}` permission(s).", perms); let _ = send_error(ctx, msg, "Permission Error!", &description).await; } _ => { error!("Unhandled dispatch error: {:?}", error); } } }
function_block-function_prefix_line
[ { "content": "pub fn unknown_error() -> CommandError {\n\n CommandError::from(\"Unknown error has occurred.\\n\\\n\n If you get this error repeatedly, please contact `[email protected]`.\")\n\n}\n", "file_path": "src/utils.rs", "rank": 0, "score": 111059.39148688925 }, { "content": "fn create_client() -> Result<reqwest::Client, reqwest::Error> {\n\n reqwest::Client::builder().gzip(true).build()\n\n}\n\n\n\npub async fn get_user_submissions(user_name: &str) -> Result<Vec<RawSubmission>, AppError> {\n\n let url = format!(\"{}/atcoder-api/results?user={}\", API_ENDPOINT, user_name);\n\n debug!(\"GET: {}\", url);\n\n let client = create_client()?;\n\n let resp = client.get(&url)\n\n .send()\n\n .await?\n\n .json::<Vec<RawSubmission>>()\n\n .await?;\n\n Ok(resp)\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 1, "score": 84800.91641350523 }, { "content": "type ExecuteResult = Result<sqlx::postgres::PgDone, AppError>;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct UserStat {\n\n pub atcoder_id: String,\n\n pub streak: i32,\n\n pub problem_count: i32,\n\n pub point_sum: f64,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct StreakData {\n\n pub user_id: String,\n\n pub streak: i32,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct ProblemCountData {\n\n pub user_id: String,\n\n pub problem_count: i32,\n", "file_path": "src/models/user_stat.rs", "rank": 2, "score": 81760.83008849576 }, { "content": "type Error = Box<dyn std::error::Error + Send + Sync>;\n\n\n\npub async fn create_pgpool() -> Result<PgPool, Error> {\n\n let pg_url = env::var(\"DATABASE_URL\")\n\n .expect(\"Expected a database url in the environment (DATABASE_URL)\");\n\n let pool = PgPoolOptions::new()\n\n .max_connections(20)\n\n .connect(&pg_url)\n\n .compat()\n\n .await\n\n .unwrap();\n\n Ok(pool)\n\n}", "file_path": "src/database.rs", "rank": 3, "score": 80777.63791862366 }, { "content": "type ExecuteResult = Result<sqlx::postgres::PgDone, AppError>;\n\n\n\n#[derive(Debug, 
Clone)]\n\npub struct Guild {\n\n pub id: i64,\n\n pub channel_id: Option<i64>,\n\n}\n\n\n\nimpl Guild {\n\n pub async fn get(pool: &sqlx::PgPool, id: i64) -> Result<Self, sqlx::Error> {\n\n let guild = query_as!(\n\n Self, \"SELECT * FROM guild WHERE id = $1\", id\n\n )\n\n .fetch_one(pool)\n\n .compat()\n\n .await?;\n\n Ok(guild)\n\n }\n\n\n\n pub async fn create(pool: &sqlx::PgPool, id: i64, channel_id: Option<i64>) -> ExecuteResult {\n", "file_path": "src/models/guild.rs", "rank": 4, "score": 65922.62045576499 }, { "content": "type ExecuteResult = Result<sqlx::postgres::PgDone, AppError>;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Account {\n\n pub id: i64,\n\n pub atcoder_id: String,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct GuildAccounts {\n\n pub id: i32,\n\n pub guild_id: i64,\n\n pub account_id: i64,\n\n}\n\n\n\nimpl Account {\n\n pub async fn list(pool: &sqlx::PgPool) -> Result<Vec<Self>, AppError> {\n\n let accounts = query_as!(\n\n Self, \"SELECT * FROM account;\"\n\n )\n", "file_path": "src/models/account.rs", "rank": 5, "score": 65922.62045576499 }, { "content": "pub mod ac_fetcher;\n\npub mod submitter;\n\npub mod stat_updater;", "file_path": "src/tasks/mod.rs", "rank": 6, "score": 54310.33498028523 }, { "content": "pub mod guild;\n\npub mod account;\n\npub mod submission;\n\npub mod user_stat;\n", "file_path": "src/models/mod.rs", "rank": 7, "score": 53677.050829238506 }, { "content": "use std::collections::HashSet;\n\n\n\nuse serenity::{\n\n prelude::*,\n\n framework::standard::{\n\n Args, CommandResult, CommandGroup,\n\n HelpOptions, help_commands,\n\n macros::help,\n\n },\n\n model::{\n\n channel::Message,\n\n id::UserId,\n\n },\n\n utils::Colour\n\n};\n\n\n\n\n\n#[help]\n\n#[individual_command_tip(r\"If you want more information about a specific command, just pass the command as argument.\n\n\n", "file_path": "src/commands/help.rs", "rank": 8, "score": 53049.89336210161 }, { "content": "AtCoder Problems API is used internally, and 
notifications are sent 2-5 minutes later, depending on how often the API is updated.\n\n\")]\n\n#[command_not_found_text = \"Could not find: `{}`.\"]\n\n#[strikethrough_commands_tip_in_dm = \"~~`Strikethrough commands`~~ are unavailabe because the bot is unable to run them.\"]\n\n#[strikethrough_commands_tip_in_guild = \"~~`Strikethrough commands`~~ are unavailabe because the bot is unable to run them.\"]\n\n#[max_levenshtein_distance(3)]\n\n#[lacking_permissions = \"Nothing\"]\n\n#[wrong_channel = \"Strike\"]\n\npub async fn my_help(\n\n context: &Context,\n\n msg: &Message,\n\n args: Args,\n\n help_options: &'static HelpOptions,\n\n groups: &[&'static CommandGroup],\n\n owners: HashSet<UserId>\n\n) -> CommandResult {\n\n let mut ho = help_options.clone();\n\n ho.embed_error_colour = Colour::from_rgb(255, 30, 30);\n\n ho.embed_success_colour = Colour::from_rgb(141, 91, 255);\n\n let _ = help_commands::with_embeds(context, msg, args, &ho, groups, owners).await;\n\n Ok(())\n\n}\n", "file_path": "src/commands/help.rs", "rank": 9, "score": 53040.29549666182 }, { "content": "This bot will send your AtCoder submissions to the Discord channel.\n\n\n\n日本語のヘルプは[GitHubのREADME](https://github.com/Shirataki2/atcoder-discord-bot/blob/main/README.md)をご参照ください.\n\n\n\n**Usage**\n\n\n\n__1. Install this Bot __\n\n\n\nYou can install it [here](https://discord.com/api/oauth2/authorize?client_id=801783771526856704&permissions=126016&scope=bot).\n\n\n\n__2. Run `^start` command on the channel you want to receive notifications from __\n\n\n\nTo prevent abuse, only users with `message management` permission can execute this command.\n\n\n\n__3. Run `^register <your_atcoder_id>` to link your AtCoder ID to your Discord user data__\n\n\n\nAt this step, no AC will be sent.\n\n\n\n__4. 
Run `^subscribe` to receive AC information__\n\n\n", "file_path": "src/commands/help.rs", "rank": 10, "score": 53034.647123239236 }, { "content": "pub mod general;\n\npub mod account;\n\npub mod settings;\n\npub mod help;\n\npub mod stat;", "file_path": "src/commands/mod.rs", "rank": 11, "score": 52842.232335596746 }, { "content": "}\n\n\n\n#[derive(Debug, Deserialize, Clone)]\n\npub struct PointSumData {\n\n pub user_id: String,\n\n pub point_sum: f64,\n\n}\n\n\n\nimpl UserStat {\n\n pub async fn get(pool: &sqlx::PgPool, atcoder_id: &str) -> Result<Self, AppError> {\n\n let account = query_as!(\n\n Self, \"SELECT * FROM user_stat WHERE atcoder_id = $1\", atcoder_id\n\n )\n\n .fetch_one(pool)\n\n .compat()\n\n .await?;\n\n Ok(account)\n\n }\n\n\n\n pub async fn create(pool: &sqlx::PgPool, atcoder_id: &str) -> ExecuteResult {\n", "file_path": "src/models/user_stat.rs", "rank": 12, "score": 50966.4698722035 }, { "content": " query!(\n\n \"INSERT INTO user_stat(atcoder_id) VALUES ($1)\", atcoder_id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn set_streak(pool: &sqlx::PgPool, atcoder_id: &str, streak: &i32) -> ExecuteResult {\n\n query!(\n\n \"UPDATE user_stat SET streak = $1 WHERE atcoder_id = $2\", streak, atcoder_id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn set_problem_count(pool: &sqlx::PgPool, atcoder_id: &str, problem_count: &i32) -> ExecuteResult {\n", "file_path": "src/models/user_stat.rs", "rank": 13, "score": 50962.8724980228 }, { "content": " query!(\n\n \"UPDATE user_stat SET problem_count = $1 WHERE atcoder_id = $2\", problem_count, atcoder_id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn set_point_sum(pool: &sqlx::PgPool, atcoder_id: &str, point_sum: &f64) -> ExecuteResult {\n\n query!(\n\n \"UPDATE user_stat SET point_sum = $1 WHERE atcoder_id = $2\", point_sum, 
atcoder_id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n}\n", "file_path": "src/models/user_stat.rs", "rank": 14, "score": 50961.12102536159 }, { "content": "use tokio_compat_02::FutureExt;\n\nuse serde::Deserialize;\n\nuse crate::error::AppError;\n\n\n", "file_path": "src/models/user_stat.rs", "rank": 15, "score": 50957.74134290546 }, { "content": "use serenity::prelude::*;\n\nuse serenity::model::prelude::*;\n\nuse serenity::framework::standard::{\n\n CommandResult,\n\n macros::command,\n\n};\n\n\n\n#[command]\n\n#[description(\n\n \"Get link of source code\"\n\n)]\n\n#[usage(\"\")]\n\npub async fn source(ctx: &Context, msg: &Message) -> CommandResult {\n\n msg.channel_id.say(&ctx.http, \"https://github.com/Shirataki2/atcoder-discord-bot\").await?;\n\n Ok(())\n\n}\n", "file_path": "src/commands/general/source.rs", "rank": 16, "score": 50455.02564297294 }, { "content": "use serenity::{\n\n model::prelude::*,\n\n prelude::*,\n\n framework::standard::{\n\n CommandResult,\n\n macros::command,\n\n }\n\n};\n\n\n\nuse crate::{\n\n models::account::Account,\n\n data::DatabasePool,\n\n utils::unknown_error,\n\n};\n\n\n\n#[command]\n\n#[only_in(\"guild\")]\n\n#[aliases(\"ur\")]\n\n#[description(\n\n \"Delete your account of this bot.\"\n", "file_path": "src/commands/account/unregister.rs", "rank": 17, "score": 50401.99678599061 }, { "content": ")]\n\n#[example(\"\")]\n\n#[bucket(\"account\")]\n\npub async fn unregister(ctx: &Context, msg: &Message) -> CommandResult {\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n if let Ok(account) = Account::get(&pool, msg.author.id.0 as i64).await {\n\n \n\n if let Err(e) = Account::delete(&pool, msg).await {\n\n error!(\"Failed to delete account: {:?}\", e);\n\n return Err(unknown_error())\n\n }\n\n info!(\"Delete Account: {}\", &account.atcoder_id);\n\n let _ = msg.reply(ctx, \"Delete your registration!\").await;\n\n \n\n } 
else {\n\n\n\n let _ = msg.reply(ctx, \"You have not registered yet!\").await;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/commands/account/unregister.rs", "rank": 18, "score": 50389.427299684205 }, { "content": "use serenity::{\n\n model::prelude::*,\n\n prelude::*,\n\n framework::standard::{\n\n CommandResult,\n\n macros::command,\n\n }\n\n};\n\n\n\nuse crate::{\n\n models::guild::Guild,\n\n data::DatabasePool,\n\n utils::send_error,\n\n};\n\n\n\n#[command]\n\n#[only_in(\"guild\")]\n\n#[required_permissions(\"MANAGE_MESSAGES\")]\n\n#[description(\n\n \"Stop sending notifications.\"\n", "file_path": "src/commands/settings/stop.rs", "rank": 19, "score": 50367.93317854264 }, { "content": "use serenity::{\n\n model::prelude::*,\n\n prelude::*,\n\n framework::standard::{\n\n CommandResult,\n\n CommandError,\n\n macros::command,\n\n }\n\n};\n\n\n\nuse crate::{\n\n models::account::Account,\n\n data::DatabasePool,\n\n utils::*\n\n};\n\n\n\n#[command]\n\n#[aliases(\"s\")]\n\n#[only_in(\"guild\")]\n\n#[description(\n", "file_path": "src/commands/account/subscribe.rs", "rank": 20, "score": 50359.79154314297 }, { "content": "use serenity::{\n\n model::prelude::*,\n\n prelude::*,\n\n framework::standard::{\n\n CommandResult,\n\n CommandError,\n\n macros::command,\n\n }\n\n};\n\n\n\nuse crate::{\n\n models::account::Account,\n\n data::DatabasePool,\n\n utils::*\n\n};\n\n\n\n#[command]\n\n#[aliases(\"u\")]\n\n#[only_in(\"guild\")]\n\n#[description(\n", "file_path": "src/commands/account/unsubscribe.rs", "rank": 21, "score": 50359.79154314297 }, { "content": ")]\n\n#[usage(\"[channel_name]\")]\n\n#[example(\"\")]\n\n#[example(\"#general\")]\n\n#[bucket(\"account\")]\n\npub async fn stop(ctx: &Context, msg: &Message) -> CommandResult {\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n let guild_id = msg.guild_id.unwrap().0 as i64;\n\n match Guild::get(&pool, guild_id).await {\n\n Ok(_) => {\n\n if let Err(err) = 
Guild::unset_channel(&pool, guild_id).await {\n\n error!(\"Failed to change channel: {:?}\", err);\n\n let _ = send_error(ctx, msg, \"Internal Error!\", REGISTRATION_ERROR).await;\n\n return Ok(())\n\n }\n\n let description = \"Notification successfully stopped!\";\n\n let _ = msg.reply(&ctx, description).await;\n", "file_path": "src/commands/settings/stop.rs", "rank": 22, "score": 50354.70256480928 }, { "content": " \"Disable notifications in this server.\"\n\n)]\n\n#[usage(\"\")]\n\n#[bucket(\"account\")]\n\npub async fn unsubscribe(ctx: &Context, msg: &Message) -> CommandResult {\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n if Account::get(&pool, msg.author.id.0 as i64).await.is_ok() {\n\n match Account::is_subscribed(&pool, msg).await {\n\n Ok(false) => {\n\n let _ = msg.reply(&ctx, \"You have not subscribed yet!\").await;\n\n },\n\n Ok(true) => {\n\n if let Err(e) = Account::unsubscribe(&pool, msg).await {\n\n error!(\"Database Error: {:?}\", e);\n\n return Err(unknown_error()); \n\n }\n\n let _ = msg.reply(&ctx, \"Successfully unsubscribed!\").await;\n", "file_path": "src/commands/account/unsubscribe.rs", "rank": 23, "score": 50352.27626224153 }, { "content": " \"Enable notifications in this server when you get an AC from AtCoder.\"\n\n)]\n\n#[usage(\"\")]\n\n#[bucket(\"account\")]\n\npub async fn subscribe(ctx: &Context, msg: &Message) -> CommandResult {\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n if Account::get(&pool, msg.author.id.0 as i64).await.is_ok() {\n\n match Account::is_subscribed(&pool, msg).await {\n\n Ok(true) => {\n\n let _ = msg.reply(&ctx, \"You are already subscribed!\").await;\n\n },\n\n Ok(false) => {\n\n if let Err(e) = Account::subscribe(&pool, msg).await {\n\n error!(\"Database Error: {:?}\", e);\n\n return Err(unknown_error()); \n\n }\n\n let _ = msg.reply(&ctx, \"Successfully 
subscribed!\").await;\n", "file_path": "src/commands/account/subscribe.rs", "rank": 24, "score": 50349.91505530475 }, { "content": " },\n\n Err(e) => {\n\n error!(\"Database Error: {:?}\", e);\n\n return Err(unknown_error()); \n\n }\n\n }\n\n \n\n Ok(())\n\n } else {\n\n Err(CommandError::from(\"You have not run registration yet!\\n\\n\\\n\n First, please enter your AtCoder ID with the `register` command.\"))\n\n }\n\n}\n", "file_path": "src/commands/account/subscribe.rs", "rank": 25, "score": 50338.57695256344 }, { "content": " },\n\n Err(e) => {\n\n error!(\"Database Error: {:?}\", e);\n\n return Err(unknown_error()); \n\n }\n\n }\n\n \n\n Ok(())\n\n } else {\n\n Err(CommandError::from(\"You have not run registration yet!\\n\\n\\\n\n First, please enter your AtCoder ID with the `register` command.\"))\n\n }\n\n}\n", "file_path": "src/commands/account/unsubscribe.rs", "rank": 26, "score": 50338.57695256344 }, { "content": " }\n\n Err(err) => {\n\n error!(\"Failed to get guild: {:?}\", err);\n\n let _ = send_error(ctx, msg, \"Internal Error!\", REGISTRATION_ERROR).await;\n\n }\n\n };\n\n Ok(())\n\n}\n\n\n\nconst REGISTRATION_ERROR: &str = \"Your registration to the database has not been completed because the internal process did not finish successfully.\\\n\nPlease report the details of the error to `[email protected]`.\";\n", "file_path": "src/commands/settings/stop.rs", "rank": 27, "score": 50337.151474202736 }, { "content": "pub mod register;\n\npub mod unregister;\n\npub mod subscribe;\n\npub mod unsubscribe;", "file_path": "src/commands/account/mod.rs", "rank": 28, "score": 50168.68596161542 }, { "content": "pub mod adder;\n\npub mod source;\n\npub mod invite;", "file_path": "src/commands/general/mod.rs", "rank": 29, "score": 50162.63000450239 }, { "content": "pub mod start;\n\npub mod stop;\n", "file_path": "src/commands/settings/mod.rs", "rank": 30, "score": 50162.6120343715 }, { "content": "pub mod streak;\n\npub mod point_sum;\n\npub mod problem_count;", 
"file_path": "src/commands/stat/mod.rs", "rank": 31, "score": 50159.390908065165 }, { "content": "-- Add migration script here\n\nCREATE TABLE user_stat\n\n(\n\n atcoder_id TEXT NOT NULL,\n\n streak INT DEFAULT 0 NOT NULL,\n\n problem_count INT DEFAULT 0 NOT NULL,\n\n point_sum DOUBLE PRECISION DEFAULT 0.0 NOT NULL,\n\n CONSTRAINT user_stat_pk PRIMARY KEY (atcoder_id)\n\n);\n", "file_path": "migrations/20210224165429_user_stat.sql", "rank": 32, "score": 31579.673951262565 }, { "content": "use crate::models::{account::Account, submission::Submission};\n\nuse std::sync::Arc;\n\nuse std::collections::{HashMap, VecDeque};\n\nuse serenity::{\n\n prelude::{TypeMapKey, Mutex},\n\n client::bridge::gateway::ShardManager,\n\n};\n\n\n\npub struct ShardManagerContainer;\n\n\n\nimpl TypeMapKey for ShardManagerContainer {\n\n type Value = Arc<Mutex<ShardManager>>;\n\n}\n\n\n\npub struct DatabasePool;\n\n\n\nimpl TypeMapKey for DatabasePool {\n\n type Value = sqlx::PgPool;\n\n}\n\n\n\npub struct SubmissionQueue;\n\n\n\nimpl TypeMapKey for SubmissionQueue {\n\n type Value = Arc<Mutex<HashMap<i64, VecDeque<(Account, Submission)>>>>;\n\n}\n", "file_path": "src/data.rs", "rank": 33, "score": 29060.514807667463 }, { "content": "use std::collections::HashMap;\n\nuse serde::Deserialize;\n\nuse crate::{error::AppError, models::{submission::RawSubmission, user_stat::{StreakData, ProblemCountData, PointSumData}}};\n\n\n\nconst API_ENDPOINT: &str = \"https://kenkoooo.com/atcoder\";\n\n\n", "file_path": "src/http.rs", "rank": 34, "score": 28776.870467573528 }, { "content": " Ok(resp.get(problem_id).cloned())\n\n}\n\n\n\npub async fn get_contest_name(contest_id: &str) -> Result<Option<String>, AppError> {\n\n let url = format!(\"{}/resources/contests.json\", API_ENDPOINT);\n\n debug!(\"GET: {}\", url);\n\n let client = create_client()?;\n\n let resp = client.get(&url)\n\n .send()\n\n .await?\n\n .json::<Vec<Contest>>()\n\n .await?\n\n .iter()\n\n .map(|c| (c.id.clone(), c.title.clone()))\n\n 
.collect::<HashMap<String, String>>();\n\n Ok(resp.get(contest_id).cloned())\n\n}\n\n\n\npub async fn get_streak() -> Result<Vec<StreakData>, AppError> {\n\n let url = format!(\"{}/resources/streaks.json\", API_ENDPOINT);\n", "file_path": "src/http.rs", "rank": 35, "score": 28770.131961697967 }, { "content": "}\n\n\n\npub async fn get_point_sum() -> Result<Vec<PointSumData>, AppError> {\n\n let url = format!(\"{}/resources/sums.json\", API_ENDPOINT);\n\n debug!(\"GET: {}\", url);\n\n let client = create_client()?;\n\n let resp = client.get(&url)\n\n .send()\n\n .await?\n\n .json::<Vec<PointSumData>>()\n\n .await?;\n\n Ok(resp)\n\n}\n", "file_path": "src/http.rs", "rank": 36, "score": 28767.35746970851 }, { "content": " debug!(\"GET: {}\", url);\n\n let client = create_client()?;\n\n let resp = client.get(&url)\n\n .send()\n\n .await?\n\n .json::<Vec<StreakData>>()\n\n .await?;\n\n Ok(resp)\n\n}\n\n\n\npub async fn get_problem_count() -> Result<Vec<ProblemCountData>, AppError> {\n\n let url = format!(\"{}/resources/ac.json\", API_ENDPOINT);\n\n debug!(\"GET: {}\", url);\n\n let client = create_client()?;\n\n let resp = client.get(&url)\n\n .send()\n\n .await?\n\n .json::<Vec<ProblemCountData>>()\n\n .await?;\n\n Ok(resp)\n", "file_path": "src/http.rs", "rank": 37, "score": 28767.275338204316 }, { "content": "#[derive(Debug)]\n\npub enum AppError {\n\n Sqlx(sqlx::Error),\n\n Reqwest(reqwest::Error),\n\n Io(std::io::Error),\n\n}\n\n\n\npub(crate) fn custom_error(message: &str) -> AppError {\n\n let err = std::io::Error::new(std::io::ErrorKind::Other, message);\n\n AppError::Io(err)\n\n}\n\n\n\nimpl std::fmt::Display for AppError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n AppError::Sqlx(ref e) => e.fmt(f),\n\n AppError::Reqwest(ref e) => e.fmt(f),\n\n AppError::Io(ref e) => e.fmt(f),\n\n }\n\n }\n", "file_path": "src/error.rs", "rank": 38, "score": 28675.740092750915 }, { "content": "}\n\n\n\nimpl std::error::Error for 
AppError {\n\n fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {\n\n match *self {\n\n AppError::Sqlx(ref e) => Some(e),\n\n AppError::Reqwest(ref e) => Some(e),\n\n AppError::Io(ref e) => Some(e),\n\n }\n\n }\n\n}\n\n\n\nimpl From<sqlx::Error> for AppError {\n\n fn from(e: sqlx::Error) -> AppError {\n\n Self::Sqlx(e)\n\n }\n\n}\n\n\n\nimpl From<reqwest::Error> for AppError {\n\n fn from(e: reqwest::Error) -> AppError {\n", "file_path": "src/error.rs", "rank": 39, "score": 28673.077099708917 }, { "content": " Self::Reqwest(e)\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for AppError {\n\n fn from(e: std::io::Error) -> AppError {\n\n Self::Io(e)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 40, "score": 28665.263803565766 }, { "content": "use serenity::{\n\n prelude::Context, model::channel::Message,\n\n utils::Colour,\n\n framework::standard::CommandError\n\n};\n\nuse std::collections::VecDeque;\n\nuse chrono::prelude::*;\n\nuse crate::{\n\n data::{DatabasePool, SubmissionQueue},\n\n http::{get_problem_name, get_contest_name},\n\n models::{account::Account, submission::Submission, guild::Guild},\n\n error::AppError\n\n};\n\n\n\npub async fn send_error(ctx: &Context, msg: &Message, title: &str, description: &str) -> Result<Message, serenity::Error> {\n\n msg.channel_id.send_message(ctx, |m| {\n\n m\n\n .reference_message(msg)\n\n .allowed_mentions(|f| f.replied_user(false))\n\n .embed(|e| {\n", "file_path": "src/utils.rs", "rank": 41, "score": 28459.139806144125 }, { "content": " e\n\n .title(title)\n\n .description(description)\n\n .colour(Colour::from_rgb(255, 50, 50))\n\n .footer(|f| f.text(\"To show usage, type \\\"^help <command>\\\"\"))\n\n .timestamp(&Utc::now())\n\n })\n\n }).await\n\n}\n\n\n\n#[allow(clippy::eval_order_dependence)]\n\n#[allow(dead_code)]\n\npub async fn send_accepted_notification(ctx: &Context, account: &Account, submission: &Submission) -> Result<(), serenity::Error> {\n\n let pool = {\n\n let data = 
ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n let guilds = match Account::list_guilds(&pool, account.id).await {\n\n Ok(guilds) => guilds,\n\n Err(e) => {\n", "file_path": "src/utils.rs", "rank": 42, "score": 28434.124240331774 }, { "content": " Some(channel_id) => channel_id as u64,\n\n None => {\n\n info!(\"Unsubscribe guild: {}\", guild_id);\n\n return Ok(())\n\n }\n\n },\n\n Err(e) => {\n\n error!(\"Failed to get channel: {:?}\", e);\n\n return Ok(());\n\n }\n\n };\n\n let channel = match ctx.http.get_channel(channel).await {\n\n Ok(channel) => channel,\n\n Err(e) => {\n\n error!(\"Failed to get channel: {:?}\", e);\n\n return Ok(());\n\n }\n\n };\n\n let avatar = match ctx.http.get_user(account.id as u64).await {\n\n Ok(user) => user\n", "file_path": "src/utils.rs", "rank": 43, "score": 28433.954197603525 }, { "content": " };\n\n\n\n let channel = match Guild::get(&pool, guild_id).await {\n\n Ok(data) => match data.channel_id {\n\n Some(channel_id) => channel_id as u64,\n\n None => {\n\n info!(\"Unsubscribe guild: {}\", guild_id);\n\n return Ok(())\n\n }\n\n },\n\n Err(e) => {\n\n error!(\"Failed to get channel: {:?}\", e);\n\n return Ok(());\n\n }\n\n };\n\n let channel = match ctx.http.get_channel(channel).await {\n\n Ok(channel) => channel,\n\n Err(e) => {\n\n error!(\"Failed to get channel: {:?}\", e);\n\n return Ok(());\n", "file_path": "src/utils.rs", "rank": 44, "score": 28433.214159814892 }, { "content": " dequeue_map\n\n .lock()\n\n .await\n\n .entry(guild_id)\n\n .or_insert(VecDeque::new())\n\n .push_back((account.clone(), submission.clone()));\n\n }\n\n \n\n Ok(())\n\n}\n\n\n\n#[allow(clippy::eval_order_dependence)]\n\npub async fn send_accepted_single(ctx: &Context, guild_id: i64, account: &Account, submission: &Submission) -> Result<(), serenity::Error> {\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n\n\n let channel = match Guild::get(&pool, 
guild_id).await {\n\n Ok(data) => match data.channel_id {\n", "file_path": "src/utils.rs", "rank": 45, "score": 28432.96704989885 }, { "content": " error!(\"Failed to get guilds: {:?}\", e);\n\n return Ok(());\n\n }\n\n };\n\n\n\n for guild in guilds.iter() {\n\n let channel = match Guild::get(&pool, guild.guild_id).await {\n\n Ok(data) => match data.channel_id {\n\n Some(channel_id) => channel_id as u64,\n\n None => continue\n\n },\n\n Err(e) => {\n\n error!(\"Failed to get channel: {:?}\", e);\n\n return Ok(());\n\n }\n\n };\n\n let channel = match ctx.http.get_channel(channel).await {\n\n Ok(channel) => channel,\n\n Err(e) => {\n\n error!(\"Failed to get channel: {:?}\", e);\n", "file_path": "src/utils.rs", "rank": 46, "score": 28431.152938916537 }, { "content": " .field(\"Code Length\", format!(\"{} Bytes\", submission.length), true)\n\n .field(\"Link\", format!(\"[View](https://atcoder.jp/contests/{}/submissions/{})\", submission.contest_id, submission.id), true)\n\n .colour(Colour::from_rgb(0, 255, 55))\n\n .footer(|f| f.text(\"Submission Time\"))\n\n .thumbnail(&avatar)\n\n .timestamp(&Utc.from_local_datetime(&submission.epoch_second).unwrap())\n\n })\n\n }).await {\n\n error!(\"Failed to send notification: {:?}\", e);\n\n };\n\n\n\n Ok(())\n\n}\n\n\n\n\n\n#[allow(clippy::eval_order_dependence)]\n\npub async fn send_accepted_multiple(ctx: &Context, guild_id: i64, accounts_submissions: Vec<(Account, Submission)>) -> Result<(), serenity::Error> {\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n", "file_path": "src/utils.rs", "rank": 47, "score": 28429.91169693329 }, { "content": " return Ok(());\n\n }\n\n };\n\n let avatar = match ctx.http.get_user(account.id as u64).await {\n\n Ok(user) => user\n\n .avatar\n\n .map(|a| format!(\"https://cdn.discordapp.com/avatars/{}/{}.png\", account.id, a))\n\n .unwrap_or_else(|| \"https://img.atcoder.jp/assets/atcoder.png\".to_string()),\n\n Err(_) => 
\"https://img.atcoder.jp/assets/atcoder.png\".to_string(),\n\n };\n\n info!(\"Avatar URL: {}\", avatar);\n\n info!(\"Send new Accepted Submission!: {:?}\", submission);\n\n let description = match (get_contest_name(&submission.contest_id).await, get_problem_name(&submission.problem_id).await) {\n\n (Ok(Some(contest)), Ok(Some(problem))) => format!(\"<@{}> get new AC!\\n\\n Problem:\\n**{} - {}**\", account.id, contest, problem),\n\n _ => format!(\"<@{}> get new AC!\", account.id)\n\n };\n\n if let Err(e) = channel.id().send_message(ctx, |m| {\n\n m\n\n .embed(|e| {\n\n e\n", "file_path": "src/utils.rs", "rank": 48, "score": 28429.08344097976 }, { "content": " .avatar\n\n .map(|a| format!(\"https://cdn.discordapp.com/avatars/{}/{}.png\", account.id, a))\n\n .unwrap_or_else(|| \"https://img.atcoder.jp/assets/atcoder.png\".to_string()),\n\n Err(_) => \"https://img.atcoder.jp/assets/atcoder.png\".to_string(),\n\n };\n\n info!(\"Avatar URL: {}\", avatar);\n\n info!(\"Send new Accepted Submission!: {:?}\", submission);\n\n let description = match (get_contest_name(&submission.contest_id).await, get_problem_name(&submission.problem_id).await) {\n\n (Ok(Some(contest)), Ok(Some(problem))) => format!(\"<@{}> get new AC!\\n\\n Problem:\\n**{} - {}**\", account.id, contest, problem),\n\n _ => format!(\"<@{}> get new AC!\", account.id)\n\n };\n\n if let Err(e) = channel.id().send_message(ctx, |m| {\n\n m\n\n .embed(|e| {\n\n e\n\n .title(\"Accepted!!\")\n\n .description(description)\n\n .field(\"Point\", format!(\"{}\", submission.point), true)\n\n .field(\"Lang\", &submission.language, true)\n\n .field(\"Execution Time\", format!(\"{} ms\", submission.execution_time), true)\n", "file_path": "src/utils.rs", "rank": 49, "score": 28427.55150026802 }, { "content": "pub async fn insert_submission(ctx: &Context, account: &Account, submission: &Submission) -> Result<(), AppError> {\n\n let data = ctx.data.read().await;\n\n let dequeue_map = 
data.get::<SubmissionQueue>().unwrap();\n\n\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n\n\n let guilds = match Account::list_guilds(&pool, account.id).await {\n\n Ok(guilds) => guilds,\n\n Err(e) => {\n\n error!(\"Failed to get guilds: {:?}\", e);\n\n return Ok(());\n\n }\n\n };\n\n\n\n info!(\"Recieve new Accepted Submission!: {:?}\", submission);\n\n for guild in guilds.iter() {\n\n let guild_id = guild.guild_id;\n", "file_path": "src/utils.rs", "rank": 50, "score": 28426.724881334543 }, { "content": " }\n\n };\n\n info!(\"Send new Accepted Submission!: {:?}\", accounts_submissions);\n\n\n\n let mut problem_names = vec![];\n\n for (_account, submission) in accounts_submissions.iter() {\n\n let description = match (get_contest_name(&submission.contest_id).await, get_problem_name(&submission.problem_id).await) {\n\n (Ok(Some(contest)), Ok(Some(problem))) => format!(\"{} - {}\", contest, problem),\n\n _ => format!(\"Unknown Problem\")\n\n };\n\n problem_names.push(description);\n\n }\n\n\n\n if let Err(e) = channel.id().send_message(ctx, |m| {\n\n m\n\n .embed(|e| {\n\n let embed = e\n\n .title(\"Accepted!!\")\n\n .colour(Colour::from_rgb(0, 255, 55));\n\n for ((account, submission), problem) in accounts_submissions.iter().zip(problem_names.iter()) {\n", "file_path": "src/utils.rs", "rank": 51, "score": 28426.169685793448 }, { "content": " .title(\"Accepted!!\")\n\n .description(description)\n\n .field(\"Point\", format!(\"{}\", submission.point), true)\n\n .field(\"Lang\", &submission.language, true)\n\n .field(\"Execution Time\", format!(\"{} ms\", submission.execution_time), true)\n\n .field(\"Code Length\", format!(\"{} Bytes\", submission.length), true)\n\n .field(\"Link\", format!(\"[View](https://atcoder.jp/contests/{}/submissions/{})\", submission.contest_id, submission.id), true)\n\n .colour(Colour::from_rgb(0, 255, 55))\n\n .footer(|f| f.text(\"Submission Time\"))\n\n 
.thumbnail(&avatar)\n\n .timestamp(&Utc.from_local_datetime(&submission.epoch_second).unwrap())\n\n })\n\n }).await {\n\n error!(\"Failed to send notification: {:?}\", e);\n\n };\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 52, "score": 28424.96721003697 }, { "content": " let author = format!(\"<@{}>\", account.id);\n\n let point = format!(\"{} pts.\", submission.point);\n\n let execution_time = format!(\"{} ms\", submission.execution_time);\n\n let lang = format!(\"{}\", submission.language);\n\n let url = format!(\"[View](https://atcoder.jp/contests/{}/submissions/{})\", submission.contest_id, submission.id);\n\n let value = vec![author, point, execution_time, lang, url].join(\" | \");\n\n embed.field(problem, value, false);\n\n }\n\n embed\n\n })\n\n }).await {\n\n error!(\"Failed to send message: {:?}\", e);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n\n", "file_path": "src/utils.rs", "rank": 53, "score": 28421.895625324047 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Contest {\n\n id: String,\n\n start_epoch_second: i64,\n\n duration_second: i64,\n\n title: String,\n\n rate_change: String,\n\n}\n\n\n\npub async fn get_problem_name(problem_id: &str) -> Result<Option<String>, AppError> {\n\n let url = format!(\"{}/resources/problems.json\", API_ENDPOINT);\n\n debug!(\"GET: {}\", url);\n\n let client = create_client()?;\n\n let resp = client.get(&url)\n\n .send()\n\n .await?\n\n .json::<Vec<Problem>>()\n\n .await?\n\n .iter()\n\n .map(|c| (c.id.clone(), c.title.clone()))\n\n .collect::<HashMap<String, String>>();\n", "file_path": "src/http.rs", "rank": 54, "score": 27222.030992020507 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Problem {\n\n id: String,\n\n contest_id: String,\n\n title: String,\n\n}\n\n\n", "file_path": "src/http.rs", "rank": 55, "score": 27222.030992020507 }, { "content": "-- Add migration script here\n", "file_path": "migrations/20210224165429_user_stat.sql", "rank": 56, "score": 27222.030992020507 }, { 
"content": "use std::{\n\n sync::Arc,\n\n time::Duration,\n\n cmp::min,\n\n};\n\nuse serenity::{\n\n prelude::*,\n\n};\n\nuse crate::{\n\n data::SubmissionQueue,\n\n utils::{send_accepted_single, send_accepted_multiple},\n\n};\n\n\n\n\n\npub async fn submit_task(ctx: Arc<Context>) {\n\n let ctx = Arc::clone(&ctx);\n\n tokio::spawn(async move {\n\n loop {\n\n debug!(\"Submit Loop Start\");\n\n let data = ctx.data.read().await;\n", "file_path": "src/tasks/submitter.rs", "rank": 57, "score": 27109.826500778196 }, { "content": " let dequeue_map = data.get::<SubmissionQueue>().unwrap();\n\n \n\n for (guild_id, queue) in dequeue_map.lock().await.iter_mut() {\n\n let que_size = queue.len();\n\n match que_size {\n\n 0 => continue,\n\n 1 => {\n\n let (account, submission) = queue.pop_front().unwrap();\n\n if let Err(e) = send_accepted_single(&ctx, *guild_id, &account, &submission).await {\n\n error!(\"Failed to send accepted notification: {:?}\", e);\n\n }\n\n },\n\n _ => {\n\n let drain_size = min(que_size, 8);\n\n let accounts_submissions = queue.drain(..drain_size).collect::<Vec<_>>();\n\n if let Err(e) = send_accepted_multiple(&ctx, *guild_id, accounts_submissions).await {\n\n error!(\"Failed to send accepted notification: {:?}\", e);\n\n }\n\n },\n\n }\n\n }\n\n tokio::time::sleep(Duration::from_secs(180)).await;\n\n }\n\n });\n\n}", "file_path": "src/tasks/submitter.rs", "rank": 58, "score": 27096.285920983057 }, { "content": " .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn is_subscribed(pool: &sqlx::PgPool, msg: &Message) -> Result<bool, AppError> {\n\n let guild_id = match msg.guild_id {\n\n Some(guild_id) => guild_id.0 as i64,\n\n None => return Err(custom_error(\"Guild ID is None. 
Called in DM!\"))\n\n };\n\n let data = query_as!(\n\n GuildAccounts, \"SELECT * FROM guild_accounts WHERE guild_id = $1 AND account_id = $2\", guild_id, msg.author.id.0 as i64\n\n )\n\n .fetch_one(pool)\n\n .compat()\n\n .await;\n\n match data {\n\n Ok(_) => Ok(true),\n\n Err(sqlx::Error::RowNotFound) => Ok(false),\n\n Err(err) => return Err(err.into())\n\n }\n", "file_path": "src/models/account.rs", "rank": 59, "score": 26473.136514686514 }, { "content": " }\n\n\n\n pub async fn subscribe(pool: &sqlx::PgPool, msg: &Message) -> ExecuteResult {\n\n let guild_id = match msg.guild_id {\n\n Some(guild_id) => guild_id.0 as i64,\n\n None => return Err(custom_error(\"Guild ID is None. Called in DM!\"))\n\n };\n\n\n\n query!(\n\n \"INSERT INTO guild_accounts(guild_id, account_id) VALUES ($1, $2)\", guild_id, msg.author.id.0 as i64\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn unsubscribe(pool: &sqlx::PgPool, msg: &Message) -> ExecuteResult {\n\n let guild_id = match msg.guild_id {\n\n Some(guild_id) => guild_id.0 as i64,\n", "file_path": "src/models/account.rs", "rank": 60, "score": 26469.17861192241 }, { "content": "use tokio_compat_02::FutureExt;\n\nuse serenity::model::channel::Message;\n\nuse crate::error::{AppError, custom_error};\n\n\n", "file_path": "src/models/account.rs", "rank": 61, "score": 26468.643944445597 }, { "content": " .await?;\n\n Ok(account)\n\n }\n\n\n\n pub async fn create(pool: &sqlx::PgPool, msg: &Message, atcoder_id: &str) -> ExecuteResult {\n\n query!(\n\n \"INSERT INTO account(id, atcoder_id) VALUES ($1, $2)\", msg.author.id.0 as i64, atcoder_id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn delete(pool: &sqlx::PgPool, msg: &Message) -> ExecuteResult {\n\n query!(\n\n \"DELETE FROM account WHERE id = $1\", msg.author.id.0 as i64\n\n )\n\n .execute(pool)\n\n .compat()\n", "file_path": "src/models/account.rs", "rank": 62, 
"score": 26465.382671591364 }, { "content": " query!(\n\n \"INSERT INTO guild(id, channel_id) VALUES ($1, $2)\", id, channel_id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn remove(pool: &sqlx::PgPool, id: i64) -> ExecuteResult {\n\n query!(\n\n \"DELETE FROM guild WHERE id = $1\", id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn change_channel(pool: &sqlx::PgPool, id: i64, channel_id: i64) -> ExecuteResult {\n", "file_path": "src/models/guild.rs", "rank": 63, "score": 26462.06328852738 }, { "content": " query!(\n\n \"UPDATE guild SET channel_id = $1 WHERE id = $2\", channel_id, id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn unset_channel(pool: &sqlx::PgPool, id: i64) -> ExecuteResult {\n\n query!(\n\n \"UPDATE guild SET channel_id = NULL WHERE id = $1\", id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n}\n", "file_path": "src/models/guild.rs", "rank": 64, "score": 26461.148964982553 }, { "content": " None => return Err(custom_error(\"Guild ID is None. 
Called in DM!\"))\n\n };\n\n\n\n query!(\n\n \"DELETE FROM guild_accounts WHERE guild_id = $1 AND account_id = $2\", guild_id, msg.author.id.0 as i64\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n\n\n pub async fn update(pool: &sqlx::PgPool, id: i64, new_atcoder_id: &str) -> ExecuteResult {\n\n query!(\n\n \"UPDATE account SET atcoder_id = $2 WHERE id = $1\", id, new_atcoder_id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await\n\n .map_err(AppError::from)\n\n }\n\n}\n", "file_path": "src/models/account.rs", "rank": 65, "score": 26460.757314382554 }, { "content": " INNER JOIN account\n\n ON account.id = guild_accounts.account_id\n\n WHERE guild_id = $1\n\n \"#, guild_id\n\n )\n\n .fetch_all(pool)\n\n .compat()\n\n .await?\n\n .iter()\n\n .map(|row| (row.atcoder_id.clone(), row.account_id))\n\n .collect::<Vec<_>>();\n\n Ok(accounts)\n\n }\n\n\n\n pub async fn get(pool: &sqlx::PgPool, id: i64) -> Result<Self, AppError> {\n\n let account = query_as!(\n\n Self, \"SELECT * FROM account WHERE id = $1\", id\n\n )\n\n .fetch_one(pool)\n\n .compat()\n", "file_path": "src/models/account.rs", "rank": 66, "score": 26458.588847564555 }, { "content": " contest_id: raw.contest_id,\n\n result: raw.result,\n\n atcoder_id: raw.user_id,\n\n language: raw.language,\n\n point: raw.point as i32,\n\n length: raw.length,\n\n execution_time: raw.execution_time.unwrap_or(0),\n\n account_id: 0,\n\n }\n\n }\n\n}\n\n\n\n\n\nimpl Submission {\n\n pub async fn get(pool: &sqlx::PgPool, account_id: i64) -> Result<Vec<Submission>, AppError> {\n\n let submissions = query_as!(\n\n Submission,\n\n \"SELECT * FROM submission WHERE account_id = $1;\",\n\n account_id\n\n )\n", "file_path": "src/models/submission.rs", "rank": 67, "score": 26458.298407322894 }, { "content": " .fetch_all(pool)\n\n .compat()\n\n .await?;\n\n Ok(accounts)\n\n }\n\n\n\n pub async fn list_guilds(pool: &sqlx::PgPool, account_id: i64) -> Result<Vec<GuildAccounts>, AppError> {\n\n let 
accounts = query_as!(\n\n GuildAccounts, \"SELECT * FROM guild_accounts WHERE account_id = $1;\", account_id\n\n )\n\n .fetch_all(pool)\n\n .compat()\n\n .await?;\n\n Ok(accounts)\n\n }\n\n\n\n pub async fn list_accounts(pool: &sqlx::PgPool, guild_id: i64) -> Result<Vec<(String, i64)>, AppError> {\n\n let accounts = query!(\n\n r#\"\n\n SELECT atcoder_id, account_id FROM guild_accounts\n", "file_path": "src/models/account.rs", "rank": 68, "score": 26458.07916782413 }, { "content": "use chrono::NaiveDateTime;\n\nuse serde::Deserialize;\n\nuse crate::error::AppError;\n\nuse tokio_compat_02::FutureExt;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Submission {\n\n pub id: i32,\n\n pub epoch_second: NaiveDateTime,\n\n pub problem_id: String,\n\n pub contest_id: String,\n\n pub result: String,\n\n pub atcoder_id: String,\n\n pub language: String,\n\n pub point: i32,\n\n pub length: i32,\n\n pub execution_time: i32,\n\n pub account_id: i64,\n\n}\n\n\n", "file_path": "src/models/submission.rs", "rank": 69, "score": 26458.074769021398 }, { "content": "use tokio_compat_02::FutureExt;\n\nuse crate::error::AppError;\n\n\n", "file_path": "src/models/guild.rs", "rank": 70, "score": 26457.77653295325 }, { "content": " .fetch_all(pool)\n\n .compat()\n\n .await?;\n\n Ok(submissions)\n\n }\n\n\n\n pub async fn bulk_insert(pool: &sqlx::PgPool, account_id: i64, submissions: &[RawSubmission]) -> Result<(), AppError> {\n\n for submission in submissions.iter().cloned() {\n\n let submission = Submission::from(submission);\n\n if submission.result != \"AC\" { continue; }\n\n if let Err(err) = query!(\n\n \"INSERT INTO submission \n\n (id, epoch_second, problem_id, contest_id, result, atcoder_id, language, point, length, execution_time, account_id)\n\n VALUES \n\n ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11)\n\n ON CONFLICT DO NOTHING\",\n\n submission.id,\n\n submission.epoch_second,\n\n submission.problem_id,\n\n submission.contest_id,\n", "file_path": "src/models/submission.rs", 
"rank": 71, "score": 26455.960628566296 }, { "content": "#[derive(Debug, Deserialize, Clone)]\n\npub struct RawSubmission {\n\n pub id: i32,\n\n epoch_second: i64,\n\n problem_id: String,\n\n contest_id: String,\n\n pub result: String,\n\n user_id: String,\n\n language: String,\n\n point: f64,\n\n length: i32,\n\n execution_time: Option<i32>,\n\n}\n\n\n\nimpl From<RawSubmission> for Submission {\n\n fn from(raw: RawSubmission) -> Submission {\n\n Submission {\n\n id: raw.id,\n\n epoch_second: NaiveDateTime::from_timestamp(raw.epoch_second, 0),\n\n problem_id: raw.problem_id,\n", "file_path": "src/models/submission.rs", "rank": 72, "score": 26453.089498650697 }, { "content": " submission.result,\n\n submission.atcoder_id,\n\n submission.language,\n\n submission.point,\n\n submission.length,\n\n submission.execution_time,\n\n account_id\n\n )\n\n .execute(pool)\n\n .compat()\n\n .await {\n\n error!(\"Failed to insert submission: {:?}\", err);\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/models/submission.rs", "rank": 73, "score": 26452.93005159235 }, { "content": "-- Add migration script here\n\nALTER TABLE submission RENAME COLUMN content_id TO contest_id;\n", "file_path": "migrations/20210124123016_fix_submission_column_name.sql", "rank": 74, "score": 26079.4653692829 }, { "content": "use std::{\n\n sync::Arc,\n\n time::Duration,\n\n};\n\nuse serenity::{\n\n prelude::*,\n\n // model::id::ChannelId\n\n};\n\nuse crate::{\n\n models::{account::Account, submission::Submission},\n\n data::DatabasePool,\n\n http::get_user_submissions,\n\n utils::insert_submission,\n\n};\n\n\n\npub async fn ac_fetch(ctx: Arc<Context>) {\n\n let ctx = Arc::clone(&ctx);\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n", "file_path": "src/tasks/ac_fetcher.rs", "rank": 75, "score": 25738.808572290527 }, { "content": "use std::{\n\n sync::Arc,\n\n time::Duration,\n\n collections::HashMap,\n\n};\n\nuse serenity::{\n\n 
prelude::*,\n\n};\n\nuse crate::{data::DatabasePool, http::{get_streak, get_problem_count, get_point_sum}, models::{user_stat::UserStat, account::Account}};\n\n\n\npub async fn stat_updater(ctx: Arc<Context>) {\n\n let ctx = Arc::clone(&ctx);\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n tokio::spawn(async move {\n\n loop {\n\n debug!(\"Stat Loop Start\");\n\n let streaks = match get_streak().await {\n", "file_path": "src/tasks/stat_updater.rs", "rank": 76, "score": 25732.625942385403 }, { "content": " Ok(s) => s.iter().map(|v| (v.user_id.clone(), v.streak)).collect::<HashMap<_, _>>(),\n\n Err(e) => {\n\n error!(\"Failed to get streaks: {:?}\", e);\n\n continue\n\n }\n\n };\n\n let problem_counts = match get_problem_count().await {\n\n Ok(s) => s.iter().map(|v| (v.user_id.clone(), v.problem_count)).collect::<HashMap<_, _>>(),\n\n Err(e) => {\n\n error!(\"Failed to get problem counts: {:?}\", e);\n\n continue\n\n }\n\n };\n\n let point_sums = match get_point_sum().await {\n\n Ok(s) => s.iter().map(|v| (v.user_id.clone(), v.point_sum)).collect::<HashMap<_, _>>(),\n\n Err(e) => {\n\n error!(\"Failed to get point sum: {:?}\", e);\n\n continue\n\n }\n\n };\n", "file_path": "src/tasks/stat_updater.rs", "rank": 77, "score": 25720.99611761807 }, { "content": " let accounts = match Account::list(&pool).await {\n\n Ok(a) => a,\n\n Err(e) => {\n\n error!(\"Failed to get accounts: {:?}\", e);\n\n continue\n\n }\n\n };\n\n for account in accounts.iter() {\n\n let id = account.atcoder_id.clone();\n\n let _ = UserStat::create(&pool, &id).await;\n\n let streak = streaks.get(&id);\n\n let problem_count = problem_counts.get(&id);\n\n let point_sum = point_sums.get(&id);\n\n if let (Some(streak), Some(problem_count), Some(point_sum)) = (streak, problem_count, point_sum) {\n\n let _ = UserStat::set_streak(&pool, &id, streak).await;\n\n let _ = UserStat::set_problem_count(&pool, &id, problem_count).await;\n\n let _ = 
UserStat::set_point_sum(&pool, &id, point_sum).await;\n\n }\n\n }\n\n debug!(\"Stat Loop Finished\");\n\n tokio::time::sleep(Duration::from_secs(60)).await;\n\n }\n\n });\n\n}", "file_path": "src/tasks/stat_updater.rs", "rank": 78, "score": 25719.33357050888 }, { "content": " };\n\n let new_submissions = match get_user_submissions(&account.atcoder_id).await {\n\n Ok(submissions) => {submissions}\n\n Err(e) => {\n\n error!(\"Failed to fetch submissions: {:?}\", e);\n\n continue;\n\n }\n\n };\n\n if let Err(e) = Submission::bulk_insert(&pool, account.id, &new_submissions).await {\n\n error!(\"Failed to update submissions: {:?}\", e);\n\n }\n\n let mut old_submission_ids = std::collections::HashSet::new();\n\n for submission in old_submissions { old_submission_ids.insert(submission.id); }\n\n\n\n for submission in new_submissions.iter().cloned() {\n\n if submission.result != \"AC\" {\n\n continue;\n\n }\n\n if !old_submission_ids.contains(&submission.id) {\n\n // 3. キューに押し込む\n", "file_path": "src/tasks/ac_fetcher.rs", "rank": 79, "score": 25719.295413668548 }, { "content": " };\n\n tokio::spawn(async move {\n\n loop {\n\n debug!(\"Fetching loop Started\");\n\n // 1. 登録ユーザーの取得\n\n let accounts = match Account::list(&pool).await {\n\n Ok(accounts) => {accounts}\n\n Err(e) => {\n\n error!(\"Failed to fetch accounts: {:?}\", e);\n\n continue;\n\n }\n\n };\n\n for account in accounts {\n\n // 2. 
新規ACの確認\n\n let old_submissions = match Submission::get(&pool, account.id).await {\n\n Ok(submissions) => {submissions}\n\n Err(e) => {\n\n error!(\"Failed to fetch old submissions: {:?}\", e);\n\n continue;\n\n }\n", "file_path": "src/tasks/ac_fetcher.rs", "rank": 80, "score": 25719.215570171134 }, { "content": " let _ = insert_submission(&ctx, &account, &Submission::from(submission)).await;\n\n }\n\n }\n\n tokio::time::sleep(Duration::from_secs(1)).await;\n\n }\n\n\n\n }\n\n });\n\n}\n", "file_path": "src/tasks/ac_fetcher.rs", "rank": 81, "score": 25710.60674124136 }, { "content": "use serenity::utils::Colour;\n\nuse serenity::prelude::*;\n\nuse serenity::model::prelude::*;\n\nuse serenity::framework::standard::{\n\n CommandResult,\n\n macros::command,\n\n};\n\n\n\nuse crate::{\n\n data::DatabasePool,\n\n models::{account::Account, user_stat::UserStat},\n\n};\n\n\n\n\n\n#[command]\n\n#[description(\n\n \"Show guild streak ranking\"\n\n)]\n\n#[usage(\"\")]\n\npub async fn streak(ctx: &Context, msg: &Message) -> CommandResult {\n", "file_path": "src/commands/stat/streak.rs", "rank": 82, "score": 24352.499587297454 }, { "content": "use serenity::{\n\n model::prelude::*,\n\n prelude::*,\n\n framework::standard::{\n\n Args, CommandResult,\n\n macros::command,\n\n }\n\n};\n\n\n\nuse crate::{\n\n models::guild::Guild,\n\n data::DatabasePool,\n\n utils::send_error,\n\n};\n\n\n\n#[command]\n\n#[only_in(\"guild\")]\n\n//#[required_permissions(\"MANAGE_MESSAGES\")]\n\n#[description(\n\n \"Start sending notifications.\"\n", "file_path": "src/commands/settings/start.rs", "rank": 83, "score": 24352.1750325261 }, { "content": "use serenity::prelude::*;\n\nuse serenity::model::prelude::*;\n\nuse serenity::framework::standard::{\n\n CommandResult,\n\n macros::command,\n\n};\n\n\n\n#[command]\n\n#[description(\n\n \"Get invitation link\"\n\n)]\n\n#[usage(\"\")]\n\npub async fn invite(ctx: &Context, msg: &Message) -> CommandResult {\n\n let link = 
std::env::var(\"INVITATION_URL\").unwrap_or_else(|_| \"http://discordapp.com\".to_string());\n\n msg.channel_id.say(&ctx.http, link).await?;\n\n Ok(())\n\n}\n", "file_path": "src/commands/general/invite.rs", "rank": 84, "score": 24350.89576703287 }, { "content": "use serenity::{\n\n model::prelude::*,\n\n prelude::*,\n\n framework::standard::{\n\n Args, CommandResult,\n\n macros::command,\n\n }\n\n};\n\n\n\nuse crate::{\n\n models::{account::Account, submission::Submission},\n\n data::DatabasePool,\n\n http::get_user_submissions,\n\n utils::unknown_error,\n\n};\n\n\n\n#[command]\n\n#[only_in(\"guild\")]\n\n#[aliases(\"r\")]\n\n#[description(\n", "file_path": "src/commands/account/register.rs", "rank": 85, "score": 24349.099245784615 }, { "content": "use serenity::prelude::*;\n\nuse serenity::model::prelude::*;\n\nuse serenity::framework::standard::{\n\n Args, CommandResult,\n\n macros::command,\n\n};\n\n\n\n#[command]\n\npub async fn add(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let sum: i64 = args.iter::<i64>().map(|v| v.unwrap_or(0)).sum();\n\n msg.channel_id.say(&ctx.http, sum).await?;\n\n Ok(())\n\n}", "file_path": "src/commands/general/adder.rs", "rank": 86, "score": 24347.091505878998 }, { "content": ")]\n\n#[max_args(1)]\n\n#[usage(\"[channel_name]\")]\n\n#[example(\"\")]\n\n#[example(\"#general\")]\n\n#[bucket(\"account\")]\n\npub async fn start(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let channel_id = args.single::<ChannelId>().unwrap_or(msg.channel_id);\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n println!(\"{}\", channel_id);\n\n let guild_id = msg.guild_id.unwrap().0 as i64;\n\n match Guild::get(&pool, guild_id).await {\n\n Ok(guild) => {\n\n if let Err(err) = Guild::change_channel(&pool, guild_id, channel_id.0 as i64).await {\n\n error!(\"Failed to change channel: {:?}\", err);\n\n let _ = send_error(ctx, msg, \"Internal Error!\", 
REGISTRATION_ERROR).await;\n\n return Ok(())\n", "file_path": "src/commands/settings/start.rs", "rank": 87, "score": 24337.076908800987 }, { "content": " \"Link your Discord user to AtCoder's user data. To receive notifications, type the command `^subscribe`.\"\n\n)]\n\n#[num_args(1)]\n\n#[usage(\"<atcoder_user_name>\")]\n\n#[example(\"tourist\")]\n\n#[example(\"chokudai\")]\n\n#[bucket(\"account\")]\n\npub async fn register(ctx: &Context, msg: &Message, mut args: Args) -> CommandResult {\n\n let atcoder_id = args.single::<String>()?;\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n if let Ok(account) = Account::get(&pool, msg.author.id.0 as i64).await {\n\n \n\n // 既にデータベースに登録済みの場合はIDを更新する\n\n if let Err(e) = Account::update(&pool, msg.author.id.0 as i64, &atcoder_id).await {\n\n error!(\"Failed to update account: {:?}\", e);\n\n return Err(unknown_error())\n\n }\n", "file_path": "src/commands/account/register.rs", "rank": 88, "score": 24334.69368280417 }, { "content": " }\n\n let description = match guild.channel_id {\n\n Some(old_channel_id) => {\n\n format!(\"Changed the notification channel from <#{}> to <#{}>.\", old_channel_id, channel_id)\n\n },\n\n None => {\n\n format!(\"Set the notification channel to <#{}>.\", channel_id)\n\n },\n\n };\n\n let _ = msg.reply(&ctx, description).await;\n\n }\n\n Err(err) => {\n\n error!(\"Failed to get guild: {:?}\", err);\n\n let _ = send_error(ctx, msg, \"Internal Error!\", REGISTRATION_ERROR).await;\n\n }\n\n };\n\n Ok(())\n\n}\n\n\n\nconst REGISTRATION_ERROR: &str = \"Your registration to the database has not been completed because the internal process did not finish successfully.\\\n\nPlease report the details of the error to `[email protected]`.\";\n", "file_path": "src/commands/settings/start.rs", "rank": 89, "score": 24333.992034100575 }, { "content": " let guild_id = msg.guild_id.unwrap().0 as i64;\n\n let pool = {\n\n let data = 
ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n let accounts = match Account::list_accounts(&pool, guild_id).await {\n\n Ok(accounts) => accounts,\n\n Err(err) => {\n\n error!(\"Failed to change channel: {:?}\", err);\n\n return Ok(())\n\n }\n\n };\n\n let mut map = Vec::new();\n\n for account in accounts {\n\n let stat = match UserStat::get(&pool, &account.0).await {\n\n Ok(stat) => stat,\n\n Err(err) => {\n\n error!(\"Failed to get user: {:?}\", err);\n\n return Ok(())\n\n }\n", "file_path": "src/commands/stat/streak.rs", "rank": 90, "score": 24329.871367184394 }, { "content": " };\n\n map.push((stat.streak, account.0, account.1));\n\n }\n\n map.sort();\n\n map.reverse();\n\n if let Err(e) = msg.channel_id.send_message(&ctx.http, |m| {\n\n m.embed(|e| {\n\n let embed = e.title(\"Top 10 Longest Streak Rank\").color(Colour::from_rgb(0, 255, 55));\n\n let top10 = map.iter().take(10).collect::<Vec<_>>();\n\n let mut rank = 1;\n\n for (v, name, id) in top10 {\n\n let name = format!(\"#{} {}\", rank, name);\n\n let value = format!(\"<@{}> - {} days\", id, v);\n\n embed.field(name, value, false);\n\n rank += 1;\n\n }\n\n embed\n\n })\n\n }).await {\n\n error!(\"Failed to send message: {:?}\", e);\n\n };\n\n Ok(())\n\n}", "file_path": "src/commands/stat/streak.rs", "rank": 91, "score": 24328.51395868993 }, { "content": " info!(\"Update Account: {} to {}\", &account.atcoder_id, &atcoder_id);\n\n let _ = msg.reply(ctx, format!(\"Update your AtCoder ID: **{}** to **{}**\", account.atcoder_id, atcoder_id)).await;\n\n \n\n } else {\n\n\n\n if let Err(e) = Account::create(&pool, msg, &atcoder_id).await {\n\n error!(\"Failed to create account: {:?}\", e);\n\n return Err(unknown_error())\n\n }\n\n info!(\"Create account: {}\", &atcoder_id);\n\n let _ = msg.reply(ctx, format!(\"Registered your AtCoder ID: **{}**\", atcoder_id)).await;\n\n \n\n }\n\n let account_id = msg.author.id.0 as i64;\n\n match get_user_submissions(&atcoder_id).await {\n\n 
Ok(submissions) => {\n\n match Submission::bulk_insert(&pool, account_id, &submissions).await {\n\n Ok(_) => info!(\"Submisstions were successfully updated!\"),\n\n Err(e) => error!(\"Failed to update submissions: {:?}\", e),\n\n }\n\n }\n\n Err(e) => {\n\n error!(\"Failed to update user submissions: {:?}\", e);\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/commands/account/register.rs", "rank": 92, "score": 24327.05934885228 }, { "content": "use serenity::utils::Colour;\n\nuse serenity::prelude::*;\n\nuse serenity::model::prelude::*;\n\nuse serenity::framework::standard::{\n\n CommandResult,\n\n macros::command,\n\n};\n\n\n\nuse crate::{\n\n data::DatabasePool,\n\n models::{account::Account, user_stat::UserStat},\n\n};\n\n\n\n\n\n#[command]\n\n#[description(\n\n \"Show guild point sum ranking\"\n\n)]\n\n#[usage(\"\")]\n\npub async fn point(ctx: &Context, msg: &Message) -> CommandResult {\n", "file_path": "src/commands/stat/point_sum.rs", "rank": 93, "score": 23178.582606210715 }, { "content": "use serenity::utils::Colour;\n\nuse serenity::prelude::*;\n\nuse serenity::model::prelude::*;\n\nuse serenity::framework::standard::{\n\n CommandResult,\n\n macros::command,\n\n};\n\n\n\nuse crate::{\n\n data::DatabasePool,\n\n models::{account::Account, user_stat::UserStat},\n\n};\n\n\n\n\n\n#[command]\n\n#[description(\n\n \"Show guild solved problem ranking\"\n\n)]\n\n#[usage(\"\")]\n\npub async fn problem(ctx: &Context, msg: &Message) -> CommandResult {\n", "file_path": "src/commands/stat/problem_count.rs", "rank": 94, "score": 23178.582606210715 }, { "content": " let guild_id = msg.guild_id.unwrap().0 as i64;\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n let accounts = match Account::list_accounts(&pool, guild_id).await {\n\n Ok(accounts) => accounts,\n\n Err(err) => {\n\n error!(\"Failed to change channel: {:?}\", err);\n\n return Ok(())\n\n }\n\n };\n\n let mut map = Vec::new();\n\n for account in 
accounts {\n\n let stat = match UserStat::get(&pool, &account.0).await {\n\n Ok(stat) => stat,\n\n Err(err) => {\n\n error!(\"Failed to get user: {:?}\", err);\n\n return Ok(())\n\n }\n", "file_path": "src/commands/stat/point_sum.rs", "rank": 95, "score": 23156.22974977252 }, { "content": " let guild_id = msg.guild_id.unwrap().0 as i64;\n\n let pool = {\n\n let data = ctx.data.read().await;\n\n data.get::<DatabasePool>().unwrap().clone()\n\n };\n\n let accounts = match Account::list_accounts(&pool, guild_id).await {\n\n Ok(accounts) => accounts,\n\n Err(err) => {\n\n error!(\"Failed to change channel: {:?}\", err);\n\n return Ok(())\n\n }\n\n };\n\n let mut map = Vec::new();\n\n for account in accounts {\n\n let stat = match UserStat::get(&pool, &account.0).await {\n\n Ok(stat) => stat,\n\n Err(err) => {\n\n error!(\"Failed to get user: {:?}\", err);\n\n return Ok(())\n\n }\n", "file_path": "src/commands/stat/problem_count.rs", "rank": 96, "score": 23156.22974977252 }, { "content": " };\n\n map.push((stat.problem_count, account.0, account.1));\n\n }\n\n map.sort();\n\n map.reverse();\n\n if let Err(e) = msg.channel_id.send_message(&ctx.http, |m| {\n\n m.embed(|e| {\n\n let embed = e.title(\"Top 10 Solved Problem Rank\").color(Colour::from_rgb(0, 255, 55));\n\n let top10 = map.iter().take(10).collect::<Vec<_>>();\n\n let mut rank = 1;\n\n for (v, name, id) in top10 {\n\n let name = format!(\"#{} {}\", rank, name);\n\n let value = format!(\"<@{}> - {}\", id, v);\n\n embed.field(name, value, false);\n\n rank += 1;\n\n }\n\n embed\n\n })\n\n }).await {\n\n error!(\"Failed to send message: {:?}\", e);\n\n };\n\n Ok(())\n\n}", "file_path": "src/commands/stat/problem_count.rs", "rank": 97, "score": 23154.872341278056 }, { "content": " };\n\n map.push((stat.point_sum, account.0, account.1));\n\n }\n\n map.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap());\n\n map.reverse();\n\n if let Err(e) = msg.channel_id.send_message(&ctx.http, |m| {\n\n m.embed(|e| {\n\n let embed = 
e.title(\"Top 10 Point Rank\").color(Colour::from_rgb(0, 255, 55));\n\n let top10 = map.iter().take(10).collect::<Vec<_>>();\n\n let mut rank = 1;\n\n for (v, name, id) in top10 {\n\n let name = format!(\"#{} {}\", rank, name);\n\n let value = format!(\"<@{}> - {} pts\", id, v);\n\n embed.field(name, value, false);\n\n rank += 1;\n\n }\n\n embed\n\n })\n\n }).await {\n\n error!(\"Failed to send message: {:?}\", e);\n\n };\n\n Ok(())\n\n}\n", "file_path": "src/commands/stat/point_sum.rs", "rank": 98, "score": 23154.580818918705 }, { "content": "-- Add migration script here\n\nALTER TABLE guild_accounts ALTER COLUMN guild_id SET NOT NULL;\n", "file_path": "migrations/20210122155412_fix_nullable.sql", "rank": 99, "score": 18474.218253896976 } ]
Rust
src/zcm.rs
Gregory-Meyer/zcm-rs
6e9e6e9be1f4324260c239e7054ee0313056be3c
extern crate std; use super::*; pub struct Zcm { zcm: ffi::zcm_t, } impl Zcm { pub fn new(url: &str) -> Result<Zcm, NewError> { let url_owned = match std::ffi::CString::new(url) { Ok(u) => u, Err(e) => return Err(NewError::Nul(e)), }; let mut zcm = Zcm { zcm: unsafe { std::mem::uninitialized() }, }; if unsafe { ffi::zcm_init(zcm.as_mut_ptr(), url_owned.as_ptr()) } != 0 { return Err(NewError::Error(zcm.errno().unwrap())); } Ok(zcm) } pub fn errno(&self) -> Option<Error> { let err = unsafe { ffi::zcm_errno(self.as_ptr()) }; Error::from_raw(err) } pub fn strerror(&self) -> std::borrow::Cow<str> { let err = unsafe { ffi::zcm_strerror(self.as_ptr()) }; unsafe { std::ffi::CStr::from_ptr(err) }.to_string_lossy() } pub fn flush(&mut self) { unsafe { ffi::zcm_flush(self.as_mut_ptr()) }; } pub fn try_flush(&mut self) -> Result<(), Error> { let result = unsafe { ffi::zcm_try_flush(self.as_mut_ptr()) }; match Error::from_raw(result) { None => Ok(()), Some(e) => Err(e), } } pub fn as_ptr(&self) -> *const ffi::zcm_t { &self.zcm as *const ffi::zcm_t } pub fn as_mut_ptr(&mut self) -> *mut ffi::zcm_t { &mut self.zcm as *mut ffi::zcm_t } } impl Drop for Zcm { fn drop(&mut self) { unsafe { ffi::zcm_cleanup(&mut self.zcm as *mut ffi::zcm_t) } } } #[derive(Clone, Copy, Debug)] pub enum Error { Invalid, Again, Connect, Interrupted, Unknown, NumReturnCodes, } impl Error { pub fn from_raw(err: libc::c_int) -> Option<Error> { use ffi::zcm_return_codes::*; if err < 0 || err >= unsafe { std::mem::transmute(ZCM_NUM_RETURN_CODES) } { return Some(Error::NumReturnCodes); } match unsafe { std::mem::transmute(err) } { ZCM_EOK => None, ZCM_EINVALID => Some(Error::Invalid), ZCM_EAGAIN => Some(Error::Again), ZCM_ECONNECT => Some(Error::Connect), ZCM_EINTR => Some(Error::Interrupted), ZCM_EUNKNOWN => Some(Error::Unknown), _ => Some(Error::NumReturnCodes), } } pub fn as_int(&self) -> libc::c_int { use ffi::zcm_return_codes::*; let underlying = match self { Error::Invalid => ZCM_EINVALID, Error::Again 
=> ZCM_EAGAIN, Error::Connect => ZCM_ECONNECT, Error::Interrupted => ZCM_EINTR, Error::Unknown => ZCM_EUNKNOWN, Error::NumReturnCodes => ZCM_NUM_RETURN_CODES, }; unsafe { std::mem::transmute(underlying) } } } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let as_str = unsafe { std::ffi::CStr::from_ptr(ffi::zcm_strerrno(self.as_int())) }; write!(f, "{}", as_str.to_string_lossy()) } } impl std::error::Error for Error {} #[derive(Clone, Debug)] pub enum NewError { Nul(std::ffi::NulError), Error(Error), } impl std::fmt::Display for NewError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { NewError::Nul(e) => write!(f, "{}", e), NewError::Error(e) => write!(f, "{}", e), } } } impl std::error::Error for NewError {}
extern crate std; use super::*; pub struct Zcm { zcm: ffi::zcm_t, } impl Zcm { pub fn new(url: &str) -> Result<Zcm, NewError> { let url_owned = match std::ffi::CString::new(url) { Ok(u) => u, Err(e) => return Err(NewError::Nul(e)), }; let mut zcm = Zcm { zcm: unsafe { std::mem::uninitialized() }, }; if unsafe { ffi::zcm_init(zcm.as_mut_ptr(), url_owned.as_ptr()) } != 0 { return Err(NewError::Error(zcm.errno().unwrap())); } Ok(zcm) } pub fn errno(&self) -> Option<Error> { let err = unsafe { ffi::zcm_errno(self.as_ptr()) }; Error::from_raw(err) } pub fn strerror(&self) -> std::borrow::Cow<str> {
} } pub fn as_ptr(&self) -> *const ffi::zcm_t { &self.zcm as *const ffi::zcm_t } pub fn as_mut_ptr(&mut self) -> *mut ffi::zcm_t { &mut self.zcm as *mut ffi::zcm_t } } impl Drop for Zcm { fn drop(&mut self) { unsafe { ffi::zcm_cleanup(&mut self.zcm as *mut ffi::zcm_t) } } } #[derive(Clone, Copy, Debug)] pub enum Error { Invalid, Again, Connect, Interrupted, Unknown, NumReturnCodes, } impl Error { pub fn from_raw(err: libc::c_int) -> Option<Error> { use ffi::zcm_return_codes::*; if err < 0 || err >= unsafe { std::mem::transmute(ZCM_NUM_RETURN_CODES) } { return Some(Error::NumReturnCodes); } match unsafe { std::mem::transmute(err) } { ZCM_EOK => None, ZCM_EINVALID => Some(Error::Invalid), ZCM_EAGAIN => Some(Error::Again), ZCM_ECONNECT => Some(Error::Connect), ZCM_EINTR => Some(Error::Interrupted), ZCM_EUNKNOWN => Some(Error::Unknown), _ => Some(Error::NumReturnCodes), } } pub fn as_int(&self) -> libc::c_int { use ffi::zcm_return_codes::*; let underlying = match self { Error::Invalid => ZCM_EINVALID, Error::Again => ZCM_EAGAIN, Error::Connect => ZCM_ECONNECT, Error::Interrupted => ZCM_EINTR, Error::Unknown => ZCM_EUNKNOWN, Error::NumReturnCodes => ZCM_NUM_RETURN_CODES, }; unsafe { std::mem::transmute(underlying) } } } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let as_str = unsafe { std::ffi::CStr::from_ptr(ffi::zcm_strerrno(self.as_int())) }; write!(f, "{}", as_str.to_string_lossy()) } } impl std::error::Error for Error {} #[derive(Clone, Debug)] pub enum NewError { Nul(std::ffi::NulError), Error(Error), } impl std::fmt::Display for NewError { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { match self { NewError::Nul(e) => write!(f, "{}", e), NewError::Error(e) => write!(f, "{}", e), } } } impl std::error::Error for NewError {}
let err = unsafe { ffi::zcm_strerror(self.as_ptr()) }; unsafe { std::ffi::CStr::from_ptr(err) }.to_string_lossy() } pub fn flush(&mut self) { unsafe { ffi::zcm_flush(self.as_mut_ptr()) }; } pub fn try_flush(&mut self) -> Result<(), Error> { let result = unsafe { ffi::zcm_try_flush(self.as_mut_ptr()) }; match Error::from_raw(result) { None => Ok(()), Some(e) => Err(e),
random
[ { "content": "#[test]\n\nfn foo() {\n\n let mut zcm = Zcm::new(\"ipc\").unwrap();\n\n let channel = std::ffi::CString::new(\"foo\").unwrap();\n\n\n\n let mut received = false;\n\n\n\n let sub = unsafe {\n\n ffi::zcm_subscribe(\n\n zcm.as_mut_ptr(),\n\n channel.as_ptr(),\n\n handler,\n\n &mut received as *mut bool as *mut libc::c_void,\n\n )\n\n };\n\n\n\n assert!(!sub.is_null());\n\n\n\n let message = std::ffi::CString::new(\"foo bar baz qux\").unwrap();\n\n let msg_slice = message.as_bytes_with_nul();\n\n\n", "file_path": "src/tests.rs", "rank": 0, "score": 23175.42312981702 }, { "content": " pub m_type: zcm_type,\n\n pub m_impl: *mut libc::c_void,\n\n pub err: libc::c_int,\n\n}\n\n\n\n#[repr(C)]\n\npub struct zcm_recv_buf_t {\n\n pub recv_utime: i64,\n\n pub zcm: *mut zcm_t,\n\n pub data: *mut u8,\n\n pub data_size: u32,\n\n}\n\n\n\n#[link(name = \"zcm\")]\n\nextern \"C\" {\n\n pub fn zcm_retcode_name_to_enum(zcm_retcode_name: *const libc::c_char) -> libc::c_int;\n\n\n\n pub fn zcm_create(url: *const libc::c_char) -> *mut zcm_t;\n\n\n\n pub fn zcm_create_trans(zt: *mut zcm_trans_t) -> *mut zcm_t;\n", "file_path": "src/ffi/zcm.rs", "rank": 9, "score": 17263.902314537845 }, { "content": "}\n\n\n\n#[repr(C)]\n\npub enum zcm_return_codes {\n\n ZCM_EOK = 0,\n\n ZCM_EINVALID = 1,\n\n ZCM_EAGAIN = 2,\n\n ZCM_ECONNECT = 3,\n\n ZCM_EINTR = 4,\n\n ZCM_EUNKNOWN = 5,\n\n ZCM_NUM_RETURN_CODES = 6,\n\n}\n\n\n\npub enum zcm_sub_t {}\n\n\n\npub type zcm_msg_handler_t =\n\n extern \"C\" fn(*const zcm_recv_buf_t, *const libc::c_char, *mut libc::c_void);\n\n\n\n#[repr(C)]\n\npub struct zcm_t {\n", "file_path": "src/ffi/zcm.rs", "rank": 10, "score": 17262.546108205406 }, { "content": "\n\n pub fn zcm_destroy(zt: *mut zcm_t);\n\n\n\n pub fn zcm_init(zcm: *mut zcm_t, url: *const libc::c_char) -> libc::c_int;\n\n\n\n pub fn zcm_init_trans(zcm: *mut zcm_t, zt: *mut zcm_trans_t) -> libc::c_int;\n\n\n\n pub fn zcm_cleanup(zcm: *mut zcm_t);\n\n\n\n pub fn zcm_errno(zcm: *const zcm_t) -> 
libc::c_int;\n\n\n\n pub fn zcm_strerror(zcm: *const zcm_t) -> *const libc::c_char;\n\n\n\n pub fn zcm_strerrno(err: libc::c_int) -> *const libc::c_char;\n\n\n\n pub fn zcm_subscribe(\n\n zcm: *mut zcm_t,\n\n channel: *const libc::c_char,\n\n cb: zcm_msg_handler_t,\n\n usr: *mut libc::c_void,\n", "file_path": "src/ffi/zcm.rs", "rank": 11, "score": 17260.912987883297 }, { "content": "// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\n#![allow(non_camel_case_types)]\n\n\n\nextern crate libc;\n\n\n\nuse ffi::*;\n\n\n\n#[repr(C)]\n\npub enum zcm_type {\n\n ZCM_BLOCKING,\n\n ZCM_NONBLOCKING,\n", "file_path": "src/ffi/zcm.rs", "rank": 12, "score": 17260.147807848236 }, { "content": " pub fn zcm_flush(zcm: *mut zcm_t);\n\n\n\n pub fn zcm_try_flush(zcm: *mut zcm_t) -> libc::c_int;\n\n\n\n pub fn zcm_run(zcm: *mut zcm_t);\n\n\n\n pub fn zcm_start(zcm: *mut zcm_t);\n\n\n\n pub fn zcm_stop(zcm: *mut zcm_t);\n\n\n\n pub fn zcm_try_stop(zcm: *mut zcm_t) -> libc::c_int;\n\n\n\n pub fn zcm_pause(zcm: *mut zcm_t);\n\n\n\n pub fn zcm_resume(zcm: *mut zcm_t);\n\n\n\n pub fn zcm_handle(zcm: *mut zcm_t) -> libc::c_int;\n\n\n\n pub fn zcm_set_queue_size(zcm: *mut zcm_t, numMsgs: u32);\n\n\n\n pub fn zcm_try_set_queue_size(zcm: *mut zcm_t, numMsgs: u32) -> libc::c_int;\n\n\n\n pub fn zcm_handle_nonblock(zcm: *mut 
zcm_t) -> libc::c_int;\n\n}\n\n\n\npub const ZCM_MAJOR_VERSION: libc::c_int = 1;\n\npub const ZCM_MINOR_VERSION: libc::c_int = 0;\n\npub const ZCM_MICRO_VERSION: libc::c_int = 0;\n", "file_path": "src/ffi/zcm.rs", "rank": 13, "score": 17259.584066875526 }, { "content": " ) -> *mut zcm_sub_t;\n\n\n\n pub fn zcm_try_subscribe(\n\n zcm: *mut zcm_t,\n\n channel: *const libc::c_char,\n\n cb: zcm_msg_handler_t,\n\n usr: *mut libc::c_void,\n\n ) -> *mut zcm_sub_t;\n\n\n\n pub fn zcm_unsubscribe(zcm: *mut zcm_t, sub: *mut zcm_sub_t) -> libc::c_int;\n\n\n\n pub fn zcm_try_unsubscribe(zcm: *mut zcm_t, sub: *mut zcm_sub_t) -> libc::c_int;\n\n\n\n pub fn zcm_publish(\n\n zcm: *mut zcm_t,\n\n channel: *const libc::c_char,\n\n data: *const u8,\n\n len: u32,\n\n ) -> libc::c_int;\n\n\n", "file_path": "src/ffi/zcm.rs", "rank": 14, "score": 17259.301038300222 }, { "content": "// BSD 3-Clause License\n\n//\n\n// Copyright (c) 2018, Gregory Meyer\n\n// All rights reserved.\n\n//\n\n// Redistribution and use in source and binary forms, with or without\n\n// modification, are permitted provided that the following conditions are met:\n\n//\n\n// * Redistributions of source code must retain the above copyright notice, this\n\n// list of conditions and the following disclaimer.\n\n//\n\n// * Redistributions in binary form must reproduce the above copyright notice,\n\n// this list of conditions and the following disclaimer in the documentation\n\n// and/or other materials provided with the distribution.\n\n//\n\n// * Neither the name of the copyright holder nor the names of its\n\n// contributors may be used to endorse or promote products derived from\n\n// this software without specific prior written permission.\n\n//\n\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS\n", "file_path": "src/ffi/zcm.rs", "rank": 15, "score": 17254.387570918025 }, { "content": "extern crate libc;\n\n\n\nuse super::*;\n\n\n\nextern \"C\" fn handler(\n\n buffer: *const 
ffi::zcm_recv_buf_t,\n\n channel: *const libc::c_char,\n\n usr: *mut libc::c_void,\n\n) {\n\n let received_ptr = usr as *mut bool;\n\n let received = &mut unsafe { *received_ptr };\n\n\n\n println!(\n\n \"buffer: {:?}, channel: {:?}, usr: {:?}\",\n\n buffer, channel, usr\n\n );\n\n\n\n *received = true;\n\n}\n\n\n\n#[test]\n", "file_path": "src/tests.rs", "rank": 16, "score": 13.06914747102795 }, { "content": "}\n\n\n\n#[repr(C)]\n\npub struct zcm_trans_t {\n\n pub trans_type: zcm_type,\n\n pub vtbl: *mut zcm_trans_methods_t,\n\n}\n\n\n\n#[repr(C)]\n\npub struct zcm_trans_methods_t {\n\n pub get_mtu: extern \"C\" fn(*mut zcm_trans_t) -> libc::size_t,\n\n pub sendmsg: extern \"C\" fn(*mut zcm_trans_t, zcm_msg_t) -> libc::c_int,\n\n pub recvmsg_enable: extern \"C\" fn(*mut zcm_trans_t, *const libc::c_char, bool) -> libc::c_int,\n\n pub recvmsg: extern \"C\" fn(*mut zcm_trans_t, *mut zcm_msg_t, libc::c_int) -> libc::c_int,\n\n pub update: extern \"C\" fn(*mut zcm_trans_t) -> libc::c_int,\n\n pub destroy: extern \"C\" fn(*mut zcm_trans_t) -> libc::c_int,\n\n}\n\n\n\npub unsafe fn zcm_trans_get_mtu(zt: *mut zcm_trans_t) -> libc::size_t {\n\n ((*(*zt).vtbl).get_mtu)(zt)\n", "file_path": "src/ffi/transport.rs", "rank": 17, "score": 10.462713386079042 }, { "content": "// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\n#![allow(non_camel_case_types)]\n\n\n\nextern crate libc;\n\n\n\nuse ffi::*;\n\n\n\npub type zcm_trans_create_func = extern \"C\" fn(url: *mut zcm_url_t) -> *mut zcm_trans_t;\n\n\n\n#[link(name = \"zcm\")]\n\nextern \"C\" {\n", "file_path": "src/ffi/transport_registrar.rs", "rank": 18, "score": 9.972218744386058 }, { "content": "// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\nextern crate libc;\n\n\n\n#[repr(C)]\n\npub struct zcm_eventlog_event_t {\n\n pub eventnum: i64,\n\n pub timestamp: i64,\n\n pub channellen: i32,\n\n pub channel: *mut libc::c_char,\n\n pub data: *mut u8,\n\n}\n", "file_path": "src/ffi/eventlog.rs", "rank": 19, "score": 9.102663351483276 }, { "content": "}\n\n\n\npub unsafe fn zcm_trans_update(zt: *mut zcm_trans_t) -> libc::c_int {\n\n ((*(*zt).vtbl).update)(zt)\n\n}\n\n\n\npub unsafe fn zcm_trans_destroy(zt: *mut zcm_trans_t) -> libc::c_int {\n\n ((*(*zt).vtbl).destroy)(zt)\n\n}\n", "file_path": "src/ffi/transport.rs", "rank": 20, "score": 8.484323147487949 }, { "content": "}\n\n\n\npub unsafe fn zcm_trans_sendmsg(zt: *mut zcm_trans_t, msg: zcm_msg_t) -> libc::c_int {\n\n ((*(*zt).vtbl).sendmsg)(zt, msg)\n\n}\n\n\n\npub unsafe fn zcm_trans_recvmsg_enable(\n\n zt: *mut zcm_trans_t,\n\n channel: *const libc::c_char,\n\n enable: bool,\n\n) -> libc::c_int {\n\n ((*(*zt).vtbl).recvmsg_enable)(zt, channel, enable)\n\n}\n\n\n\npub unsafe fn zcm_trans_recvmsg(\n\n zt: *mut zcm_trans_t,\n\n msg: *mut zcm_msg_t,\n\n timeout: libc::c_int,\n\n) -> libc::c_int {\n\n ((*(*zt).vtbl).recvmsg)(zt, msg, timeout)\n", "file_path": "src/ffi/transport.rs", "rank": 21, "score": 8.454357371522109 }, { "content": " pub value: [*const libc::c_char; ZCM_OPTS_MAX as usize],\n\n}\n\n\n\npub enum zcm_url_t {}\n\n\n\n#[link(name = \"zcm\")]\n\nextern \"C\" {\n\n pub fn zcm_url_create(url: *const 
libc::c_char) -> *mut zcm_url_t;\n\n\n\n pub fn zcm_url_destroy(u: *mut zcm_url_t);\n\n\n\n pub fn zcm_url_protocol(u: *mut zcm_url_t) -> *const libc::c_char;\n\n\n\n pub fn zcm_url_address(u: *mut zcm_url_t) -> *const libc::c_char;\n\n\n\n pub fn zcm_url_opts(u: *mut zcm_url_t) -> *mut zcm_url_opts_t;\n\n}\n", "file_path": "src/ffi/url.rs", "rank": 22, "score": 8.320482656978577 }, { "content": "// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\nextern crate libc;\n\n\n\npub mod ffi;\n\npub mod zcm;\n\n\n\npub use zcm::{Error, Zcm};\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "src/lib.rs", "rank": 23, "score": 8.297363479608203 }, { "content": "\n\n#[repr(C)]\n\npub struct zcm_eventlog_t {\n\n pub f: *mut libc::FILE,\n\n pub eventcount: i64,\n\n}\n\n\n\n#[link(name = \"zcm\")]\n\nextern \"C\" {\n\n pub fn zcm_eventlog_create(\n\n path: *const libc::c_char,\n\n mode: *const libc::c_char,\n\n ) -> *mut zcm_eventlog_t;\n\n\n\n pub fn zcm_eventlog_destroy(eventlog: *mut zcm_eventlog_t);\n\n\n\n pub fn zcm_eventlog_get_fileptr(eventlog: *mut zcm_eventlog_t) -> *mut libc::FILE;\n\n\n\n pub fn zcm_eventlog_seek_to_timestamp(eventlog: *mut zcm_eventlog_t, ts: i64) -> libc::c_int;\n\n\n", "file_path": "src/ffi/eventlog.rs", "rank": 24, "score": 8.233746364642883 }, { "content": " let pub_ret 
= unsafe {\n\n ffi::zcm_publish(\n\n zcm.as_mut_ptr(),\n\n channel.as_ptr(),\n\n msg_slice.as_ptr(),\n\n msg_slice.len() as u32,\n\n )\n\n };\n\n\n\n assert_eq!(pub_ret, 0);\n\n assert!(received);\n\n\n\n let unsub_ret = unsafe { ffi::zcm_unsubscribe(zcm.as_mut_ptr(), sub) };\n\n\n\n assert_eq!(unsub_ret, 0);\n\n}\n", "file_path": "src/tests.rs", "rank": 25, "score": 8.183216739666868 }, { "content": "// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\n#![allow(non_camel_case_types)]\n\n\n\nextern crate libc;\n\n\n\npub const ZCM_OPTS_MAX: libc::c_int = 128;\n\n\n\n#[repr(C)]\n\npub struct zcm_url_opts_t {\n\n pub numopts: libc::size_t,\n\n pub name: [*const libc::c_char; ZCM_OPTS_MAX as usize],\n", "file_path": "src/ffi/url.rs", "rank": 26, "score": 7.709278961595505 }, { "content": "// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\nextern crate libc;\n\n\n\nuse ffi::*;\n\n\n\n#[repr(C)]\n\npub struct zcm_msg_t {\n\n pub utime: u64,\n\n pub channel: *const libc::c_char,\n\n pub len: libc::size_t,\n\n pub buf: *const u8,\n", "file_path": "src/ffi/transport.rs", "rank": 27, "score": 7.70843017258456 }, { "content": " pub fn zcm_eventlog_read_next_event(eventlog: *mut zcm_eventlog_t)\n\n -> *mut zcm_eventlog_event_t;\n\n\n\n pub fn zcm_eventlog_read_prev_event(eventlog: *mut zcm_eventlog_t)\n\n -> *mut zcm_eventlog_event_t;\n\n\n\n pub fn zcm_eventlog_read_event_at_offset(\n\n eventlog: *mut zcm_eventlog_t,\n\n offset: libc::off_t,\n\n ) -> *mut zcm_eventlog_event_t;\n\n\n\n pub fn zcm_eventlog_free_event(event: *mut zcm_eventlog_event_t);\n\n\n\n pub fn zcm_eventlog_write_event(\n\n eventlog: *mut zcm_eventlog_t,\n\n event: *const zcm_eventlog_event_t,\n\n ) -> libc::c_int;\n\n}\n", "file_path": "src/ffi/eventlog.rs", "rank": 28, "score": 6.731037063817069 }, { "content": "// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE\n\n// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\n\n// DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE\n\n// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL\n\n// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR\n\n// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER\n\n// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,\n\n// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n\n// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n\n\n\npub mod eventlog;\n\npub mod transport;\n\npub mod transport_registrar;\n\npub mod url;\n\npub mod zcm;\n\n\n\npub use self::eventlog::*;\n\npub use self::transport::*;\n\npub use self::transport_registrar::*;\n\npub use self::url::*;\n\npub use self::zcm::*;\n", "file_path": "src/ffi/mod.rs", "rank": 29, "score": 5.966529072375669 }, { "content": " pub fn zcm_transport_register(\n\n name: *const libc::c_char,\n\n desc: *const libc::c_char,\n\n creator: zcm_trans_create_func,\n\n );\n\n\n\n pub fn zcm_transport_find(name: *const libc::c_char) -> zcm_trans_create_func;\n\n\n\n pub fn zcm_transport_help(f: *mut libc::FILE);\n\n}\n", "file_path": "src/ffi/transport_registrar.rs", "rank": 30, "score": 5.7406215120466415 }, { "content": "// BSD 3-Clause License\n\n//\n\n// Copyright (c) 2018, Gregory Meyer\n\n// All rights reserved.\n\n//\n\n// Redistribution and use in source and binary forms, with or without\n\n// modification, are permitted provided that the following conditions are met:\n\n//\n\n// * Redistributions of source code must retain the above copyright notice, this\n\n// list of conditions and the following disclaimer.\n\n//\n\n// * Redistributions in binary form must reproduce the above copyright notice,\n\n// this list of conditions and the following disclaimer in the documentation\n\n// and/or other materials provided with the distribution.\n\n//\n\n// * Neither the name of the copyright holder nor the 
names of its\n\n// contributors may be used to endorse or promote products derived from\n\n// this software without specific prior written permission.\n\n//\n\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS\n", "file_path": "src/ffi/transport_registrar.rs", "rank": 31, "score": 1.8456350568657776 }, { "content": "// BSD 3-Clause License\n\n//\n\n// Copyright (c) 2018, Gregory Meyer\n\n// All rights reserved.\n\n//\n\n// Redistribution and use in source and binary forms, with or without\n\n// modification, are permitted provided that the following conditions are met:\n\n//\n\n// * Redistributions of source code must retain the above copyright notice, this\n\n// list of conditions and the following disclaimer.\n\n//\n\n// * Redistributions in binary form must reproduce the above copyright notice,\n\n// this list of conditions and the following disclaimer in the documentation\n\n// and/or other materials provided with the distribution.\n\n//\n\n// * Neither the name of the copyright holder nor the names of its\n\n// contributors may be used to endorse or promote products derived from\n\n// this software without specific prior written permission.\n\n//\n\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS\n", "file_path": "src/ffi/url.rs", "rank": 32, "score": 1.8456350568657776 }, { "content": "// BSD 3-Clause License\n\n//\n\n// Copyright (c) 2018, Gregory Meyer\n\n// All rights reserved.\n\n//\n\n// Redistribution and use in source and binary forms, with or without\n\n// modification, are permitted provided that the following conditions are met:\n\n//\n\n// * Redistributions of source code must retain the above copyright notice, this\n\n// list of conditions and the following disclaimer.\n\n//\n\n// * Redistributions in binary form must reproduce the above copyright notice,\n\n// this list of conditions and the following disclaimer in the documentation\n\n// and/or other materials provided with the 
distribution.\n\n//\n\n// * Neither the name of the copyright holder nor the names of its\n\n// contributors may be used to endorse or promote products derived from\n\n// this software without specific prior written permission.\n\n//\n\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS\n", "file_path": "src/ffi/transport.rs", "rank": 33, "score": 1.8456350568657776 }, { "content": "// BSD 3-Clause License\n\n//\n\n// Copyright (c) 2018, Gregory Meyer\n\n// All rights reserved.\n\n//\n\n// Redistribution and use in source and binary forms, with or without\n\n// modification, are permitted provided that the following conditions are met:\n\n//\n\n// * Redistributions of source code must retain the above copyright notice, this\n\n// list of conditions and the following disclaimer.\n\n//\n\n// * Redistributions in binary form must reproduce the above copyright notice,\n\n// this list of conditions and the following disclaimer in the documentation\n\n// and/or other materials provided with the distribution.\n\n//\n\n// * Neither the name of the copyright holder nor the names of its\n\n// contributors may be used to endorse or promote products derived from\n\n// this software without specific prior written permission.\n\n//\n\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS\n", "file_path": "src/lib.rs", "rank": 34, "score": 1.8456350568657776 }, { "content": "// BSD 3-Clause License\n\n//\n\n// Copyright (c) 2018, Gregory Meyer\n\n// All rights reserved.\n\n//\n\n// Redistribution and use in source and binary forms, with or without\n\n// modification, are permitted provided that the following conditions are met:\n\n//\n\n// * Redistributions of source code must retain the above copyright notice, this\n\n// list of conditions and the following disclaimer.\n\n//\n\n// * Redistributions in binary form must reproduce the above copyright notice,\n\n// this list of conditions and the following disclaimer in the 
documentation\n\n// and/or other materials provided with the distribution.\n\n//\n\n// * Neither the name of the copyright holder nor the names of its\n\n// contributors may be used to endorse or promote products derived from\n\n// this software without specific prior written permission.\n\n//\n\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS\n", "file_path": "src/ffi/mod.rs", "rank": 35, "score": 1.8456350568657776 }, { "content": "// BSD 3-Clause License\n\n//\n\n// Copyright (c) 2018, Gregory Meyer\n\n// All rights reserved.\n\n//\n\n// Redistribution and use in source and binary forms, with or without\n\n// modification, are permitted provided that the following conditions are met:\n\n//\n\n// * Redistributions of source code must retain the above copyright notice, this\n\n// list of conditions and the following disclaimer.\n\n//\n\n// * Redistributions in binary form must reproduce the above copyright notice,\n\n// this list of conditions and the following disclaimer in the documentation\n\n// and/or other materials provided with the distribution.\n\n//\n\n// * Neither the name of the copyright holder nor the names of its\n\n// contributors may be used to endorse or promote products derived from\n\n// this software without specific prior written permission.\n\n//\n\n// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS AS IS\n", "file_path": "src/ffi/eventlog.rs", "rank": 36, "score": 1.8456350568657776 } ]
Rust
crates/graphics/src/api/vulkan/texture.rs
gents83/NRG
62743a54ac873a8dea359f3816e24c189a323ebb
use super::{ copy_buffer_to_image, copy_from_buffer, copy_image_to_buffer, copy_to_buffer, create_buffer, create_image, create_image_view, destroy_buffer, }; use super::{device::BackendDevice, find_depth_format}; use crate::api::backend::physical_device::BackendPhysicalDevice; use crate::{Area, TEXTURE_CHANNEL_COUNT}; use vulkan_bindings::*; pub struct BackendTexture { width: u32, height: u32, layers_count: u32, texture_image: VkImage, texture_image_memory: VkDeviceMemory, texture_image_view: VkImageView, texture_sampler: VkSampler, } unsafe impl Send for BackendTexture {} unsafe impl Sync for BackendTexture {} impl BackendTexture { pub fn width(&self) -> u32 { self.width } pub fn height(&self) -> u32 { self.height } pub fn layers_count(&self) -> u32 { self.layers_count } pub fn create( device: &BackendDevice, physical_device: &BackendPhysicalDevice, width: u32, height: u32, layers_count: u32, ) -> Self { let mut texture = Self { width, height, layers_count, texture_image: ::std::ptr::null_mut(), texture_image_memory: ::std::ptr::null_mut(), texture_image_view: ::std::ptr::null_mut(), texture_sampler: ::std::ptr::null_mut(), }; texture.create_texture_image( device, physical_device, VkFormat_VK_FORMAT_R8G8B8A8_UNORM, layers_count, 0, VkImageAspectFlagBits_VK_IMAGE_ASPECT_COLOR_BIT, ); texture.create_texture_sampler(device, physical_device); texture } pub fn create_as_render_target( device: &BackendDevice, physical_device: &BackendPhysicalDevice, width: u32, height: u32, layers_count: u32, is_depth: bool, ) -> Self { let mut texture = Self { width, height, layers_count, texture_image: ::std::ptr::null_mut(), texture_image_memory: ::std::ptr::null_mut(), texture_image_view: ::std::ptr::null_mut(), texture_sampler: ::std::ptr::null_mut(), }; let format = if is_depth { find_depth_format(**physical_device) } else { VkFormat_VK_FORMAT_R8G8B8A8_UNORM }; let specific_flags = if is_depth { VkImageUsageFlagBits_VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT as _ } else { 
VkImageUsageFlagBits_VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT as _ }; let aspect_flags = if is_depth { (VkImageAspectFlagBits_VK_IMAGE_ASPECT_DEPTH_BIT | VkImageAspectFlagBits_VK_IMAGE_ASPECT_STENCIL_BIT) as _ } else { VkImageAspectFlagBits_VK_IMAGE_ASPECT_COLOR_BIT as _ }; texture.create_texture_image( device, physical_device, format, layers_count, specific_flags, aspect_flags, ); texture.create_texture_sampler(device, physical_device); texture } pub fn destroy(&self, device: &BackendDevice) { unsafe { vkDestroySampler.unwrap()(**device, self.texture_sampler, ::std::ptr::null_mut()); vkDestroyImageView.unwrap()(**device, self.texture_image_view, ::std::ptr::null_mut()); vkDestroyImage.unwrap()(**device, self.texture_image, ::std::ptr::null_mut()); vkFreeMemory.unwrap()(**device, self.texture_image_memory, ::std::ptr::null_mut()); } } pub fn get_descriptor(&self) -> VkDescriptorImageInfo { VkDescriptorImageInfo { imageLayout: VkImageLayout_VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, imageView: self.texture_image_view, sampler: self.texture_sampler, } } pub fn get_image(&self) -> VkImage { self.texture_image } pub fn get_image_view(&self) -> VkImageView { self.texture_image_view } pub fn add_in_layer( &mut self, device: &BackendDevice, physical_device: &BackendPhysicalDevice, index: u32, area: &Area, image_data: &[u8], ) { sabi_profiler::scoped_profile!("texture::add_in_layer"); if self.width < area.width || self.height < area.height { panic!("Image resolution is different from texture one"); } let image_size: VkDeviceSize = (area.width * area.height * TEXTURE_CHANNEL_COUNT) as _; let flags = VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; let mut staging_buffer: VkBuffer = ::std::ptr::null_mut(); let mut staging_buffer_memory: VkDeviceMemory = ::std::ptr::null_mut(); create_buffer( device, physical_device, image_size as _, VkBufferUsageFlagBits_VK_BUFFER_USAGE_TRANSFER_SRC_BIT as _, flags as _, 
&mut staging_buffer, &mut staging_buffer_memory, ); copy_from_buffer(device, &mut staging_buffer_memory, 0, image_data); copy_buffer_to_image( device, staging_buffer, self.texture_image, index, self.layers_count, area, ); destroy_buffer(device, &staging_buffer, &staging_buffer_memory); } pub fn get_from_layer( &self, device: &BackendDevice, physical_device: &BackendPhysicalDevice, index: u32, area: &Area, image_data: &mut [u8], ) { sabi_profiler::scoped_profile!("texture::get_from_layer"); if self.width < area.width || self.height < area.height { panic!("Image resolution is different from texture one"); } let image_size: VkDeviceSize = (area.width * area.height * TEXTURE_CHANNEL_COUNT) as _; let flags = VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_HOST_CACHED_BIT; let mut staging_buffer: VkBuffer = ::std::ptr::null_mut(); let mut staging_buffer_memory: VkDeviceMemory = ::std::ptr::null_mut(); create_buffer( device, physical_device, image_size as _, VkBufferUsageFlagBits_VK_BUFFER_USAGE_TRANSFER_DST_BIT as _, flags as _, &mut staging_buffer, &mut staging_buffer_memory, ); copy_image_to_buffer( device, self.texture_image, staging_buffer, index, self.layers_count, area, ); copy_to_buffer(device, &mut staging_buffer_memory, 0, image_data); destroy_buffer(device, &staging_buffer, &staging_buffer_memory); } fn create_texture_image( &mut self, device: &BackendDevice, physical_device: &BackendPhysicalDevice, format: VkFormat, layers_count: u32, specific_flags: i32, aspect_flags: i32, ) { let flags = specific_flags | VkImageUsageFlagBits_VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VkImageUsageFlagBits_VK_IMAGE_USAGE_TRANSFER_DST_BIT | VkImageUsageFlagBits_VK_IMAGE_USAGE_SAMPLED_BIT; let (device_image, device_image_memory) = create_image( device, physical_device, (self.width, self.height, format), VkImageTiling_VK_IMAGE_TILING_OPTIMAL, flags as _, VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT as _, 
layers_count, ); self.texture_image = device_image; self.texture_image_memory = device_image_memory; self.texture_image_view = create_image_view( **device, self.texture_image, format, aspect_flags as _, layers_count, ); } fn create_texture_sampler( &mut self, device: &BackendDevice, physical_device: &BackendPhysicalDevice, ) { let properties = physical_device.get_properties(); let sampler_info = VkSamplerCreateInfo { sType: VkStructureType_VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, pNext: ::std::ptr::null_mut(), flags: 0, magFilter: VkFilter_VK_FILTER_LINEAR, minFilter: VkFilter_VK_FILTER_LINEAR, mipmapMode: VkSamplerMipmapMode_VK_SAMPLER_MIPMAP_MODE_LINEAR, addressModeU: VkSamplerAddressMode_VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, addressModeV: VkSamplerAddressMode_VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, addressModeW: VkSamplerAddressMode_VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, mipLodBias: 0.0, anisotropyEnable: VK_TRUE, maxAnisotropy: properties.limits.maxSamplerAnisotropy, compareEnable: VK_FALSE, compareOp: VkCompareOp_VK_COMPARE_OP_NEVER, minLod: 0.0, maxLod: 1.0, borderColor: VkBorderColor_VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, unnormalizedCoordinates: VK_FALSE, }; self.texture_sampler = unsafe { let mut option = ::std::mem::MaybeUninit::uninit(); assert_eq!( VkResult_VK_SUCCESS, vkCreateSampler.unwrap()( **device, &sampler_info, ::std::ptr::null_mut(), option.as_mut_ptr() ) ); option.assume_init() }; } }
use super::{ copy_buffer_to_image, copy_from_buffer, copy_image_to_buffer, copy_to_buffer, create_buffer, create_image, create_image_view, destroy_buffer, }; use super::{device::BackendDevice, find_depth_format}; use crate::api::backend::physical_device::BackendPhysicalDevice; use crate::{Area, TEXTURE_CHANNEL_COUNT}; use vulkan_bindings::*; pub struct BackendTexture { width: u32, height: u32, layers_count: u32, texture_image: VkImage, texture_image_memory: VkDeviceMemory, texture_image_view: VkImageView, texture_sampler: VkSampler, } unsafe impl Send for BackendTexture {} unsafe impl Sync for BackendTexture {} impl BackendTexture { pub fn width(&self) -> u32 { self.width } pub fn height(&self) -> u32 { self.height } pub fn layers_count(&self) -> u32 { self.layers_count } pub fn create( device: &BackendDevice, physical_device: &BackendPhysicalDevice, width: u32, height: u32, layers_count: u32, ) -> Self { let mut texture = Self { width, height, layers_count, texture_image: ::std::ptr::null_mut(), texture_image_memory: ::std::ptr::null_mut(), texture_image_view: ::std::ptr::null_mut(), texture_sampler: ::std::ptr::null_mut(), }; texture.create_texture_image( device, physical_device, VkFormat_VK_FORMAT_R8G8B8A8_UNORM, layers_count, 0, VkImageAspectFlagBits_VK_IMAGE_ASPECT_COLOR_BIT, ); texture.create_texture_sampler(device, physical_device); texture } pub fn create_as_render_target( device: &BackendDevice, physical_device: &BackendPhysicalDevice, width: u32, height: u32, layers_count: u32, is_depth: bool, ) -> Self { let mut texture = Self { width, height, layers_count, texture_image: ::std::ptr::null_mut(), texture_image_memory: ::std::ptr::null_mut(), texture_image_view: ::std::ptr::null_mut(), texture_sampler: ::std::ptr::null_mut(), }; let format = if is_depth { find_depth_format(**physical_device) } else { VkFormat_VK_FORMAT_R8G8B8A8_UNORM }; let specific_flags = if is_depth { VkImageUsageFlagBits_VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT as _ } else { 
VkImageUsageFlagBits_VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT as _ }; let aspect_flags = if is_depth { (VkImageAspectFlagBits_VK_IMAGE_ASPECT_DEPTH_BIT | VkImageAspectFlagBits_VK_IMAGE_ASPECT_STENCIL_BIT) as _ } else { VkImageAspectFlagBits_VK_IMAGE_ASPECT_COLOR_BIT as _ }; texture.create_texture_imag
pub fn destroy(&self, device: &BackendDevice) { unsafe { vkDestroySampler.unwrap()(**device, self.texture_sampler, ::std::ptr::null_mut()); vkDestroyImageView.unwrap()(**device, self.texture_image_view, ::std::ptr::null_mut()); vkDestroyImage.unwrap()(**device, self.texture_image, ::std::ptr::null_mut()); vkFreeMemory.unwrap()(**device, self.texture_image_memory, ::std::ptr::null_mut()); } } pub fn get_descriptor(&self) -> VkDescriptorImageInfo { VkDescriptorImageInfo { imageLayout: VkImageLayout_VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, imageView: self.texture_image_view, sampler: self.texture_sampler, } } pub fn get_image(&self) -> VkImage { self.texture_image } pub fn get_image_view(&self) -> VkImageView { self.texture_image_view } pub fn add_in_layer( &mut self, device: &BackendDevice, physical_device: &BackendPhysicalDevice, index: u32, area: &Area, image_data: &[u8], ) { sabi_profiler::scoped_profile!("texture::add_in_layer"); if self.width < area.width || self.height < area.height { panic!("Image resolution is different from texture one"); } let image_size: VkDeviceSize = (area.width * area.height * TEXTURE_CHANNEL_COUNT) as _; let flags = VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_HOST_COHERENT_BIT; let mut staging_buffer: VkBuffer = ::std::ptr::null_mut(); let mut staging_buffer_memory: VkDeviceMemory = ::std::ptr::null_mut(); create_buffer( device, physical_device, image_size as _, VkBufferUsageFlagBits_VK_BUFFER_USAGE_TRANSFER_SRC_BIT as _, flags as _, &mut staging_buffer, &mut staging_buffer_memory, ); copy_from_buffer(device, &mut staging_buffer_memory, 0, image_data); copy_buffer_to_image( device, staging_buffer, self.texture_image, index, self.layers_count, area, ); destroy_buffer(device, &staging_buffer, &staging_buffer_memory); } pub fn get_from_layer( &self, device: &BackendDevice, physical_device: &BackendPhysicalDevice, index: u32, area: &Area, image_data: &mut [u8], ) { 
sabi_profiler::scoped_profile!("texture::get_from_layer"); if self.width < area.width || self.height < area.height { panic!("Image resolution is different from texture one"); } let image_size: VkDeviceSize = (area.width * area.height * TEXTURE_CHANNEL_COUNT) as _; let flags = VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_HOST_CACHED_BIT; let mut staging_buffer: VkBuffer = ::std::ptr::null_mut(); let mut staging_buffer_memory: VkDeviceMemory = ::std::ptr::null_mut(); create_buffer( device, physical_device, image_size as _, VkBufferUsageFlagBits_VK_BUFFER_USAGE_TRANSFER_DST_BIT as _, flags as _, &mut staging_buffer, &mut staging_buffer_memory, ); copy_image_to_buffer( device, self.texture_image, staging_buffer, index, self.layers_count, area, ); copy_to_buffer(device, &mut staging_buffer_memory, 0, image_data); destroy_buffer(device, &staging_buffer, &staging_buffer_memory); } fn create_texture_image( &mut self, device: &BackendDevice, physical_device: &BackendPhysicalDevice, format: VkFormat, layers_count: u32, specific_flags: i32, aspect_flags: i32, ) { let flags = specific_flags | VkImageUsageFlagBits_VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VkImageUsageFlagBits_VK_IMAGE_USAGE_TRANSFER_DST_BIT | VkImageUsageFlagBits_VK_IMAGE_USAGE_SAMPLED_BIT; let (device_image, device_image_memory) = create_image( device, physical_device, (self.width, self.height, format), VkImageTiling_VK_IMAGE_TILING_OPTIMAL, flags as _, VkMemoryPropertyFlagBits_VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT as _, layers_count, ); self.texture_image = device_image; self.texture_image_memory = device_image_memory; self.texture_image_view = create_image_view( **device, self.texture_image, format, aspect_flags as _, layers_count, ); } fn create_texture_sampler( &mut self, device: &BackendDevice, physical_device: &BackendPhysicalDevice, ) { let properties = physical_device.get_properties(); let sampler_info = VkSamplerCreateInfo { sType: 
VkStructureType_VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO, pNext: ::std::ptr::null_mut(), flags: 0, magFilter: VkFilter_VK_FILTER_LINEAR, minFilter: VkFilter_VK_FILTER_LINEAR, mipmapMode: VkSamplerMipmapMode_VK_SAMPLER_MIPMAP_MODE_LINEAR, addressModeU: VkSamplerAddressMode_VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, addressModeV: VkSamplerAddressMode_VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, addressModeW: VkSamplerAddressMode_VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE, mipLodBias: 0.0, anisotropyEnable: VK_TRUE, maxAnisotropy: properties.limits.maxSamplerAnisotropy, compareEnable: VK_FALSE, compareOp: VkCompareOp_VK_COMPARE_OP_NEVER, minLod: 0.0, maxLod: 1.0, borderColor: VkBorderColor_VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK, unnormalizedCoordinates: VK_FALSE, }; self.texture_sampler = unsafe { let mut option = ::std::mem::MaybeUninit::uninit(); assert_eq!( VkResult_VK_SUCCESS, vkCreateSampler.unwrap()( **device, &sampler_info, ::std::ptr::null_mut(), option.as_mut_ptr() ) ); option.assume_init() }; } }
e( device, physical_device, format, layers_count, specific_flags, aspect_flags, ); texture.create_texture_sampler(device, physical_device); texture }
function_block-function_prefixed
[]
Rust
src/main.rs
KisaragiEffective/webhook-handler
d31f1df94e5f0c8e445ef6bdcbb8c63b0b7283d8
#![warn(clippy::pedantic, clippy::nursery)] #![deny(type_alias_bounds, legacy_derive_helpers, late_bound_lifetime_arguments)] mod payload; mod call; mod config; mod serde_integration; mod generic_format_io; use std::any::Any; use std::borrow::Borrow; use std::fs::File; use std::io::BufReader; use std::marker::PhantomData; use once_cell::sync::OnceCell; use std::sync::Arc; use actix_web::{App, guard, HttpResponse, HttpServer, Responder, web}; use actix_web::web::JsonConfig; use anyhow::Context; use log::{info, trace}; use serde::{Deserialize, Deserializer, Serialize}; use rustls::{Certificate, PrivateKey, ServerConfig}; use rustls_pemfile::{certs, pkcs8_private_keys}; use generic_format_io::handler; use generic_format_io::handler::JsonHandler; use crate::generic_format_io::outgoing::GenericOutgoingSerializer; use crate::payload::todoist::{TodoistEvent, TodoistPayload}; use crate::payload::discord::{DiscordWebhookPayload, Embed, EmbedCollection, EmbedField, EmbedFields}; use crate::call::api_key::ApiKey; use crate::config::config::Config; type PhantomLifetime<'a> = PhantomData<&'a ()>; fn setup_logger() -> Result<(), fern::InitError> { use fern::colors::*; let mut colors = ColoredLevelConfig::new(); fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "{}[{}][{}] {}", chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), record.target(), colors.color(record.level()), message )) }) .level(log::LevelFilter::Trace) .chain(std::io::stdout()) .chain(fern::log_file("output.log")?) 
.apply()?; Ok(()) } fn todoist_to_webhook(incoming_data: TodoistPayload) -> DiscordWebhookPayload { let username = Some("Todoist".to_string()); let avatar_url = Some("https://upload.wikimedia.org/wikipedia/commons/thumb/e/e1/Cib-todoist_%28CoreUI_Icons_v1.0.0%29.svg/240px-Cib-todoist_%28CoreUI_Icons_v1.0.0%29.svg.png".to_string()); let content = "abx".to_string(); let tts = false; match incoming_data.event { TodoistEvent::NoteAdded(note) => { DiscordWebhookPayload { content, username, avatar_url, tts, embeds: EmbedCollection(vec![ Embed { title: Some("Note added".to_string()), description: None, url: Some("test".to_string()), color: Some(0xFF34eb5e), footer: None, image: None, thumbnail: None, video: None, provider: None, author: None, fields: (EmbedFields(vec![EmbedField { name: "description".to_string(), value: note.content }])) } ]), components: Default::default() } } _ => unreachable!("oops") } } static RUNNING_CONFIG: OnceCell<Config> = OnceCell::new(); #[actix_web::main] async fn main() -> std::io::Result<()> { println!("starting"); match setup_logger().context("failed to setup logger") { Ok(_) => {} Err(err) => { eprintln!("failed to initialize logger: {:?}", err); } } let mut config = { trace!("loading cert.pem"); let cert_file = &mut BufReader::new(File::open("cert.pem").unwrap()); let cert_chain = certs(cert_file).unwrap().iter().map(|a| Certificate(a.clone())).collect(); trace!("loading key.pem"); let key_file = &mut BufReader::new(File::open("key.pem").unwrap()); let mut keys = pkcs8_private_keys(key_file).unwrap().iter().map(|x| PrivateKey(x.clone())).collect::<Vec<_>>(); if keys.is_empty() { eprintln!("Could not locate PKCS 8 private keys."); std::process::exit(1); } ServerConfig::builder() .with_safe_defaults() .with_no_client_auth() .with_single_cert(cert_chain, keys.remove(0)).unwrap() }; trace!("Reading config..."); let running_config = File::open("data/config.json").unwrap(); 
RUNNING_CONFIG.set(serde_json::from_reader(BufReader::new(running_config)).unwrap()); trace!("building HttpServer"); let mut http_server = HttpServer::new(|| { App::new() .wrap(actix_web::middleware::Logger::default()) .app_data( JsonConfig::default().error_handler(handler::json_error_handler) ) .service( web::resource("/api/from/todoist/to/discord") .route( web::post() .guard(guard::Header("content-type", "application/json")) .to(|a, b| handler::handle(Arc::new(JsonHandler::new( RUNNING_CONFIG.get().unwrap().discord_webhook.clone().unwrap(), todoist_to_webhook )), a, b)) ) .route( web::post() .to(|| { HttpResponse::BadRequest().body("Content-Type header must be included") }) ) ) }); trace!("binding ports"); http_server .bind_rustls(format!("127.0.0.1:{}", RUNNING_CONFIG.get().unwrap().https_port), config)? .bind(format!("127.0.0.1:{}", RUNNING_CONFIG.get().unwrap().http_port))? .run() .await?; info!("stopped"); Ok(()) }
#![warn(clippy::pedantic, clippy::nursery)] #![deny(type_alias_bounds, legacy_derive_helpers, late_bound_lifetime_arguments)] mod payload; mod call; mod config; mod serde_integration; mod generic_format_io; use std::any::Any; use std::borrow::Borrow; use std::fs::File; use std::io::BufReader; use std::marker::PhantomData; use once_cell::sync::OnceCell; use std::sync::Arc; use actix_web::{App, guard, HttpResponse, HttpServer, Responder, web}; use actix_web::web::JsonConfig; use anyhow::Context; use log::{info, trace}; use serde::{Deserialize, Deserializer, Serialize}; use rustls::{Certificate, PrivateKey, ServerConfig}; use rustls_pemfile::{certs, pkcs8_private_keys}; use generic_format_io::handler; use generic_format_io::handler::JsonHandler; use crate::generic_format_io::outgoing::GenericOutgoingSerializer; use crate::payload::todoist::{TodoistEvent, TodoistPayload}; use crate::payload::discord::{DiscordWebhookPayload, Embed, EmbedCollection, EmbedField, EmbedFields}; use crate::call::api_key::ApiKey; use crate::config::config::Config; type PhantomLifetime<'a> = PhantomData<&'a ()>; fn setup_logger() -> Result<(), fern::InitError> { use fern::colors::*; let mut colors = ColoredLevelConfig::new(); fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "{}[{}][{}] {}", chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), record.target(), colors.color(record.level()), message )) }) .level(log::LevelFilter::Trace) .chain(std::io::stdout()) .chain(fern::log_file("output.log")?) 
.apply()?; Ok(()) } fn todoist_to_webhook(incoming_data: TodoistPayload) -> DiscordWebhookPayload { let username = Some("Todoist".to_string()); let avatar_url = Some("https://upload.wikimedia.org/wikipedia/commons/thumb/e/e1/Cib-todoist_%28CoreUI_Icons_v1.0.0%29.svg/240px-Cib-todoist_%28CoreUI_Icons_v1.0.0%29.svg.png".to_string()); let content = "abx".to_string(); let tts = false; match incoming_data.event { TodoistEvent::NoteAdded(note) => { DiscordWebhookPayload { content, username, avatar_url, tts, embeds: EmbedCollection(vec![ Embed { title: Some("Note added".to_string()), description: None, url: Some("test".to_string()), color: Some(0xFF34eb5e), footer: None, image: None, thumbnail: None, video: None, provider: None, author: None, fields: (EmbedFields(vec![EmbedField { name: "description".to_string(), value: note.content }])) } ]), components: Default::default() } } _ => unreachable!("oops") } } static RUNNING_CONFIG: OnceCell<Config> = OnceCell::new(); #[actix_web::main] async fn main() -> std::io::Result<()> { println!("starting"); match setup_logger().context("failed to setup logger") { Ok(_) => {} Err(err) => { eprintln!("failed to initialize logger: {:?}", err); } } let mut config = { trace!("loading cert.pem"); let cert_file = &mut BufReader::new(File::open("cert.pem").unwrap()); let cert_chain = certs(cert_file).unwrap().iter().map(|a| Certificate(a.clone())).collect(); trace!("loading key.pem"); let key_file = &mut BufReader::new(File::open("key.pem").unwrap()); let mut keys = pkcs8_private_keys(key_file).unwrap().iter().map(|x| PrivateKey(x.clone())).collect::<Vec<_>>(); if keys.is_empty() { eprintln!("Could not locate PKCS 8 private keys."); std::process::exit(1
web::post() .to(|| { HttpResponse::BadRequest().body("Content-Type header must be included") }) ) ) }); trace!("binding ports"); http_server .bind_rustls(format!("127.0.0.1:{}", RUNNING_CONFIG.get().unwrap().https_port), config)? .bind(format!("127.0.0.1:{}", RUNNING_CONFIG.get().unwrap().http_port))? .run() .await?; info!("stopped"); Ok(()) }
); } ServerConfig::builder() .with_safe_defaults() .with_no_client_auth() .with_single_cert(cert_chain, keys.remove(0)).unwrap() }; trace!("Reading config..."); let running_config = File::open("data/config.json").unwrap(); RUNNING_CONFIG.set(serde_json::from_reader(BufReader::new(running_config)).unwrap()); trace!("building HttpServer"); let mut http_server = HttpServer::new(|| { App::new() .wrap(actix_web::middleware::Logger::default()) .app_data( JsonConfig::default().error_handler(handler::json_error_handler) ) .service( web::resource("/api/from/todoist/to/discord") .route( web::post() .guard(guard::Header("content-type", "application/json")) .to(|a, b| handler::handle(Arc::new(JsonHandler::new( RUNNING_CONFIG.get().unwrap().discord_webhook.clone().unwrap(), todoist_to_webhook )), a, b)) ) .route(
function_block-random_span
[ { "content": "pub fn deserialize_iso8601<'de, D: Deserializer<'de>>(deserializer: D) -> Result<DateTime, D::Error> {\n\n use std::str::FromStr;\n\n match String::deserialize(deserializer) {\n\n Ok(a) => {\n\n match DateTime::from_str(a.as_str()) {\n\n Ok(a) => { Ok(a) }\n\n Err(b) => { Err(D::Error::custom(b)) }\n\n }\n\n }\n\n Err(b) => {\n\n Err(D::Error::custom(b))\n\n }\n\n }\n\n}", "file_path": "src/serde_integration/deserializers.rs", "rank": 2, "score": 62708.921908422824 }, { "content": "pub fn deserialize_one_zero_bool<'de, D: Deserializer<'de>>(deserializer: D) -> Result<bool, D::Error> {\n\n match u8::deserialize(deserializer) {\n\n Ok(a) => {\n\n match a {\n\n 0 => Ok(true),\n\n 1 => Ok(false),\n\n _ => Err(serde::de::Error::invalid_value(Unexpected::Unsigned(u64::from(a)),&\"expected 0 or 1\"))\n\n }\n\n },\n\n Err(b) => {\n\n Err(serde::de::Error::custom(b))\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/serde_integration/deserializers.rs", "rank": 3, "score": 61611.35904219451 }, { "content": "pub fn json_error_handler(err: error::JsonPayloadError, _req: &HttpRequest) -> error::Error {\n\n use actix_web::error::JsonPayloadError;\n\n use actix_web::HttpResponse;\n\n\n\n let detail = err.to_string();\n\n let resp = match &err {\n\n JsonPayloadError::ContentType => {\n\n HttpResponse::UnsupportedMediaType().body(detail)\n\n }\n\n JsonPayloadError::Deserialize(json_err) if json_err.is_data() => {\n\n HttpResponse::UnprocessableEntity().body(detail)\n\n }\n\n _ => HttpResponse::BadRequest().body(detail),\n\n };\n\n error::InternalError::from_response(err, resp).into()\n\n}\n", "file_path": "src/generic_format_io/handler.rs", "rank": 5, "score": 49204.72198246851 }, { "content": "use rand::prelude::*;\n\nuse rand_chacha::ChaCha20Rng;\n\nuse serde::{Deserialize, Deserializer, Serialize, Serializer};\n\nuse serde::de::Error;\n\npub struct ApiKey(pub(in crate::call) [u8; 64]);\n\n\n\nimpl ApiKey {\n\n /// 暗号学的に安全な乱数を使用した新しいインスタンスの生成\n\n /// 
暗号学的に安全な乱数を生成することで、予測不可能なアクセストークンが生成されることが保証される。\n\n pub fn generate() -> Self {\n\n let mut csp_rng = ChaCha20Rng::from_entropy();\n\n let mut data = [0u8; 64];\n\n csp_rng.fill_bytes(&mut data);\n\n ApiKey(data)\n\n }\n\n\n\n pub(in crate::call) fn new(slice: [u8; 64]) -> Self {\n\n ApiKey(slice)\n\n }\n\n\n", "file_path": "src/call/api_key.rs", "rank": 6, "score": 43298.49888474292 }, { "content": " fn as_base64(&self) -> String {\n\n base64::encode(self)\n\n }\n\n}\n\n\n\nimpl <'de> Deserialize<'de> for ApiKey {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> {\n\n String::deserialize(deserializer).and_then(|a| base64::decode(a).map_err(|f| D::Error::custom(f))).map(|a| ApiKey::from(a))\n\n }\n\n}\n\n\n\nimpl Serialize for ApiKey {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer {\n\n base64::encode(self).serialize(serializer)\n\n }\n\n}\n", "file_path": "src/call/api_key.rs", "rank": 7, "score": 43298.39415443094 }, { "content": "use crate::ApiKey;\n\n\n\nimpl From<Vec<u8>> for ApiKey {\n\n fn from(a: Vec<u8>) -> Self {\n\n if a.len() != 64 {\n\n panic!(\"ApiKey bytes != 64\")\n\n } else {\n\n let mut x = [0u8; 64];\n\n let mut p = 0usize;\n\n for m in a {\n\n x[p] = m;\n\n p += 1;\n\n }\n\n ApiKey::new(x)\n\n }\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for ApiKey {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.0\n\n }\n\n}", "file_path": "src/call/imp/api_key_impls.rs", "rank": 8, "score": 39498.035786140426 }, { "content": "use serde::Deserialize;\n\nuse iso8601::DateTime;\n\nuse crate::serde_integration::deserializers::*;\n\n\n\n#[derive(Deserialize)]\n\npub(crate) struct Config {\n\n #[serde(deserialize_with = \"deserialize_iso8601\")]\n\n created_at: DateTime,\n\n pub(crate) discord_webhook: Option<String>,\n\n pub http_port: u16,\n\n pub https_port: u16,\n\n}\n", "file_path": "src/config/config.rs", "rank": 9, "score": 29162.89791078781 }, { "content": "pub(crate) mod config;", 
"file_path": "src/config.rs", "rank": 10, "score": 24187.789342314805 }, { "content": "pub(crate) mod api_key;\n\nmod imp;\n", "file_path": "src/call.rs", "rank": 11, "score": 23830.16313678225 }, { "content": "pub(in crate::call) mod api_key_impls;", "file_path": "src/call/imp.rs", "rank": 18, "score": 22629.695632730545 }, { "content": "#[derive(Serialize)]\n\nstruct DiscordWebhookQueryPayload {\n\n #[serde(default = \"_false\")]\n\n wait: bool,\n\n thread_id: Option<ThreadID>,\n\n}\n\n\n", "file_path": "src/payload/discord.rs", "rank": 19, "score": 21802.181908139453 }, { "content": "use iso8601::DateTime;\n\nuse serde::de::{Error, Unexpected};\n\nuse serde::{Deserialize, Deserializer};\n\n\n", "file_path": "src/serde_integration/deserializers.rs", "rank": 20, "score": 21756.31608145855 }, { "content": "pub mod todoist;\n\npub mod discord;", "file_path": "src/payload.rs", "rank": 21, "score": 20401.550388046286 }, { "content": " pub description: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub url: Option<String>,\n\n // #[serde(skip_serializing_if = \"Option::is_none\", serialize_with = \"f\")]\n\n // timestamp: Option<ISO8601DateTime>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub color: Option<u32>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub footer: Option<EmbedFooter>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub image: Option<EmbedImage>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub thumbnail: Option<EmbedThumbnail>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub video: Option<EmbedVideo>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub provider: Option<EmbedProvider>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub author: Option<EmbedAuthor>,\n\n #[serde(default /*, skip_serializing_if = \"Option::is_none\" */)]\n", "file_path": "src/payload/discord.rs", "rank": 22, "score": 19388.947770650477 }, { 
"content": "pub struct EmbedVideo {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n url: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n proxy_url: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n height: Option<usize>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n width: Option<usize>,\n\n}\n\n\n\n/// https://discord.com/developers/docs/resources/channel#embed-object-embed-author-structure\n\n#[derive(Serialize)]\n\npub struct EmbedProvider {\n\n name: Option<String>,\n\n url: Option<String>,\n\n}\n\n\n\n/// https://discord.com/developers/docs/resources/channel#embed-object-embed-author-structure\n\n#[derive(Serialize)]\n", "file_path": "src/payload/discord.rs", "rank": 23, "score": 19383.956308374647 }, { "content": "pub struct EmbedAuthor {\n\n name: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n url: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n icon_url: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n proxy_icon_url: Option<String>,\n\n}\n\n\n\n#[derive(Serialize, Eq, PartialEq, Clone, Hash, Default)]\n\npub struct EmbedFields(pub Vec<EmbedField>);\n\n\n\n/// https://discord.com/developers/docs/resources/channel#embed-object-embed-field-structure\n\n#[derive(Serialize, Eq, PartialEq, Clone, Hash)]\n\npub struct EmbedField {\n\n pub(crate) name: String,\n\n pub(crate) value: String,\n\n // #[serde(skip_serializing_if = \"_false()\")]\n\n // inline: bool,\n\n}\n\n\n\n#[inline]\n\nconst fn _false() -> bool {\n\n false\n\n}", "file_path": "src/payload/discord.rs", "rank": 24, "score": 19383.54436751558 }, { "content": " pub fields: EmbedFields\n\n}\n\n\n\n/// https://discord.com/developers/docs/resources/channel#embed-object-embed-footer-structure\n\n#[derive(Serialize)]\n\npub struct EmbedFooter {\n\n text: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n icon_url: Option<String>,\n\n 
#[serde(skip_serializing_if = \"Option::is_none\")]\n\n proxy_icon_url: Option<String>\n\n}\n\n\n\n//noinspection DuplicatedCode\n\n/// https://discord.com/developers/docs/resources/channel#embed-object-embed-image-structure\n\n#[derive(Serialize)]\n\npub struct EmbedImage {\n\n url: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n proxy_url: Option<String>,\n", "file_path": "src/payload/discord.rs", "rank": 25, "score": 19383.41331606482 }, { "content": " // attatchments: PartialAttachment,\n\n}\n\n\n\n#[derive(Serialize, Default)]\n\npub struct Components(Vec<Component>);\n\n\n\n#[derive(Serialize)]\n\npub struct Component {\n\n\n\n}\n\n\n\n#[derive(Serialize, Default)]\n\npub struct EmbedCollection(pub Vec<Embed>);\n\n\n\n/// https://discord.com/developers/docs/resources/channel#embed-object\n\n#[derive(Serialize, Default)]\n\npub struct Embed {\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub title: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n", "file_path": "src/payload/discord.rs", "rank": 26, "score": 19380.20609137538 }, { "content": " #[serde(skip_serializing_if = \"Option::is_none\")]\n\n height: Option<usize>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n width: Option<usize>,\n\n}\n\n\n\n//noinspection DuplicatedCode\n\n/// https://discord.com/developers/docs/resources/channel#embed-object-embed-thumbnail-structure\n\n#[derive(Serialize)]\n\npub struct EmbedThumbnail {\n\n url: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n proxy_url: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n height: Option<usize>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n width: Option<usize>,\n\n}\n\n\n\n#[derive(Serialize)]\n", "file_path": "src/payload/discord.rs", "rank": 27, "score": 19380.16165484788 }, { "content": "use serde::{Deserialize, Deserializer};\n\nuse serde::de::{Error, Unexpected};\n\nuse iso8601::{DateTime};\n\nuse 
crate::serde_integration::deserializers::*;\n\n\n\n#[derive(Deserialize)]\n\npub struct TodoistPayload {\n\n pub user_id: i64,\n\n #[serde(flatten)]\n\n pub event: TodoistEvent,\n\n #[serde(rename = \"version_number\")]\n\n pub version: String,\n\n pub initiator: TodoistCollaborator\n\n}\n\n\n\n/// for all events, see https://developer.todoist.com/sync/v8/#configuration\n\n#[derive(Deserialize)]\n\n#[serde(tag = \"event_name\")]\n\npub enum TodoistEvent {\n\n // TODO: replace those boilerplate with proc-macro\n", "file_path": "src/payload/todoist.rs", "rank": 28, "score": 19377.30124131905 }, { "content": "}\n\n\n\n\n\n/// https://developer.todoist.com/sync/v8/#file-attachments\n\n#[derive(Deserialize)]\n\npub struct TodoistFileAttachment {\n\n #[serde(rename = \"file_name\")]\n\n name: String,\n\n #[serde(rename = \"file_size\")]\n\n size: usize,\n\n // TODO: MIME type\n\n #[serde(rename = \"file_type\")]\n\n mime: String,\n\n #[serde(rename = \"file_url\")]\n\n url: String,\n\n // TODO: this may be enum\n\n upload_state: String,\n\n}\n\n\n\n#[derive(Deserialize)]\n", "file_path": "src/payload/todoist.rs", "rank": 29, "score": 19376.463695745126 }, { "content": "use serde::Serialize;\n\nuse serde_json::Value;\n\nuse iso8601::DateTime as ISO8601DateTime;\n\n/// for more information, see https://discord.com/developers/docs/resources/webhook#execute-webhook\n\n#[derive(Serialize)]\n", "file_path": "src/payload/discord.rs", "rank": 30, "score": 19375.75453256685 }, { "content": "pub struct Reactions {\n\n #[serde(rename = \"♥\", default = \"none\")]\n\n love: Option<Vec<UserID>>,\n\n #[serde(rename = \"👍\", default = \"none\")]\n\n good: Option<Vec<UserID>>,\n\n}\n\n\n\nconst fn none<T>() -> Option<T> {\n\n None\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct TodoistProject {\n\n // TODO: fill fields\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct TodoistSession {\n\n // TODO: fill fields\n\n}\n", "file_path": "src/payload/todoist.rs", "rank": 31, "score": 
19373.646387568166 }, { "content": "#[derive(Deserialize)]\n\npub struct ProjectID(i64);\n\n\n\n#[derive(Deserialize)]\n\npub struct LegacyProjectID(i64);\n\n\n\n#[derive(Deserialize)]\n\npub struct Due(i64); // TODO: this seems invalid\n\n\n\n#[derive(Deserialize)]\n\npub struct SectionID(i64);\n\n\n\n#[derive(Deserialize)]\n\npub struct SyncID(i64); // TODO: this seems invalid\n\n\n\n#[derive(Deserialize)]\n\npub struct ImageID(i64);\n\n\n\n#[derive(Deserialize)]\n\npub struct TodoistDate;\n\n\n\n#[derive(Deserialize)]\n\npub struct NoteID(i64);\n\n\n\n#[derive(Deserialize)]\n\npub struct LegacyNoteID(i64);", "file_path": "src/payload/todoist.rs", "rank": 32, "score": 19373.016832548306 }, { "content": " #[serde(rename = \"reminder:fired\")]\n\n ReminderFired(\n\n #[serde(rename = \"event_data\")]\n\n TodoistReminder\n\n ),\n\n\n\n\n\n}\n\n\n\n/// please see https://developer.todoist.com/sync/v8/#items\n\n#[derive(Deserialize)]\n\npub struct TodoistItem {\n\n id: TaskID,\n\n legacy_id: Option<LegacyTaskID>,\n\n user_id: UserID,\n\n project_id: ProjectID,\n\n legacy_project_id: Option<LegacyProjectID>,\n\n content: String,\n\n description: String,\n\n due: Due,\n", "file_path": "src/payload/todoist.rs", "rank": 33, "score": 19372.773823421474 }, { "content": " priority: TodoistPriority,\n\n parent_id: Option<TaskID>,\n\n legacy_parent_id: Option<LegacyTaskID>,\n\n child_order: u32,\n\n section_id: SectionID,\n\n day_order: u32,\n\n #[serde(deserialize_with = \"deserialize_one_zero_bool\")]\n\n collapsed: bool,\n\n labels: Vec<TaskID>,\n\n /// The UserID who created the task. 
This value is set to null on tasks created before 2019/10/31.\n\n added_by_uid: Option<UserID>,\n\n assigned_by_uid: UserID,\n\n responsible_uid: Option<UserID>,\n\n #[serde(rename = \"checked\", deserialize_with = \"deserialize_one_zero_bool\")]\n\n completed: bool,\n\n #[serde(deserialize_with = \"deserialize_one_zero_bool\")]\n\n in_history: bool,\n\n #[serde(deserialize_with = \"deserialize_one_zero_bool\")]\n\n is_deleted: bool,\n\n sync_id: Option<SyncID>,\n\n date_completed: TodoistDate,\n\n date_added: TodoistDate,\n\n}\n\n\n\n#[derive(Deserialize)]\n", "file_path": "src/payload/todoist.rs", "rank": 34, "score": 19372.38569791739 }, { "content": "\n\n/// please see https://developer.todoist.com/sync/v8/#item-notes\n\n#[derive(Deserialize)]\n\npub struct TodoistNote {\n\n pub id: NoteID,\n\n pub legacy_id: LegacyNoteID,\n\n pub posted_uid: UserID,\n\n pub item_id: TaskID,\n\n pub legacy_item_id: LegacyTaskID,\n\n pub project_id: ProjectID,\n\n pub legacy_project_id: LegacyProjectID,\n\n pub content: String,\n\n pub file_attachment: TodoistFileAttachment,\n\n pub uids_to_notify: Option<Vec<UserID>>,\n\n #[serde(deserialize_with = \"deserialize_one_zero_bool\")]\n\n pub is_deleted: bool,\n\n /// Posted date\n\n #[serde(deserialize_with = \"deserialize_iso8601\")] // ?\n\n pub posted: DateTime,\n\n pub reactions: Reactions,\n", "file_path": "src/payload/todoist.rs", "rank": 35, "score": 19372.066618417666 }, { "content": "\n\n#[derive(Deserialize)]\n\npub struct TodoistFilter {\n\n // TODO: fill fields\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct TodoistReminder {\n\n // TODO: fill fields\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct TaskID(i64);\n\n\n\n#[derive(Deserialize)]\n\npub struct LegacyTaskID(i64);\n\n\n\n#[derive(Deserialize)]\n\npub struct UserID(i64);\n\n\n", "file_path": "src/payload/todoist.rs", "rank": 36, "score": 19371.639870230323 }, { "content": " #[serde(rename = \"project:added\")]\n\n ProjectAdded(\n\n #[serde(rename = 
\"event_data\")]\n\n TodoistProject\n\n ),\n\n #[serde(rename = \"project:updated\")]\n\n ProjectUpdated(\n\n #[serde(rename = \"event_data\")]\n\n TodoistProject\n\n ),\n\n #[serde(rename = \"project:deleted\")]\n\n ProjectDeleted(\n\n #[serde(rename = \"event_data\")]\n\n TodoistProject\n\n ),\n\n #[serde(rename = \"project:archived\")]\n\n ProjectArchived(\n\n #[serde(rename = \"event_data\")]\n\n TodoistProject\n\n ),\n", "file_path": "src/payload/todoist.rs", "rank": 37, "score": 19368.903256106736 }, { "content": " #[serde(rename = \"label:updated\")]\n\n LabelUpdated(\n\n #[serde(rename = \"event_data\")]\n\n TodoistSession\n\n ),\n\n #[serde(rename = \"filter:added\")]\n\n FilterAdded(\n\n #[serde(rename = \"event_data\")]\n\n TodoistFilter\n\n ),\n\n #[serde(rename = \"filter:deleted\")]\n\n FilterDeleted(\n\n #[serde(rename = \"event_data\")]\n\n TodoistFilter\n\n ),\n\n #[serde(rename = \"filter:updated\")]\n\n FilterUpdated(\n\n #[serde(rename = \"event_data\")]\n\n TodoistFilter\n\n ),\n", "file_path": "src/payload/todoist.rs", "rank": 38, "score": 19368.903256106736 }, { "content": " #[serde(rename = \"section:archived\")]\n\n SectionArchived(\n\n #[serde(rename = \"event_data\")]\n\n TodoistSession\n\n ),\n\n #[serde(rename = \"section:unarchived\")]\n\n SectionUnarchived(\n\n #[serde(rename = \"event_data\")]\n\n TodoistSession\n\n ),\n\n #[serde(rename = \"label:added\")]\n\n LabelAdded(\n\n #[serde(rename = \"event_data\")]\n\n TodoistSession\n\n ),\n\n #[serde(rename = \"label:deleted\")]\n\n LabelDeleted(\n\n #[serde(rename = \"event_data\")]\n\n TodoistSession\n\n ),\n", "file_path": "src/payload/todoist.rs", "rank": 39, "score": 19368.903256106736 }, { "content": " #[serde(rename = \"item:added\")]\n\n ItemAdded(\n\n #[serde(rename = \"event_data\")]\n\n TodoistItem\n\n ),\n\n #[serde(rename = \"item:removed\")]\n\n ItemRemoved(\n\n #[serde(rename = \"event_data\")]\n\n TodoistItem\n\n ),\n\n #[serde(rename = \"item:deleted\")]\n\n 
ItemDeleted(\n\n #[serde(rename = \"event_data\")]\n\n TodoistItem\n\n ),\n\n #[serde(rename = \"item:completed\")]\n\n ItemCompleted(\n\n #[serde(rename = \"event_data\")]\n\n TodoistItem\n\n ),\n", "file_path": "src/payload/todoist.rs", "rank": 40, "score": 19368.903256106736 }, { "content": " #[serde(rename = \"project:unarchived\")]\n\n ProjectUnarchived(\n\n #[serde(rename = \"event_data\")]\n\n TodoistProject\n\n ),\n\n #[serde(rename = \"section:added\")]\n\n SectionAdded(\n\n #[serde(rename = \"event_data\")]\n\n TodoistSession\n\n ),\n\n #[serde(rename = \"section:updated\")]\n\n SectionUpdated(\n\n #[serde(rename = \"event_data\")]\n\n TodoistSession\n\n ),\n\n #[serde(rename = \"section:deleted\")]\n\n SectionDeleted(\n\n #[serde(rename = \"event_data\")]\n\n TodoistSession\n\n ),\n", "file_path": "src/payload/todoist.rs", "rank": 41, "score": 19368.903256106736 }, { "content": " #[serde(rename = \"item:uncompleted\")]\n\n ItemUncompleted(\n\n #[serde(rename = \"event_data\")]\n\n TodoistItem\n\n ),\n\n #[serde(rename = \"note:added\")]\n\n NoteAdded(\n\n #[serde(rename = \"event_data\")]\n\n TodoistNote\n\n ),\n\n #[serde(rename = \"note:updated\")]\n\n NoteUpdated(\n\n #[serde(rename = \"event_data\")]\n\n TodoistNote\n\n ),\n\n #[serde(rename = \"note:deleted\")]\n\n NoteDeleted(\n\n #[serde(rename = \"event_data\")]\n\n TodoistNote\n\n ),\n", "file_path": "src/payload/todoist.rs", "rank": 42, "score": 19368.903256106736 }, { "content": "#[derive(Deserialize)]\n\nenum TodoistPriority {\n\n // raw: 4\n\n P1,\n\n // raw: 3\n\n P2,\n\n // raw: 2\n\n P3,\n\n // raw: 1\n\n P4,\n\n}\n\n\n\n/// please see https://developer.todoist.com/sync/v8/#collaborators\n\n#[derive(Deserialize)]\n\npub struct TodoistCollaborator {\n\n id: UserID,\n\n email: String,\n\n full_name: String,\n\n timezone: String,\n\n image_id: ImageID\n\n}\n", "file_path": "src/payload/todoist.rs", "rank": 43, "score": 17595.460642555146 }, { "content": "#[derive(Serialize, Ord, PartialOrd, 
Eq, PartialEq, Copy, Clone, Hash)]\n\nstruct ThreadID(u64);\n\n\n\n/// for more information, see https://discord.com/developers/docs/resources/webhook\n\n#[derive(Serialize)]\n\npub(crate) struct DiscordWebhookPayload {\n\n pub(crate) content: String,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub(crate) username: Option<String>,\n\n #[serde(skip_serializing_if = \"Option::is_none\")]\n\n pub(crate) avatar_url: Option<String>,\n\n #[serde(default = \"_false\")]\n\n pub(crate) tts: bool,\n\n #[serde(default)]\n\n pub(crate) embeds: EmbedCollection,\n\n // #[serde(default)]\n\n // allowed_mentions: MentionAssertion,\n\n #[serde(default)]\n\n pub(crate) components: Components,\n\n // files: Vec<File>,\n\n // payload_json: Value,\n", "file_path": "src/payload/discord.rs", "rank": 44, "score": 16823.894347849884 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse std::sync::Arc;\n\nuse std::marker::PhantomData;\n\nuse actix_web::{error, HttpRequest, HttpResponse, Responder};\n\nuse actix_web::web::{Json, Query};\n\nuse anyhow::anyhow;\n\nuse log::{error, trace};\n\nuse crate::{ApiKey, GenericOutgoingSerializer, PhantomLifetime};\n\nuse crate::generic_format_io::incoming::GenericIncomingDeserializer;\n\n\n\npub struct GenericHandler<'de, D: Deserialize<'de>, S: Serialize, F: 'static + FnOnce(D) -> S, TD: FnOnce(&'static str) -> D, TS: FnOnce(S) -> &'static str> {\n\n incoming_deserializer: GenericIncomingDeserializer<'de, D, TD>,\n\n outgoing_serializer: GenericOutgoingSerializer<S, TS>,\n\n post_url: &'static str,\n\n mapper: Arc<F>,\n\n __phantom: PhantomLifetime<'de>\n\n}\n\n\n\nimpl <'de, D: Deserialize<'de>, S: Serialize, F: 'static + FnOnce(D) -> S, TD: FnOnce(&'static str) -> D, TS: FnOnce(S) -> &'static str> GenericHandler<'de, D, S, F, TD, TS> {\n\n fn new(post_url: &'static str, incoming_deserializer: TD, mapper: F, outgoing_serializer: TS) -> Self {\n", "file_path": "src/generic_format_io/handler.rs", "rank": 45, "score": 
16.136072080863737 }, { "content": " JsonHandler::<'de, D, S, F> {\n\n to, f: Arc::new(f),\n\n __phantom_d: PhantomData,\n\n __phantom_s: PhantomData,\n\n __phantom_de: PhantomData\n\n }\n\n }\n\n}\n\n\n\n// TODO: input type can be inferred by Content-Type\n\npub async fn handle<'de, D: Deserialize<'de>, S: Serialize, F: 'static + Copy + FnOnce(D) -> S>(\n\n handler: Arc<JsonHandler<'de, D, S, F>>,\n\n Json(incoming_data): actix_web::web::Json<D>,\n\n Query(api_key): actix_web::web::Query<ApiKey>,\n\n) -> impl Responder {\n\n // TODO: api_key=something in query string\n\n trace!(\"enter\");\n\n let client = reqwest::Client::new();\n\n let outgoing_data: &S = &(handler.f)(incoming_data);\n\n let result = client\n", "file_path": "src/generic_format_io/handler.rs", "rank": 46, "score": 14.33586541055764 }, { "content": " .post(&handler.to)\n\n .json(outgoing_data)\n\n .send()\n\n .await\n\n .map(|_| ())\n\n .map_err(|x| anyhow!(x));\n\n match result {\n\n Ok(_) => {\n\n HttpResponse::NoContent()\n\n }\n\n Err(e) => {\n\n error!(\"ERROR!!!: {:?}\", e);\n\n HttpResponse::NotModified()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/generic_format_io/handler.rs", "rank": 47, "score": 10.514167546788718 }, { "content": " GenericHandler {\n\n incoming_deserializer: GenericIncomingDeserializer::new(incoming_deserializer),\n\n outgoing_serializer: GenericOutgoingSerializer::new(outgoing_serializer),\n\n post_url,\n\n mapper: Arc::new(mapper),\n\n __phantom: PhantomData\n\n }\n\n }\n\n}\n\n\n\npub struct JsonHandler<'de, D: Deserialize<'de>, S: Serialize, F: 'static + FnOnce(D) -> S + ?Sized> {\n\n to: String,\n\n f: Arc<F>,\n\n __phantom_de: PhantomLifetime<'de>,\n\n __phantom_s: PhantomData<S>,\n\n __phantom_d: PhantomData<D>\n\n}\n\n\n\nimpl <'de, D: Deserialize<'de>, S: Serialize, F: 'static + FnOnce(D) -> S> JsonHandler<'de, D, S, F> {\n\n pub(crate) fn new(to: String, f: F) -> Self {\n", "file_path": "src/generic_format_io/handler.rs", "rank": 48, "score": 8.924096223533258 
}, { "content": "use serde::Serialize;\n\nuse std::marker::PhantomData;\n\n\n\npub struct GenericOutgoingSerializer<S: Serialize, F: FnOnce(S) -> &'static str> {\n\n f: F,\n\n __phantom: PhantomData<S>\n\n}\n\n\n\nimpl <S: Serialize, F: FnOnce(S) -> &'static str> GenericOutgoingSerializer<S, F> {\n\n pub(crate) fn new(f: F) -> Self {\n\n GenericOutgoingSerializer {\n\n f,\n\n __phantom: PhantomData\n\n }\n\n }\n\n}\n", "file_path": "src/generic_format_io/outgoing.rs", "rank": 49, "score": 8.922749379305264 }, { "content": "use std::marker::PhantomData;\n\nuse serde::Deserialize;\n\nuse crate::PhantomLifetime;\n\n\n\npub struct GenericIncomingDeserializer<'de, D: Deserialize<'de>, F: FnOnce(&'static str) -> D> {\n\n f: F,\n\n __phantom: PhantomLifetime<'de>\n\n}\n\n\n\nimpl <'de, D: Deserialize<'de>, F: FnOnce(&'static str) -> D> GenericIncomingDeserializer<'de, D, F> {\n\n pub(crate) fn new(f: F) -> Self {\n\n GenericIncomingDeserializer {\n\n f,\n\n __phantom: PhantomData\n\n }\n\n }\n\n}", "file_path": "src/generic_format_io/incoming.rs", "rank": 50, "score": 8.750909401359383 }, { "content": "pub(crate) mod deserializers;", "file_path": "src/serde_integration.rs", "rank": 51, "score": 5.0992141213228415 }, { "content": "pub(crate) mod incoming;\n\npub(crate) mod outgoing;\n\npub(crate) mod handler;\n", "file_path": "src/generic_format_io.rs", "rank": 52, "score": 3.1058485843892205 }, { "content": "# webhook-handler\n\n各種Webhook対応サービスをつなげる簡易プロキシ\n\n\n\n## 使用方法\n\n```sh\n\ngit clone https://github.com/KisaragiEffective/webhook-handler\n\ncd webhook-handler\n\ncargo run\n\n```\n\n\n\n## 使用言語/フレームワーク\n\n* [Rust](https://www.rust-lang.org)\n\n* [actix-web](https://actix.rs)\n\n\n\n## 対応サービス\n\n* [Todoist](https://todoist.com)\n\n* [Discord](https://discord.com)\n\n\n\n## ライセンス\n\nApache License 2.0\n\n```\n\n Copyright 2022 KisaragiEffective\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance 
with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n```\n", "file_path": "README.md", "rank": 53, "score": 1.8106880105226382 } ]
Rust
src/mix/sender.rs
hydra-acn/hydra
b5cb31310d1be95202f65ea732574e84555c3365
use futures_util::stream; use log::*; use rand::seq::SliceRandom; use std::collections::HashMap; use std::io::Write; use std::net::SocketAddr; use std::sync::Arc; use std::time::Duration; use tokio::task; use crate::crypto::cprng::thread_cprng; use crate::epoch::current_time; use crate::error::Error; use crate::net::cell::Cell; use crate::net::channel_pool::{MixChannel, RendezvousChannel, TcpChannel}; use crate::net::PacketWithNextHop; use crate::tonic_mix::{SetupPacket, Subscription}; use super::cell_processor::cell_rss_t; use super::directory_client; use super::setup_processor::setup_t; pub type Batch<T> = (Vec<Vec<PacketWithNextHop<T>>>, Option<Duration>); pub type SetupBatch = Batch<SetupPacket>; pub type SubscribeBatch = Batch<Subscription>; pub type CellBatch = Batch<Cell>; macro_rules! send_next_batch { ($state:expr, $queue:ident, $batch_type:ident, $channel_getter:ident, $send_fun:ident) => { let queue = $state.$queue.clone(); let maybe_batch = task::spawn_blocking(move || queue.recv()) .await .expect("Spawn failed"); let batch: $batch_type = match maybe_batch { Ok(b) => b, Err(e) => { error!("Seems like the worker thread is gone: {}", e); break; } }; let deadline = batch.1; let (batch_map, destinations) = sort_by_destination(batch); let channel_map = $state.dir_client.$channel_getter(&destinations).await; for (dst, pkts) in batch_map.into_iter() { match channel_map.get(&dst) { Some(c) => { tokio::spawn($send_fun( $state.dir_client.clone(), c.clone(), pkts, deadline, )); } None => { warn!( "Expected to have a connection by now, dropping packets destined to {}", dst ); () } } } }; } macro_rules! 
define_send_task { ($name:ident, $queue:ident, $batch_type:ident, $channel_getter:ident, $send_fun:ident) => { pub async fn $name(state: Arc<State>) -> Result<(), Error> { loop { send_next_batch!(state, $queue, $batch_type, $channel_getter, $send_fun); } Ok(()) } }; } pub struct State { dir_client: Arc<directory_client::Client>, setup_tx_queue: setup_t::TxQueue, subscribe_tx_queue: setup_t::AltTxQueue, relay_tx_queue: cell_rss_t::TxQueue, } impl State { pub fn new( dir_client: Arc<directory_client::Client>, setup_tx_queue: setup_t::TxQueue, subscribe_tx_queue: setup_t::AltTxQueue, relay_tx_queue: cell_rss_t::TxQueue, ) -> Self { State { dir_client, setup_tx_queue, subscribe_tx_queue, relay_tx_queue, } } } fn sort_by_destination<T>(batch: Batch<T>) -> (HashMap<SocketAddr, Vec<T>>, Vec<SocketAddr>) { let mut batch_map: HashMap<SocketAddr, Vec<T>> = HashMap::new(); for vec in batch.0.into_iter() { for pkt in vec.into_iter() { match batch_map.get_mut(pkt.next_hop()) { Some(vec) => vec.push(pkt.into_inner()), None => { batch_map.insert(*pkt.next_hop(), vec![pkt.into_inner()]); } } } } let destinations: Vec<SocketAddr> = batch_map.keys().cloned().collect(); (batch_map, destinations) } async fn send_setup_packets( dir_client: Arc<directory_client::Client>, mut c: MixChannel, pkts: Vec<SetupPacket>, deadline: Option<Duration>, ) { let shuffle_it = ShuffleIterator::new(pkts, deadline); let mut req = tonic::Request::new(stream::iter(shuffle_it)); req.metadata_mut().insert( "reply-to", dir_client .config() .setup_reply_to() .parse() .expect("Why should this fail?"), ); c.stream_setup_circuit(req) .await .map(|_| ()) .unwrap_or_else(|e| warn!("Creating circuits failed: {}", e)); } async fn send_subscriptions( _dir_client: Arc<directory_client::Client>, mut c: RendezvousChannel, pkts: Vec<Subscription>, _deadline: Option<Duration>, ) { if pkts.len() > 1 { warn!("Expected one subscription to each rendezvous node only"); } for sub in pkts.into_iter() { info!("Sending 
subscriptions for {} circuits", sub.circuits.len()); let req = tonic::Request::new(sub); c.subscribe(req) .await .map(|_| ()) .unwrap_or_else(|e| warn!("Subscription failed: {}", e)); } } async fn relay_cells( _dir_client: Arc<directory_client::Client>, c: TcpChannel, cells: Vec<Cell>, deadline: Option<Duration>, ) { tokio::task::spawn_blocking(move || { let shuffle_it = ShuffleIterator::new(cells, deadline); let mut stream = c.write().expect("Lock poisoned"); for cell in shuffle_it { stream.write_all(cell.buf()).unwrap_or_else(|e| { warn!("Writing to TCP stream failed: {}", e); }); } }) .await .expect("Spawn failed"); } define_send_task!( setup_task, setup_tx_queue, SetupBatch, get_mix_channels, send_setup_packets ); define_send_task!( subscribe_task, subscribe_tx_queue, SubscribeBatch, get_rendezvous_channels, send_subscriptions ); define_send_task!( relay_task, relay_tx_queue, CellBatch, get_relay_channels, relay_cells ); pub async fn run(state: Arc<State>) { let setup_handle = tokio::spawn(setup_task(state.clone())); let subscribe_handle = tokio::spawn(subscribe_task(state.clone())); let relay_handle = tokio::spawn(relay_task(state.clone())); match tokio::try_join!(setup_handle, subscribe_handle, relay_handle,) { Ok(_) => (), Err(e) => error!("Something panicked: {}", e), } } pub struct ShuffleIterator<T> { idx_vec: Vec<usize>, pkt_vec: Vec<T>, pos: usize, deadline: Option<Duration>, } impl<T> ShuffleIterator<T> { pub fn new(pkt_vec: Vec<T>, deadline: Option<Duration>) -> Self { let mut idx_vec: Vec<usize> = (0..pkt_vec.len()).collect(); idx_vec.shuffle(&mut thread_cprng()); ShuffleIterator { idx_vec, pkt_vec, pos: 0, deadline, } } } impl<T: Default> Iterator for ShuffleIterator<T> { type Item = T; fn next(&mut self) -> Option<T> { if let Some(deadline) = self.deadline { if deadline.checked_sub(current_time()).is_none() { warn!("Sending did not finish in time"); return None; } } if self.pos < self.idx_vec.len() { let pkt = std::mem::take(&mut 
self.pkt_vec[self.idx_vec[self.pos]]); self.pos += 1; Some(pkt) } else { None } } }
use futures_util::stream; use log::*; use rand::seq::SliceRandom; use std::collections::HashMap; use std::io::Write; use std::net::SocketAddr; use std::sync::Arc; use std::time::Duration; use tokio::task; use crate::crypto::cprng::thread_cprng; use crate::epoch::current_time; use crate::error::Error; use crate::net::cell::Cell; use crate::net::channel_pool::{MixChannel, RendezvousChannel, TcpChannel}; use crate::net::PacketWithNextHop; use crate::tonic_mix::{SetupPacket, Subscription}; use super::cell_processor::cell_rss_t; use super::directory_client; use super::setup_processor::setup_t; pub type Batch<T> = (Vec<Vec<PacketWithNextHop<T>>>, Option<Duration>); pub type SetupBatch = Batch<SetupPacket>; pub type SubscribeBatch = Batch<Subscription>; pub type CellBatch = Batch<Cell>; macro_rules! send_next_batch { ($state:expr, $queue:ident, $batch_type:ident, $channel_getter:ident, $send_fun:ident) => { let queue = $state.$queue.clone(); let maybe_batch = task::spawn_blocking(move || queue.recv()) .await .expect("Spawn failed"); let batch: $batch_type = match maybe_batch { Ok(b) => b, Err(e) => { error!("Seems like the worker thread is gone: {}", e); break; } }; let deadline = batch.1; let (batch_map, destinations) = sort_by_destination(batch); let channel_map = $state.dir_client.$channel_getter(&destinations).await; for (dst, pkts) in batch_map.into_iter() { match channel_map.get(&dst) { Some(c) => { tokio::spawn($send_fun( $state.dir_client.clone(), c.clone(), pkts, deadline, )); } None => { warn!( "Expected to have a connection by now, dropping packets destined to {}", dst ); () } } } }; } macro_rules! 
define_send_task { ($name:ident, $queue:ident, $batch_type:ident, $channel_getter:ident, $send_fun:ident) => { pub async fn $name(state: Arc<State>) -> Result<(), Error> { loop { send_next_batch!(state, $queue, $batch_type, $channel_getter, $send_fun); } Ok(()) } }; } pub struct State { dir_client: Arc<directory_client::Client>, setup_tx_queue: setup_t::TxQueue, subscribe_tx_queue: setup_t::AltTxQueue, relay_tx_queue: cell_rss_t::TxQueue, } impl State { pub fn new( dir_client: Arc<directory_client::Client>, setup_tx_queue: setup_t::TxQueue, subscribe_tx_queue: setup_t::AltTxQueue, relay_tx_queue: cell_rss_t::TxQueue, ) -> Self { State { dir_client, setup_tx_queue, subscribe_tx_queue, relay_tx_queue, } } } fn sort_by_destination<T>(batch: Batch<T>) -> (HashMap<SocketAddr, Vec<T>>, Vec<SocketAddr>) { let mut batch_map: HashMap<SocketAddr, Vec<T>> = HashMap::new(); for vec in batch.0.into_iter() { for pkt in vec.into_iter() { match batch_map.get_mut(pkt.next_hop()) { Some(vec) => vec.push(pkt.into_inner()), None => { batch_map.insert(*pkt.next_hop(), vec![pkt.into_inner()]); } } } } let destinations: Vec<SocketAddr> = batch_map.keys().cloned().collect(); (batch_map, destinations) } async fn send_setup_packets( dir_client: Arc<directory_client::Client>, mut c: MixChannel, pkts: Vec<SetupPacket>, deadline: Option<Duration>, ) { let shuffle_it = ShuffleIterator::new(pkts, deadline); let mut req = tonic::Request::new(stream::iter(shuffle_it)); req.metadata_mut().insert( "reply-to", dir_client .config() .setup_reply_to() .parse() .expect("Why should this fail?"), ); c.stream_setup_circuit(req) .await .map(|_| ()) .unwrap_or_else(|e| warn!("Creating circuits failed: {}", e)); } async fn send_subscriptions( _dir_client: Arc<directory_client::Client>, mut c: RendezvousChannel, pkts: Vec<Subscription>, _deadline: Option<Duration>, ) { if pkts.len() > 1 { warn!("Expected one
async fn relay_cells( _dir_client: Arc<directory_client::Client>, c: TcpChannel, cells: Vec<Cell>, deadline: Option<Duration>, ) { tokio::task::spawn_blocking(move || { let shuffle_it = ShuffleIterator::new(cells, deadline); let mut stream = c.write().expect("Lock poisoned"); for cell in shuffle_it { stream.write_all(cell.buf()).unwrap_or_else(|e| { warn!("Writing to TCP stream failed: {}", e); }); } }) .await .expect("Spawn failed"); } define_send_task!( setup_task, setup_tx_queue, SetupBatch, get_mix_channels, send_setup_packets ); define_send_task!( subscribe_task, subscribe_tx_queue, SubscribeBatch, get_rendezvous_channels, send_subscriptions ); define_send_task!( relay_task, relay_tx_queue, CellBatch, get_relay_channels, relay_cells ); pub async fn run(state: Arc<State>) { let setup_handle = tokio::spawn(setup_task(state.clone())); let subscribe_handle = tokio::spawn(subscribe_task(state.clone())); let relay_handle = tokio::spawn(relay_task(state.clone())); match tokio::try_join!(setup_handle, subscribe_handle, relay_handle,) { Ok(_) => (), Err(e) => error!("Something panicked: {}", e), } } pub struct ShuffleIterator<T> { idx_vec: Vec<usize>, pkt_vec: Vec<T>, pos: usize, deadline: Option<Duration>, } impl<T> ShuffleIterator<T> { pub fn new(pkt_vec: Vec<T>, deadline: Option<Duration>) -> Self { let mut idx_vec: Vec<usize> = (0..pkt_vec.len()).collect(); idx_vec.shuffle(&mut thread_cprng()); ShuffleIterator { idx_vec, pkt_vec, pos: 0, deadline, } } } impl<T: Default> Iterator for ShuffleIterator<T> { type Item = T; fn next(&mut self) -> Option<T> { if let Some(deadline) = self.deadline { if deadline.checked_sub(current_time()).is_none() { warn!("Sending did not finish in time"); return None; } } if self.pos < self.idx_vec.len() { let pkt = std::mem::take(&mut self.pkt_vec[self.idx_vec[self.pos]]); self.pos += 1; Some(pkt) } else { None } } }
subscription to each rendezvous node only"); } for sub in pkts.into_iter() { info!("Sending subscriptions for {} circuits", sub.circuits.len()); let req = tonic::Request::new(sub); c.subscribe(req) .await .map(|_| ()) .unwrap_or_else(|e| warn!("Subscription failed: {}", e)); } }
function_block-function_prefixed
[ { "content": "pub fn key_exchange(pk_mix: &Key) -> Result<(Key, Key), Error> {\n\n let (pk, sk) = x448::generate_keypair();\n\n let s = x448::generate_shared_secret(&pk_mix, &sk)?;\n\n Ok((pk, s))\n\n}\n\n\n\npub async fn update_loop(state: Arc<State>) {\n\n loop {\n\n // wait till next update\n\n sleep(Duration::from_secs(30)).await;\n\n state.update();\n\n }\n\n}\n", "file_path": "src/directory/state.rs", "rank": 0, "score": 184094.71906082865 }, { "content": "/// Derive AES and Threefish key from shared secret.\n\npub fn derive_keys(master_key: &Key, nonce: &[u8]) -> Result<(Key, Key), Error> {\n\n // 32 byte AES key for the onion-encrypted part of the setup packet\n\n let aes_info = [42u8];\n\n let aes_key = hkdf_sha256(&master_key, Some(&nonce), Some(&aes_info), 32)?;\n\n // 128 byte Threefish-1024 key for circuit cells\n\n let onion_info = [43u8];\n\n let onion_key = hkdf_sha256(&master_key, Some(&nonce), Some(&onion_info), 128)?;\n\n Ok((aes_key, onion_key))\n\n}\n\n\n\n/// Client view on circuits.\n\npub struct Circuit {\n\n circuit_id: CircuitId,\n\n first_hop: SocketAddr,\n\n threefishies: Vec<Threefish2048>,\n\n tokens: Vec<Token>,\n\n dummy_tokens: Vec<Token>,\n\n}\n\n\n\nimpl Circuit {\n", "file_path": "src/client/circuit.rs", "rank": 1, "score": 170796.40693877338 }, { "content": "pub fn ip_addr_from_slice(a: &[u8]) -> Result<IpAddr, Error> {\n\n match a.len() {\n\n 4 => {\n\n let mut octets: [u8; 4] = [0; 4];\n\n octets.copy_from_slice(&a[..]);\n\n Ok(IpAddr::V4(octets.into()))\n\n }\n\n 16 => {\n\n let mut octets: [u8; 16] = [0; 16];\n\n octets.copy_from_slice(&a[..]);\n\n Ok(IpAddr::V6(octets.into()))\n\n }\n\n _ => Err(Error::InputError(\n\n \"Length of slice does not match a valid IP address length\".to_string(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/net/mod.rs", "rank": 2, "score": 165707.0189738479 }, { "content": "pub fn thread_cprng() -> impl Rng + CryptoRng {\n\n thread_rng()\n\n}\n", "file_path": "src/crypto/cprng.rs", "rank": 3, 
"score": 162114.04073139257 }, { "content": "/// generate shared secret\n\npub fn generate_shared_secret(pk: &Key, sk: &Key) -> Result<Key, Error> {\n\n if pk.len() != KEY_LEN {\n\n return Err(Error::SizeMismatch(format!(\n\n \"Public key has wrong size: {}\",\n\n pk.len()\n\n )));\n\n }\n\n if sk.len() != KEY_LEN {\n\n return Err(Error::SizeMismatch(format!(\n\n \"Secret key has wrong size: {}\",\n\n sk.len()\n\n )));\n\n }\n\n let mut s_vec = vec![0u8; KEY_LEN];\n\n unsafe {\n\n let res = X25519(&mut (s_vec[0]) as *mut u8, sk.head_ptr(), pk.head_ptr());\n\n if res == 0 {\n\n return Err(Error::ExternalError(\"x25519 C call failed\".to_string()));\n\n }\n\n }\n", "file_path": "src/crypto/x25519.rs", "rank": 5, "score": 153165.777744583 }, { "content": "/// generate shared secret\n\npub fn generate_shared_secret(pk: &Key, sk: &Key) -> Result<Key, Error> {\n\n if pk.len() != KEY_LEN {\n\n return Err(Error::SizeMismatch(format!(\n\n \"Public key has wrong size: {}\",\n\n pk.len()\n\n )));\n\n }\n\n if sk.len() != KEY_LEN {\n\n return Err(Error::SizeMismatch(format!(\n\n \"Secret key has wrong size: {}\",\n\n sk.len()\n\n )));\n\n }\n\n let mut s_vec = vec![0u8; KEY_LEN];\n\n unsafe {\n\n let res = x448_int(&mut (s_vec[0]) as *mut u8, pk.head_ptr(), sk.head_ptr());\n\n if res == x448_bindings::FAILURE {\n\n return Err(Error::ExternalError(\"x448 C call failed\".to_string()));\n\n }\n\n }\n", "file_path": "src/crypto/x448.rs", "rank": 6, "score": 153165.77774458303 }, { "content": "pub fn socket_addr_from_slice(addr: &[u8], port: u16) -> Result<SocketAddr, Error> {\n\n ip_addr_from_slice(addr).map(|a| SocketAddr::new(a, port))\n\n}\n\n\n\n/// Wrapping a packet of type `T` with next hop information\n\npub struct PacketWithNextHop<T> {\n\n inner: T,\n\n next_hop: SocketAddr,\n\n}\n\n\n\nimpl<T> PacketWithNextHop<T> {\n\n pub fn new(pkt: T, next_hop: SocketAddr) -> Self {\n\n PacketWithNextHop {\n\n inner: pkt,\n\n next_hop,\n\n }\n\n }\n\n\n\n pub fn into_inner(self) -> T 
{\n\n self.inner\n", "file_path": "src/net/mod.rs", "rank": 7, "score": 150421.7503562594 }, { "content": "pub fn tokens_to_byte_vec(tokens: &[Token]) -> Vec<u8> {\n\n let mut vec = vec![0; size_of::<Token>() * tokens.len()];\n\n let mut i = 0;\n\n for t in tokens.iter() {\n\n LittleEndian::write_u64(&mut vec[i..i + 8], *t);\n\n i += 8;\n\n }\n\n vec\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! delegate_generic {\n\n ($to:ident; $doc:expr; $fnname:ident; $($arg:ident: $type:ty),* => $ret:ty) => {\n\n #[doc = $doc]\n\n pub fn $fnname(&self, $($arg: $type),*) -> $ret {\n\n self.$to.$fnname($($arg),*)\n\n }\n\n };\n\n}\n\n\n", "file_path": "src/defs.rs", "rank": 8, "score": 141033.45239295356 }, { "content": "fn handle_stream(state: Arc<State>, mut stream: TcpStream) {\n\n debug!(\n\n \"Accepted new TCP stream from {}\",\n\n stream.peer_addr().expect(\"Could not get peer addr\")\n\n );\n\n stream\n\n .set_read_timeout(None)\n\n .expect(\"Setting read timeout failed\");\n\n // TODO performance: buffer size\n\n let mut buf = [0u8; CELL_LEN];\n\n loop {\n\n match stream.read_exact(&mut buf) {\n\n Ok(()) => (),\n\n Err(e) => {\n\n warn!(\"Reading from TCP stream failed, giving up ({})\", e);\n\n break;\n\n }\n\n }\n\n let cell = buf.to_vec().try_into().expect(\"Read exact broken?\");\n\n state.handle_cell(cell);\n\n }\n\n}\n\n\n", "file_path": "src/mix/cell_acceptor.rs", "rank": 9, "score": 140731.01586721343 }, { "content": "fn expect_fail<T>(reply: &Result<T, tonic::Status>) {\n\n match *reply {\n\n Ok(_) => panic!(\"Expected fail did not occure\"),\n\n Err(_) => (),\n\n }\n\n}\n", "file_path": "tests/directory.rs", "rank": 10, "score": 140488.2075088665 }, { "content": "pub fn ip_addr_to_vec(a: &IpAddr) -> Vec<u8> {\n\n match a {\n\n IpAddr::V4(v4) => v4.octets().to_vec(),\n\n IpAddr::V6(v6) => v6.octets().to_vec(),\n\n }\n\n}\n\n\n", "file_path": "src/net/mod.rs", "rank": 11, "score": 138457.13238648488 }, { "content": "pub fn accept(state: Arc<State>, local_addr: 
SocketAddr) {\n\n let listener = TcpListener::bind(local_addr).expect(\"Cannot bind TCP\");\n\n for s in listener.incoming() {\n\n match s {\n\n Ok(stream) => {\n\n let cloned_state = state.clone();\n\n std::thread::spawn(move || handle_stream(cloned_state, stream));\n\n }\n\n Err(e) => {\n\n warn!(\"Accepting TCP connection failed: {}\", e);\n\n continue;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/mix/cell_acceptor.rs", "rank": 12, "score": 138091.46181618358 }, { "content": "pub fn process_publish(cell: Cell, map: Arc<SubscriptionMap>) -> cell_rss_t::Result {\n\n if cell.round_no() != PUBLISH_ROUND_NO {\n\n if cell.round_no() == INJECT_ROUND_NO {\n\n // seems like we are behind in time -> requeue for inject\n\n return cell_rss_t::Result::Requeue(cell);\n\n } else {\n\n debug!(\n\n \"Dropping cell with wrong round number. Expected {}, got {}.\",\n\n PUBLISH_ROUND_NO,\n\n cell.round_no()\n\n );\n\n\n\n return cell_rss_t::Result::Drop;\n\n }\n\n }\n\n\n\n let circuit_id: CircuitId = match cell.circuit_id().try_into() {\n\n Ok(cid) => cid,\n\n Err(_) => {\n\n warn!(\"Rendezvous circuit id too large\");\n", "file_path": "src/rendezvous/processor.rs", "rank": 13, "score": 134949.80628693046 }, { "content": "pub fn tokens_from_bytes(raw: &[u8]) -> Vec<Token> {\n\n let mut tokens: Vec<Token> = Vec::new();\n\n for i in (0..raw.len()).step_by(8) {\n\n match raw.get(i..i + 8) {\n\n Some(token) => tokens.push(\n\n token_from_bytes(&token).expect(\"Something went wrong during the conversion\"),\n\n ),\n\n None => {\n\n log::warn!(\"Size of Vector is not a multiple of eight.\");\n\n }\n\n };\n\n }\n\n tokens\n\n}\n\n\n", "file_path": "src/defs.rs", "rank": 14, "score": 133721.059497834 }, { "content": "pub fn init(log_external: bool) {\n\n let filter;\n\n if cfg!(debug_assertions) {\n\n filter = LevelFilter::Debug;\n\n } else {\n\n filter = LevelFilter::Info;\n\n }\n\n\n\n let mut builder = ConfigBuilder::new();\n\n builder.set_time_format_str(\"%H:%M:%S%.6f\");\n\n if 
!log_external {\n\n builder\n\n .add_filter_allow_str(\"hydra\")\n\n .add_filter_allow_str(\"mix\")\n\n .add_filter_allow_str(\"directory_service\")\n\n .add_filter_allow_str(\"load_gen\");\n\n }\n\n let cfg = builder.build();\n\n\n\n TermLogger::init(filter, cfg, TerminalMode::Mixed).expect(\"Initializing Hydra logging failed\");\n\n}\n", "file_path": "src/log_cfg.rs", "rank": 15, "score": 132555.1118246877 }, { "content": "pub fn process_setup_pkt(\n\n pkt: SetupPacketWithPrev,\n\n dir_client: Arc<directory_client::Client>,\n\n epoch: &EpochInfo,\n\n sk: &Key,\n\n layer: u32,\n\n setup_state: &EpochSetupState,\n\n) -> setup_t::Result {\n\n let circuit_map = setup_state.circuits();\n\n\n\n let current_ttl = epoch.path_length - layer - 1;\n\n let pkt_ttl = pkt.ttl().expect(\"Expected to reject this in gRPC!?\");\n\n // first of all, check if its the right place and time for this setup packet\n\n match pkt.epoch_no().cmp(&epoch.epoch_no) {\n\n Ordering::Less => {\n\n warn!(\n\n \"Dropping late (by {} epochs) setup packet\",\n\n epoch.epoch_no - pkt.epoch_no()\n\n );\n\n return setup_t::Result::Drop;\n", "file_path": "src/mix/setup_processor.rs", "rank": 16, "score": 128265.035404757 }, { "content": "/// Panics on failure as dummy circuits are essential for anonymity.\n\n/// Returns the info necessary for circuit extension (next hop, setup packet).\n\npub fn create_dummy_circuit(\n\n dummy_circuit_map: &Arc<RwLock<DummyCircuitMap>>,\n\n dir_client: &directory_client::Client,\n\n epoch_no: EpochNo,\n\n layer: u32,\n\n ttl: u32,\n\n) -> PacketWithNextHop<SetupPacket> {\n\n // TODO code: move path selection inside DummyCircuit::new()?\n\n let path = dir_client\n\n .select_path_tunable(\n\n epoch_no,\n\n Some(ttl as usize),\n\n Some(dir_client.fingerprint()),\n\n None,\n\n )\n\n .expect(\"No path available\");\n\n let (circuit, extend) =\n\n DummyCircuit::new(epoch_no, layer, &path).expect(\"Creating dummy circuit failed\");\n\n let mut dummy_circuit_map_guard = 
dummy_circuit_map.write().expect(\"Lock poisoned\");\n\n dummy_circuit_map_guard.insert(circuit.circuit_id(), circuit);\n\n extend\n\n}\n", "file_path": "src/mix/setup_processor.rs", "rank": 17, "score": 127543.59065441655 }, { "content": "type MapType = HashMap<Token, Vec<SmallEndpoint>>;\n\n\n\n/// Mapping tokens to subscribers\n\npub struct SubscriptionMap {\n\n map: Vec<RwLock<MapType>>,\n\n addr_map: RwLock<Vec<SocketAddr>>,\n\n drop_idx: AtomicUsize,\n\n}\n\n\n\nimpl Default for SubscriptionMap {\n\n fn default() -> Self {\n\n SubscriptionMap::new()\n\n }\n\n}\n\n\n\nimpl SubscriptionMap {\n\n pub fn new() -> Self {\n\n let mut map = Vec::new();\n\n // TODO code: don't hardcode\n\n for _ in 0..128 {\n", "file_path": "src/rendezvous/subscription_map.rs", "rank": 18, "score": 126631.10499532294 }, { "content": "/// Set the 8 bytes (args, cmd) of cells based on the given `CellCmd`.\n\npub fn set_command(cmd: CellCmd, slice: &mut [u8]) {\n\n match cmd {\n\n CellCmd::Delay(rounds) => {\n\n for b in slice.iter_mut() {\n\n *b = 0;\n\n }\n\n slice[0] = rounds;\n\n }\n\n CellCmd::Subscribe(n_tokens) => {\n\n for b in slice.iter_mut() {\n\n *b = 0;\n\n }\n\n slice[1] = 1;\n\n slice[0] = n_tokens;\n\n }\n\n CellCmd::Broadcast => {\n\n for b in slice.iter_mut() {\n\n *b = 255;\n\n }\n\n }\n", "file_path": "src/net/cell.rs", "rank": 19, "score": 123192.17870847629 }, { "content": "type MixCircuit = hydra::mix::circuit::Circuit;\n\n\n", "file_path": "tests/circuit.rs", "rank": 20, "score": 122139.55218313301 }, { "content": "type ClientCircuit = hydra::client::circuit::Circuit;\n", "file_path": "tests/circuit.rs", "rank": 21, "score": 122139.55218313301 }, { "content": "struct Circuit {\n\n cells: Vec<Cell>,\n\n}\n\n\n\nimpl Circuit {\n\n pub fn new() -> Self {\n\n Circuit { cells: Vec::new() }\n\n }\n\n}\n\n\n\n// TODO cleanup once in a while\n\npub struct Storage {\n\n circuit_maps: Vec<RwLock<HashMap<CircuitId, Circuit>>>,\n\n firebase_auth_key: Option<String>,\n\n 
firebase_map: RwLock<BTreeMap<EpochNo, HashMap<CircuitId, String>>>,\n\n sync_rx: xbeam::Receiver<SyncBeat>,\n\n}\n\n\n\nimpl Storage {\n\n pub fn new(cfg: &Config, sync_rx: xbeam::Receiver<SyncBeat>) -> Self {\n", "file_path": "src/mix/storage.rs", "rank": 22, "score": 121791.49304898045 }, { "content": "/// Processes new incomming requests for one thread and returns the idle time.\n\nfn process_new<I, O, A, F: FnMut(I) -> ProcessResult<I, O, A>>(\n\n thread: &mut ThreadState<I, O, A>,\n\n mut f: F,\n\n deadline: Duration,\n\n) -> Duration {\n\n let poll_interval = Duration::from_millis(1);\n\n let mut idle_time = Duration::from_millis(0);\n\n loop {\n\n if deadline.checked_sub(current_time()).is_none() {\n\n // time limit reached\n\n return idle_time;\n\n }\n\n match thread.in_queue.try_recv().map(|req| match f(req) {\n\n ProcessResult::Out(out) => thread.out_queue.push(out),\n\n ProcessResult::Alt(out) => thread.alt_out_queue.push(out),\n\n ProcessResult::Multiple(out_vec) => thread.out_queue.extend(out_vec),\n\n ProcessResult::MultipleAlt(out_vec) => thread.alt_out_queue.extend(out_vec),\n\n ProcessResult::Requeue(req) => thread.pending_queue.push_back(req),\n\n ProcessResult::Drop => (),\n\n }) {\n", "file_path": "src/mix/rss_pipeline.rs", "rank": 23, "score": 120964.8156498733 }, { "content": "/// convert bool to Result<(), tonic::Status>, with \"invalid argument\" error code\n\npub fn valid_request_check(check: bool, msg: &str) -> Result<(), Status> {\n\n match check {\n\n true => Ok(()),\n\n false => {\n\n log::warn!(\"{}\", msg);\n\n Err(Status::new(Code::InvalidArgument, msg))\n\n }\n\n }\n\n}\n", "file_path": "src/grpc/macros.rs", "rank": 24, "score": 117771.33102847316 }, { "content": "fn get_previous_hop<T>(req: &Request<T>) -> Result<Option<SocketAddr>, Status> {\n\n match req.metadata().get(\"reply-to\") {\n\n Some(val) => {\n\n let as_str = rethrow_as_invalid!(val.to_str(), \"reply-to is not valid\");\n\n let prev = 
rethrow_as_invalid!(as_str.to_string().parse(), \"reply-to is not valid\");\n\n Ok(Some(prev))\n\n }\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "src/mix/grpc.rs", "rank": 25, "score": 113690.81013847022 }, { "content": "struct ThreadState<I, O, A> {\n\n in_queue: crossbeam_channel::Receiver<I>,\n\n pending_queue: VecDeque<I>,\n\n out_queue: Vec<O>,\n\n alt_out_queue: Vec<A>,\n\n}\n\n\n\n/// Struct for doing the work on requests of type `I`, transforming each to output of type `O` or\n\n/// type `A`.\n\npub struct Processor<I: Send, O: Send, A: Send> {\n\n threads: Vec<ThreadState<I, O, A>>,\n\n tx_queue: crossbeam_channel::Sender<(Vec<Vec<O>>, Option<Duration>)>,\n\n alt_tx_queue: crossbeam_channel::Sender<(Vec<Vec<A>>, Option<Duration>)>,\n\n}\n\n\n\nimpl<I: Send, O: Send, A: Send> Processor<I, O, A> {\n\n /// Return the number of threads.\n\n pub fn size(&self) -> usize {\n\n self.threads.len()\n\n }\n", "file_path": "src/mix/rss_pipeline.rs", "rank": 26, "score": 113001.58970713371 }, { "content": "/// Generate a key by expanding a \"master key\" `ikm` using the HKDF key derivation function\n\n/// (RFC 5869), instantiated with SHA256 as hash algorithm.\n\npub fn hkdf_sha256(\n\n ikm: &Key,\n\n salt: Option<&[u8]>,\n\n info: Option<&[u8]>,\n\n size: usize,\n\n) -> Result<Key, Error> {\n\n let hkdf = Hkdf::<sha2::Sha256>::new(salt, ikm.borrow_raw());\n\n let mut okm = vec![0u8; size];\n\n let info_slice = info.unwrap_or(&[0u8; 0]);\n\n hkdf.expand(info_slice, &mut okm)?;\n\n Ok(Key::move_from_vec(okm))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn generate_random_key() {\n\n let size = 1337;\n\n let key = Key::new(size);\n", "file_path": "src/crypto/key.rs", "rank": 27, "score": 103074.75417025675 }, { "content": "pub fn process_subscribe(\n\n epoch_no: EpochNo,\n\n req: Subscription,\n\n map: Arc<SubscriptionMap>,\n\n) -> subscribe_t::Result {\n\n match epoch_no.cmp(&req.epoch_no) {\n\n Ordering::Less => {\n\n warn!(\n\n 
\"Dropping late subscription; expected epoch {}, got {}\",\n\n epoch_no, req.epoch_no\n\n );\n\n ProcessResult::Drop\n\n }\n\n Ordering::Greater => ProcessResult::Requeue(req),\n\n Ordering::Equal => {\n\n map.subscribe(req);\n\n ProcessResult::Drop\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/rendezvous/processor.rs", "rank": 28, "score": 103071.15184581143 }, { "content": "pub fn process_cell(\n\n cell: Cell,\n\n round_no: RoundNo,\n\n incomming_round_no: RoundNo,\n\n layer: u32,\n\n max_layer: u32,\n\n direction: CellDirection,\n\n epoch_state: &EpochState,\n\n) -> cell_rss_t::Result {\n\n let circuits = &*epoch_state.circuits();\n\n let sub_collector = &*epoch_state.sub_collector();\n\n\n\n if cell.round_no() != incomming_round_no {\n\n if let CellDirection::Upstream = direction {\n\n if cell.round_no() == PUBLISH_ROUND_NO && layer == max_layer {\n\n // seems like we are behind in time -> requeue cells that shall be published already\n\n return cell_rss_t::Result::Requeue(cell);\n\n }\n\n }\n\n\n", "file_path": "src/mix/cell_processor.rs", "rank": 29, "score": 100768.38667827495 }, { "content": "/// return current POSIX/UNIX time\n\npub fn current_time() -> Duration {\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Failed to get UNIX time\")\n\n}\n\n\n", "file_path": "src/epoch.rs", "rank": 30, "score": 100029.44204616782 }, { "content": "/// return current POSIX/UNIX time in seconds\n\npub fn current_time_in_secs() -> u64 {\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Failed to get UNIX time\")\n\n .as_secs()\n\n}\n\n\n", "file_path": "src/epoch.rs", "rank": 31, "score": 97726.67687863135 }, { "content": "fn process_pending<I, O, A, F: FnMut(I) -> ProcessResult<I, O, A>>(\n\n thread: &mut ThreadState<I, O, A>,\n\n mut f: F,\n\n time: Duration,\n\n) {\n\n let mut new_pending = VecDeque::new();\n\n while let Some(req) = thread.pending_queue.pop_front() {\n\n if time.checked_sub(current_time()).is_none() {\n\n // time 
limit reached\n\n return;\n\n }\n\n match f(req) {\n\n ProcessResult::Out(out) => thread.out_queue.push(out),\n\n ProcessResult::Alt(out) => thread.alt_out_queue.push(out),\n\n ProcessResult::Multiple(out_vec) => thread.out_queue.extend(out_vec),\n\n ProcessResult::MultipleAlt(out_vec) => thread.alt_out_queue.extend(out_vec),\n\n ProcessResult::Requeue(req) => new_pending.push_back(req),\n\n ProcessResult::Drop => (),\n\n }\n\n }\n\n thread.pending_queue = new_pending;\n\n}\n\n\n", "file_path": "src/mix/rss_pipeline.rs", "rank": 32, "score": 95789.34824215656 }, { "content": "pub fn hydra_version() -> &'static str {\n\n option_env!(\"CARGO_PKG_VERSION\").unwrap_or(\"Unknown\")\n\n}\n\n\n\npub type AuthTag = Vec<u8>;\n\npub type Token = u64;\n\npub type CircuitId = u64;\n\npub type CircuitIdSet = std::collections::BTreeSet<CircuitId>;\n\npub type RoundNo = u32;\n\n\n\npub const DIR_AUTH_KEY_SIZE: usize = 32;\n\npub const DIR_AUTH_KEY_INFO: &[u8; 4] = b\"auth\";\n\n\n\npub const DIR_AUTH_UNREGISTER: &[u8; 10] = b\"unregister\";\n\n\n\n/// Number of tokens in a setup packet\n\npub const SETUP_TOKENS: usize = 256;\n\npub const ONION_LEN: usize = 256;\n\npub const CELL_LEN: usize =\n\n ONION_LEN + std::mem::size_of::<CircuitId>() + std::mem::size_of::<RoundNo>();\n", "file_path": "src/defs.rs", "rank": 33, "score": 95111.0665116244 }, { "content": "type ResultType = i32;\n\npub const _SUCCESS: ResultType = -1;\n\npub const FAILURE: ResultType = 0;\n\n\n\nextern \"C\" {\n\n pub fn x448_int(out: *mut u8, base: *const u8, scalar: *const u8) -> ResultType;\n\n}\n\nextern \"C\" {\n\n pub fn x448_derive_public_key(out: *mut u8, scalar: *const u8);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n // test vector from RFC 7748, section 6.2\n\n fn rfc_test_vector() {\n\n // constants from RFC\n\n let sk_alice = hex::decode(\n\n \"9a8f4925d1519f5775cf46b04b5800d4ee9ee8bae8bc5565d498c28dd9c9baf574a9419744897391006382a6f127ab1d9ac2d8c0a598726b\"\n", 
"file_path": "src/crypto/x448_bindings.rs", "rank": 34, "score": 94847.39669324963 }, { "content": "/// return (pk, sk)\n\npub fn generate_keypair() -> (Key, Key) {\n\n let sk = Key::new(KEY_LEN);\n\n let mut pk_vec = vec![0u8; KEY_LEN];\n\n unsafe {\n\n x448_derive_public_key(&mut (pk_vec[0]) as *mut u8, sk.head_ptr());\n\n }\n\n let pk = Key::move_from_vec(pk_vec);\n\n (pk, sk)\n\n}\n\n\n", "file_path": "src/crypto/x448.rs", "rank": 35, "score": 92971.48942682677 }, { "content": "/// return (pk, sk)\n\npub fn generate_keypair() -> (Key, Key) {\n\n let sk = Key::new(KEY_LEN);\n\n let mut pk_vec = vec![0u8; KEY_LEN];\n\n unsafe {\n\n X25519_public_from_private(&mut (pk_vec[0]) as *mut u8, sk.head_ptr());\n\n }\n\n let pk = Key::move_from_vec(pk_vec);\n\n (pk, sk)\n\n}\n\n\n", "file_path": "src/crypto/x25519.rs", "rank": 36, "score": 92971.48942682677 }, { "content": "fn create_bloomfilter(last_circuit_count: usize) -> DupFilter {\n\n Bloom::new_for_fp_rate(\n\n max(100_000, (last_circuit_count as f64 * 1.1) as usize),\n\n 1e-6,\n\n )\n\n}\n", "file_path": "src/mix/epoch_state.rs", "rank": 37, "score": 92726.96817304939 }, { "content": "/// Create a new pipeline with `size` threads. 
Panics if `size == 0`.\n\npub fn new_pipeline<I: std::fmt::Debug + Scalable + Send, O: Send, A: Send>(\n\n size: usize,\n\n) -> Pipeline<I, O, A> {\n\n assert!(size > 0);\n\n let mut senders = Vec::new();\n\n let mut threads = Vec::new();\n\n for _ in 0..size {\n\n let (rx_sender, rx_receiver) = crossbeam_channel::unbounded();\n\n senders.push(rx_sender);\n\n let t = ThreadState {\n\n in_queue: rx_receiver,\n\n out_queue: Vec::new(),\n\n alt_out_queue: Vec::new(),\n\n pending_queue: VecDeque::new(),\n\n };\n\n threads.push(t);\n\n }\n\n let rx = RxQueue { queues: senders };\n\n let (tx_sender, tx_receiver) = crossbeam_channel::unbounded();\n\n let (alt_tx_sender, alt_tx_receiver) = crossbeam_channel::unbounded();\n\n let processor = Processor {\n\n threads,\n\n tx_queue: tx_sender,\n\n alt_tx_queue: alt_tx_sender,\n\n };\n\n (rx, processor, tx_receiver, alt_tx_receiver)\n\n}\n\n\n", "file_path": "src/mix/rss_pipeline.rs", "rank": 38, "score": 92667.77291849017 }, { "content": "/// Attention: Use in tests only!!!\n\npub fn activate_fake_rand(seed: u32) {\n\n unsafe {\n\n activate_fakerand(seed as c_uint);\n\n }\n\n}\n", "file_path": "src/crypto/mod.rs", "rank": 39, "score": 90982.47502292381 }, { "content": "#[derive(Clone)]\n\nstruct SmallEndpoint {\n\n addr_idx: u16,\n\n circuit_id: CircuitId,\n\n}\n\n\n", "file_path": "src/rendezvous/subscription_map.rs", "rank": 40, "score": 90455.2952540258 }, { "content": "/// return the current epoch number (only the directory service should rely on this function)\n\npub fn current_epoch_no(phase_duration: Duration) -> EpochNo {\n\n (current_time_in_secs() / phase_duration.as_secs()) as EpochNo\n\n}\n\n\n\nimpl EpochInfo {\n\n /// Return the end time for the communication phase of this epoch (keep keys till then).\n\n pub fn communication_end_time(&self) -> u64 {\n\n let k = self.number_of_rounds as f64;\n\n let d = self.round_duration;\n\n let w = self.round_waiting;\n\n self.communication_start_time + (k * (d + w)) as 
u64\n\n }\n\n}\n", "file_path": "src/epoch.rs", "rank": 41, "score": 88705.16162165711 }, { "content": "#[test]\n\nfn key_derivation() {\n\n let master_key =\n\n Key::from_hex_str(\"775f84edb8bf10bb747765f2582c87f4a1e4463f275f38ce447a5885\").unwrap();\n\n let nonce = hex::decode(\"903a73a912df\").unwrap();\n\n let (aes_key, onion_key) = derive_keys(&master_key, &nonce).unwrap();\n\n let aes_expected =\n\n Key::from_hex_str(\"ac5e5ae356e4f943574ee7cefadb091b17eec79d642fcafcd8679f8c110cc51f\")\n\n .unwrap();\n\n let onion_expected = Key::from_hex_str(\"30755ecd02757afb390d28ae2eb5bc4e6015e95c835a998cc74551e8a8a183fac722852edf51c1a82b0b9f068c085ad6c17233ef20730e710e862232cb8675696c140a5ee306a816df06f99b1cf639baa93a7d15fbe0be7e4c10afaeea26d77f6b656808d756df0c0f978610faa8c35597e49e04f4f1b85225bff654b69ee06e\").unwrap();\n\n assert_eq!(aes_key, aes_expected);\n\n assert_eq!(onion_key, onion_expected);\n\n}\n\n\n", "file_path": "tests/circuit.rs", "rank": 42, "score": 88116.32270523618 }, { "content": "#[test]\n\nfn setup_onion() {\n\n // deterministic test (only when not run in parallel with other tests)\n\n hydra::crypto::activate_fake_rand(1337);\n\n\n\n let mixes: Vec<(MixInfo, Key)> = [1, 2, 3].iter().map(|i| create_mix_info(*i)).collect();\n\n let path: Vec<MixInfo> = mixes.iter().map(|(info, _)| info.clone()).collect();\n\n let endpoints: Vec<SocketAddr> = path\n\n .iter()\n\n .map(|info| {\n\n SocketAddr::new(\n\n ip_addr_from_slice(&info.address).unwrap(),\n\n info.relay_port as u16,\n\n )\n\n })\n\n .collect();\n\n let mut epoch_info = EpochInfo::default();\n\n epoch_info.mixes = path.clone();\n\n let subscribe_to = vec![13, 37];\n\n let rendezvous_map = Arc::new(RendezvousMap::new(&epoch_info).unwrap());\n\n let (client_circuit, extend) =\n", "file_path": "tests/circuit.rs", "rank": 43, "score": 88116.32270523618 }, { "content": "/// Decode bytes as little-endian u64\n\n///\n\n/// # Examples\n\n/// ```\n\n/// # use hydra::defs::token_from_bytes;\n\n/// let raw 
= vec![42, 0, 0, 0, 0, 0, 0, 128];\n\n/// let token = token_from_bytes(&raw);\n\n/// assert_eq!(token.unwrap(), (1u64 << 63) + 42);\n\n/// ```\n\npub fn token_from_bytes(raw: &[u8]) -> Option<Token> {\n\n if raw.len() != 8 {\n\n return None;\n\n }\n\n let mut rdr = std::io::Cursor::new(raw);\n\n Some(\n\n rdr.read_u64::<LittleEndian>()\n\n .expect(\"Why should this fail?\"),\n\n )\n\n}\n\n\n", "file_path": "src/defs.rs", "rank": 44, "score": 86722.3356851628 }, { "content": "/// Read the 8 bytes (args, cmd) of cells and return the `CellCmd` if there is one.\n\npub fn read_command(slice: &[u8]) -> Option<CellCmd> {\n\n if slice.iter().all(|b| *b == 255) {\n\n Some(CellCmd::Broadcast)\n\n } else if slice[2..].iter().all(|b| *b == 0) {\n\n match slice[1] {\n\n 0 => Some(CellCmd::Delay(slice[0])),\n\n 1 => Some(CellCmd::Subscribe(slice[0])),\n\n _ => None,\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/net/cell.rs", "rank": 45, "score": 83112.0711824935 }, { "content": "fn get_firebase_token<T>(req: &Request<T>) -> Option<String> {\n\n match req.metadata().get(\"firebase\") {\n\n Some(val) => match val.to_str() {\n\n Ok(token) => Some(token.to_string()),\n\n Err(_) => {\n\n warn!(\"Firebase token is not valid\");\n\n None\n\n }\n\n },\n\n None => None,\n\n }\n\n}\n", "file_path": "src/mix/grpc.rs", "rank": 46, "score": 82383.90591949289 }, { "content": "fn create_mix_info(index: u8) -> (MixInfo, Key) {\n\n let (pk, sk) = x448::generate_keypair();\n\n let info = MixInfo {\n\n address: vec![127, 0, 0, index],\n\n entry_port: 9001,\n\n relay_port: 9002,\n\n rendezvous_port: 9003,\n\n fingerprint: format!(\"mix-{}\", index),\n\n public_dh: pk.clone_to_vec(),\n\n };\n\n (info, sk)\n\n}\n", "file_path": "tests/circuit.rs", "rank": 47, "score": 69317.34707894195 }, { "content": "type StatisticMap = HashMap<String, HashMap<EpochNo, MixStatistics>>;\n\n\n\npub struct State {\n\n pub mix_map: Mutex<HashMap<String, Mix>>,\n\n contact_service_addr: 
SocketAddr,\n\n config: Config,\n\n pub epochs: RwLock<VecDeque<EpochInfo>>,\n\n pub stat_map: RwLock<StatisticMap>,\n\n}\n\n\n\nimpl State {\n\n pub fn new(config: Config, contact_service_addr: SocketAddr) -> Self {\n\n let current_epoch_no = current_epoch_no(config.phase_duration());\n\n info!(\"Initializing directory in epoch {}\", current_epoch_no);\n\n\n\n State {\n\n mix_map: Mutex::new(HashMap::new()),\n\n contact_service_addr,\n\n config,\n\n epochs: RwLock::new(VecDeque::new()),\n", "file_path": "src/directory/state.rs", "rank": 48, "score": 68046.01889077008 }, { "content": "fn main() {\n\n // build static x448 lib\n\n let dst = cmake::build(\"include/x448\");\n\n println!(\"cargo:rustc-link-search=native={}/lib\", dst.display());\n\n println!(\"cargo:rustc-link-lib=static=x448\");\n\n\n\n // build static threefish lib\n\n let dst = cmake::build(\"include/threefish\");\n\n println!(\"cargo:rustc-link-search=native={}/lib\", dst.display());\n\n println!(\"cargo:rustc-link-lib=static=threefish\");\n\n\n\n // build static fakerand lib\n\n let dst = cmake::build(\"include/fakerand\");\n\n println!(\"cargo:rustc-link-search=native={}/lib\", dst.display());\n\n println!(\"cargo:rustc-link-lib=static=fakerand\");\n\n\n\n // compile protobuf files\n\n tonic_build::compile_protos(\"protobuf/directory.proto\")\n\n .expect(\"Failed to generate directory gRPC\");\n\n tonic_build::compile_protos(\"protobuf/mix.proto\").expect(\"Failed to generate mix gRPC\");\n\n tonic_build::compile_protos(\"protobuf/rendezvous.proto\")\n\n .expect(\"Failed to generate rendezvous gRPC\");\n\n}\n", "file_path": "build.rs", "rank": 49, "score": 57479.523497365095 }, { "content": "#[test]\n\nfn integration() {\n\n let mut rt = Builder::new()\n\n .threaded_scheduler()\n\n .enable_all()\n\n .build()\n\n .expect(\"Failed to init tokio runtime\");\n\n\n\n rt.block_on(async {\n\n let local_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0);\n\n let contact_service_addr = 
SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 1337);\n\n let state = Arc::new(State::new(Config::default(), contact_service_addr));\n\n\n\n let timeout = sleep(Duration::from_secs(2));\n\n let key =\n\n Key::read_from_file(\"tests/data/tls-test.key\").expect(\"Failed to read key from file\");\n\n let cert = std::fs::read_to_string(\"tests/data/tls-test.pem\").unwrap();\n\n let tls_cred = ServerCredentials::new(key, &cert);\n\n let (grpc_handle, local_addr) = grpc::spawn_service_with_shutdown(\n\n state.clone(),\n\n local_addr,\n", "file_path": "tests/directory.rs", "rank": 50, "score": 55892.2733019977 }, { "content": "type Cert = String;\n", "file_path": "src/client/directory_client.rs", "rank": 51, "score": 55698.29753065293 }, { "content": "/// Trait for valid requests to an RSS pipeline.\n\npub trait Scalable {\n\n /// Given the `size` of the pipeline (number of threads), return the id of the thread this\n\n /// request should run on. Return value has to be in the range `0..size`, otherwise the request\n\n /// is dropped.\n\n /// Default implementation: random\n\n fn thread_id(&self, size: usize) -> usize {\n\n thread_rng().gen_range(0, size)\n\n }\n\n}\n\n\n\n/// Sender end of the rx channel.\n\n#[derive(Clone)]\n\npub struct RxQueue<I: Scalable + std::fmt::Debug> {\n\n queues: Vec<crossbeam_channel::Sender<I>>,\n\n}\n\n\n\nimpl<I: Scalable + std::fmt::Debug> RxQueue<I> {\n\n /// Add a new request to the pipeline. 
Panics if the matching processor is gone.\n\n pub fn enqueue(&self, req: I) {\n\n let size = self.queues.len();\n", "file_path": "src/mix/rss_pipeline.rs", "rank": 52, "score": 52893.82825696019 }, { "content": "type DirectoryChannel = DirectoryClient<Channel>;\n\n\n\npub struct Config {\n\n pub addr: IpAddr,\n\n pub entry_port: u16,\n\n pub relay_port: u16,\n\n pub fast_port: u16,\n\n pub rendezvous_port: u16,\n\n pub directory_certificate: Option<String>,\n\n pub directory_domain: String,\n\n pub directory_port: u16,\n\n pub setup_exchange_alg: KeyExchangeAlgorithm,\n\n}\n\n\n\nimpl Config {\n\n pub fn setup_reply_to(&self) -> String {\n\n format!(\"{}:{}\", self.addr, self.fast_port)\n\n }\n\n}\n\n\n", "file_path": "src/mix/directory_client.rs", "rank": 53, "score": 50638.849391187585 }, { "content": "type DirectoryChannel = DirectoryClient<Channel>;\n\n\n\npub struct Client {\n\n domain: String,\n\n grpc_url: String,\n\n tls_cert: Option<Cert>,\n\n // TODO performance: should store infos inside Arcs to avoid copies (key material is big!)\n\n epochs: RwLock<BTreeMap<EpochNo, EpochInfo>>,\n\n testbed_nat: bool,\n\n}\n\n\n\nimpl Client {\n\n /// Use a custom CA certificate `tls_cert` if the directory service certificate is not anchored\n\n /// in your system.\n\n /// Set `testbed_nat` to true if you want to query the testbed NAT addresses.\n\n pub fn new(domain: String, port: u16, tls_cert: Option<Cert>, testbed_nat: bool) -> Self {\n\n let grpc_url = format!(\"https://{}:{}\", domain, port);\n\n Client {\n\n domain,\n\n grpc_url,\n", "file_path": "src/client/directory_client.rs", "rank": 54, "score": 50638.849391187585 }, { "content": "#[async_trait]\n\npub trait Channel: Sized {\n\n async fn connect(dst: SocketAddr) -> Result<Self, Error>;\n\n}\n\n\n\npub type TcpChannel = Arc<RwLock<TcpStream>>;\n\n\n\n#[tonic::async_trait]\n\nimpl Channel for TcpChannel {\n\n async fn connect(dst: SocketAddr) -> Result<Self, Error> {\n\n debug!(\"Connecting TCP stream to {}\", 
dst);\n\n let stream = TcpStream::connect(dst)?;\n\n stream.set_nodelay(false)?;\n\n debug!(\n\n \".. TCP connection to {} established, src port {}\",\n\n dst,\n\n stream.local_addr().unwrap().port()\n\n );\n\n Ok(Arc::new(RwLock::new(stream)))\n\n }\n\n}\n", "file_path": "src/net/channel_pool.rs", "rank": 55, "score": 50127.70550200973 }, { "content": "type BaseClient = crate::client::directory_client::Client;\n\n\n", "file_path": "src/mix/directory_client.rs", "rank": 56, "score": 45905.925349294834 }, { "content": "fn create_register_request(index: u8, pk: &Key) -> RegisterRequest {\n\n RegisterRequest {\n\n fingerprint: format!(\"mix-{}\", index),\n\n address: vec![10, 0, 0, index],\n\n entry_port: 4242,\n\n relay_port: 1337,\n\n rendezvous_port: 1337,\n\n public_dh: pk.clone_to_vec(),\n\n }\n\n}\n\n\n", "file_path": "tests/directory.rs", "rank": 57, "score": 40520.4425368139 }, { "content": "fn create_unregister_request(index: u8, auth_tag: &[u8]) -> UnregisterRequest {\n\n UnregisterRequest {\n\n fingerprint: format!(\"mix-{}\", index),\n\n auth_tag: auth_tag.to_vec(),\n\n }\n\n}\n\n\n", "file_path": "tests/directory.rs", "rank": 58, "score": 39706.268304805584 }, { "content": "impl std::fmt::Display for Error {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n Error::OpenSslError(msg) => write!(f, \"OpenSSL error: {}\", msg),\n\n Error::IoError(msg) => write!(f, \"IO error: {}\", msg),\n\n Error::SizeMismatch(msg) => write!(f, \"Size mismatch: {}\", msg),\n\n Error::InputError(msg) => write!(f, \"Input error: {}\", msg),\n\n Error::ExternalError(msg) => write!(f, \"External error: {}\", msg),\n\n Error::NoneError(msg) => write!(f, \"None error: {}\", msg),\n\n }\n\n }\n\n}\n\n\n\n/// Helper for returning an `Error` if condition `b` is not met, using error `type` (from `Error`\n\n/// enum). `args` after `type` are passed to `format!(...)` to construct the error message.\n\n#[macro_export]\n\nmacro_rules! 
assert_as_err {\n\n ($b:expr, $type:expr $(, $args:expr)*) => {\n\n if !$b {\n\n return Err($type(format!($($args, )*)));\n", "file_path": "src/error.rs", "rank": 59, "score": 37457.93550138164 }, { "content": "}\n\n\n\n/// Specializing assert_as_err\n\n#[macro_export]\n\nmacro_rules! assert_as_external_err {\n\n ($b:expr $(, $args:expr)*) => { crate::assert_as_err!($b, Error::ExternalError $(, $args)*) };\n\n}\n\n\n\nimpl std::convert::From<Error> for tonic::Status {\n\n fn from(e: Error) -> Self {\n\n tonic::Status::new(tonic::Code::Internal, e.to_string())\n\n }\n\n}\n\n\n\nimpl std::convert::From<openssl::error::ErrorStack> for Error {\n\n fn from(stack: openssl::error::ErrorStack) -> Self {\n\n let mut msg = \"[\".to_string();\n\n for e in stack.errors() {\n\n msg.push_str(&format!(\"{}, \", e));\n\n }\n", "file_path": "src/error.rs", "rank": 60, "score": 37457.437181867346 }, { "content": " assert!(err_fun(true).unwrap_err() == Error::IoError(\"Reasons: specific\".to_string()));\n\n assert!(err_fun(false).unwrap_err() == Error::InputError(\"Reasons: foo, 42\".to_string()));\n\n ok_fun().expect(\"Should return an Ok\");\n\n }\n\n\n\n fn err_fun(specific: bool) -> Result<(), Error> {\n\n if specific {\n\n assert_as_io_err!(false, \"Reasons: {}\", \"specific\");\n\n } else {\n\n assert_as_err!(false, Error::InputError, \"Reasons: {}, {}\", \"foo\", 42);\n\n }\n\n Ok(())\n\n }\n\n\n\n fn ok_fun() -> Result<(), Error> {\n\n assert_as_size_err!(0 == 0, \"Foo {}\", \"bar\");\n\n assert_as_io_err!(true, \"Foobar\");\n\n assert_as_input_err!(true, \"Foobar\");\n\n assert_as_external_err!(true, \"Foobar\");\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 61, "score": 37455.94443282684 }, { "content": " }\n\n}\n\n\n\nimpl std::convert::From<tokio::io::Error> for Error {\n\n fn from(e: tokio::io::Error) -> Self {\n\n Error::IoError(e.to_string())\n\n }\n\n}\n\n\n\nimpl std::convert::From<http::uri::InvalidUri> for Error {\n\n fn from(e: 
http::uri::InvalidUri) -> Self {\n\n Error::InputError(e.to_string())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n #[test]\n\n fn test_assert_macros() {\n", "file_path": "src/error.rs", "rank": 62, "score": 37451.849062589376 }, { "content": "//! Hydra errors\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Error {\n\n /// something in OpenSSL went wrong\n\n OpenSslError(String),\n\n /// IoError\n\n IoError(String),\n\n /// some size mismatch (e.g. for keys)\n\n SizeMismatch(String),\n\n /// error due to wrong user input\n\n InputError(String),\n\n /// something external went wrong\n\n ExternalError(String),\n\n /// something was None that should not have been\n\n NoneError(String),\n\n}\n\n\n\nimpl std::error::Error for Error {}\n\n\n", "file_path": "src/error.rs", "rank": 63, "score": 37450.174047224566 }, { "content": " msg.push(']');\n\n Error::OpenSslError(msg)\n\n }\n\n}\n\n\n\nimpl std::convert::From<hkdf::InvalidLength> for Error {\n\n fn from(e: hkdf::InvalidLength) -> Self {\n\n Error::InputError(e.to_string())\n\n }\n\n}\n\n\n\nimpl std::convert::From<tonic::transport::Error> for Error {\n\n fn from(e: tonic::transport::Error) -> Self {\n\n Error::IoError(e.to_string())\n\n }\n\n}\n\n\n\nimpl std::convert::From<tonic::Status> for Error {\n\n fn from(e: tonic::Status) -> Self {\n\n Error::IoError(e.to_string())\n", "file_path": "src/error.rs", "rank": 64, "score": 37449.811468855754 }, { "content": " }\n\n };\n\n}\n\n\n\n/// Specializing assert_as_err\n\n#[macro_export]\n\nmacro_rules! assert_as_io_err {\n\n ($b:expr $(, $args:expr)*) => { crate::assert_as_err!($b, Error::IoError $(, $args)*) };\n\n}\n\n\n\n/// Specializing assert_as_err\n\n#[macro_export]\n\nmacro_rules! assert_as_size_err {\n\n ($b:expr $(, $args:expr)*) => { crate::assert_as_err!($b, Error::SizeMismatch $(, $args)*) };\n\n}\n\n\n\n/// Specializing assert_as_err\n\n#[macro_export]\n\nmacro_rules! 
assert_as_input_err {\n\n ($b:expr $(, $args:expr)*) => { crate::assert_as_err!($b, Error::InputError $(, $args)*) };\n", "file_path": "src/error.rs", "rank": 65, "score": 37445.712041424034 }, { "content": " let (_mix_circuit, next_step) =\n\n MixCircuit::new(pkt_with_prev, &mixes[1].1, rendezvous_map.clone(), 1, 41).unwrap();\n\n let extend = match next_step {\n\n NextSetupStep::Extend(e) => e,\n\n _ => unreachable!(),\n\n };\n\n assert_eq!(*extend.next_hop(), endpoints[2]);\n\n let setup_pkt = extend.into_inner();\n\n\n\n // third mix (last one)\n\n assert_eq!(setup_pkt.ttl().unwrap(), 0);\n\n let pkt_with_prev = SetupPacketWithPrev::new(setup_pkt, Some(endpoints[1].clone()));\n\n let (_mix_circuit, next_step) =\n\n MixCircuit::new(pkt_with_prev, &mixes[2].1, rendezvous_map.clone(), 2, 41).unwrap();\n\n\n\n let rendezvous_tokens = match next_step {\n\n NextSetupStep::Rendezvous(ts) => ts,\n\n _ => unreachable!(),\n\n };\n\n let expected_tokens_it = subscribe_to\n", "file_path": "tests/circuit.rs", "rank": 66, "score": 36733.521032228506 }, { "content": " ClientCircuit::new(42, &path, subscribe_to.clone(), None).unwrap();\n\n assert_eq!(*extend.next_hop(), endpoints[0]);\n\n let setup_pkt = extend.into_inner();\n\n\n\n // first mix\n\n assert_eq!(setup_pkt.ttl().unwrap(), 2);\n\n let previous_hop = Some(\"8.8.8.8:42\".parse().unwrap()); // previous hop is client\n\n let pkt_with_prev = SetupPacketWithPrev::new(setup_pkt, previous_hop);\n\n let (_mix_circuit, next_step) =\n\n MixCircuit::new(pkt_with_prev, &mixes[0].1, rendezvous_map.clone(), 0, 41).unwrap();\n\n let extend = match next_step {\n\n NextSetupStep::Extend(e) => e,\n\n _ => unreachable!(),\n\n };\n\n assert_eq!(*extend.next_hop(), endpoints[1]);\n\n let setup_pkt = extend.into_inner();\n\n\n\n // second mix\n\n assert_eq!(setup_pkt.ttl().unwrap(), 1);\n\n let pkt_with_prev = SetupPacketWithPrev::new(setup_pkt, Some(endpoints[0].clone()));\n", "file_path": "tests/circuit.rs", "rank": 67, "score": 
36730.833473525745 }, { "content": "use std::net::SocketAddr;\n\nuse std::sync::Arc;\n\n\n\nuse hydra::client::circuit::derive_keys;\n\nuse hydra::crypto::key::Key;\n\nuse hydra::crypto::x448;\n\nuse hydra::defs::SETUP_TOKENS;\n\nuse hydra::grpc::type_extensions::SetupPacketWithPrev;\n\nuse hydra::mix::circuit::NextSetupStep;\n\nuse hydra::mix::rendezvous_map::RendezvousMap;\n\nuse hydra::net::ip_addr_from_slice;\n\nuse hydra::tonic_directory::{EpochInfo, MixInfo};\n\n\n", "file_path": "tests/circuit.rs", "rank": 68, "score": 36722.38681427733 }, { "content": " .iter()\n\n .chain(client_circuit.dummy_tokens().iter());\n\n assert_eq!(\n\n subscribe_to.len() + client_circuit.dummy_tokens().len(),\n\n SETUP_TOKENS\n\n );\n\n assert_eq!(rendezvous_tokens.len(), SETUP_TOKENS);\n\n for t in expected_tokens_it {\n\n assert!(rendezvous_tokens.contains(t));\n\n }\n\n}\n\n\n", "file_path": "tests/circuit.rs", "rank": 69, "score": 36718.579127007346 }, { "content": "use simplelog::{ConfigBuilder, LevelFilter, TermLogger, TerminalMode};\n\n\n", "file_path": "src/log_cfg.rs", "rank": 70, "score": 36007.480770584414 }, { "content": " && self.number_of_rounds % (self.path_len as u32 + 1) == 0\n\n }\n\n}\n\n\n\nimpl ConfigBuilder {\n\n pub fn build_valid(&self) -> Result<Config, Error> {\n\n let cfg = self\n\n .build()\n\n .expect(\"This should not happen, defaults provided\");\n\n match cfg.is_valid() {\n\n true => Ok(cfg),\n\n false => Err(Error::InputError(\"Invalid config\".to_string())),\n\n }\n\n }\n\n}\n\n\n\npub struct Mix {\n\n pub fingerprint: String,\n\n pub auth_key: Key,\n\n pub addr: IpAddr,\n\n pub entry_port: u16,\n\n pub relay_port: u16,\n\n pub rendezvous_port: u16,\n\n pub dh_map: BTreeMap<EpochNo, Key>,\n\n pub last_counter: Option<u32>,\n\n}\n\n\n", "file_path": "src/directory/state.rs", "rank": 71, "score": 35691.053662361606 }, { "content": " // setup packets now)\n\n while let Some(front) = epoch_queue.front() {\n\n if front.epoch_no <= current_epoch_no 
{\n\n epoch_queue.pop_front();\n\n } else {\n\n break;\n\n }\n\n }\n\n\n\n // add new epoch information (starting with the next uncommited epoch)\n\n let mut epoch_no = match epoch_queue.back() {\n\n Some(e) => e.epoch_no + 1,\n\n None => current_epoch_no + 1,\n\n };\n\n let mut setup_start_time = phase_duration.mul_f64(epoch_no as f64);\n\n let mut communication_start_time = setup_start_time + phase_duration;\n\n\n\n while epoch_queue.len() < cfg.epochs_in_advance.into() {\n\n let mut mixes = Vec::new();\n\n let mut mix_map = self.mix_map.lock().expect(\"Acquiring lock failed\");\n", "file_path": "src/directory/state.rs", "rank": 72, "score": 35689.99739032609 }, { "content": " stat_map: RwLock::new(StatisticMap::new()),\n\n }\n\n }\n\n\n\n pub fn config(&self) -> &Config {\n\n &self.config\n\n }\n\n\n\n pub fn update(&self) {\n\n let cfg = &self.config;\n\n let phase_duration = cfg.phase_duration();\n\n\n\n let current_epoch_no = current_epoch_no(phase_duration);\n\n if current_epoch_no == MAX_EPOCH_NO {\n\n panic!(\"End of time reached!\");\n\n }\n\n info!(\"Updating epochs, current epoch is {}\", current_epoch_no);\n\n let mut epoch_queue = self.epochs.write().expect(\"Acquiring lock failed\");\n\n\n\n // clear old epoch information (includes the current epoch, because it's too late to send\n", "file_path": "src/directory/state.rs", "rank": 73, "score": 35683.20064575436 }, { "content": "}\n\n\n\n#[derive(Builder)]\n\n#[builder(default)]\n\npub struct Config {\n\n number_of_rounds: u32,\n\n epochs_in_advance: u8,\n\n path_len: u8,\n\n round_duration: Duration,\n\n round_waiting: Duration,\n\n testbed_nat_addr: Vec<u8>,\n\n testbed_nat_base_port: u16,\n\n}\n\n\n\nimpl Default for Config {\n\n fn default() -> Self {\n\n Config {\n\n number_of_rounds: 8,\n\n epochs_in_advance: 10,\n\n path_len: 3,\n", "file_path": "src/directory/state.rs", "rank": 74, "score": 35677.75977802462 }, { "content": "use crate::crypto::key::Key;\n\nuse crate::crypto::x448;\n\nuse 
crate::epoch::{current_epoch_no, EpochNo, MAX_EPOCH_NO};\n\nuse crate::error::Error;\n\nuse crate::tonic_directory::{EpochInfo, MixInfo, MixStatistics};\n\nuse derive_builder::*;\n\n\n\nuse log::*;\n\nuse std::collections::{BTreeMap, HashMap, VecDeque};\n\nuse std::net::{IpAddr, SocketAddr};\n\nuse std::sync::{Arc, Mutex, RwLock};\n\nuse tokio::time::delay_for as sleep;\n\nuse tokio::time::Duration;\n\n\n", "file_path": "src/directory/state.rs", "rank": 75, "score": 35677.61762081817 }, { "content": " for (_, mix) in mix_map.iter_mut() {\n\n // note: remove because we do not need it again\n\n match mix.dh_map.remove(&epoch_no) {\n\n Some(pk) => {\n\n let info = MixInfo {\n\n address: crate::net::ip_addr_to_vec(&mix.addr),\n\n entry_port: mix.entry_port as u32,\n\n relay_port: mix.relay_port as u32,\n\n rendezvous_port: mix.rendezvous_port as u32,\n\n public_dh: pk.clone_to_vec(),\n\n fingerprint: mix.fingerprint.clone(),\n\n };\n\n mixes.push(info);\n\n }\n\n None => warn!(\n\n \"Don't have a DH key for mix {} in epoch {} (have {} non-matching)\",\n\n &mix.fingerprint,\n\n &epoch_no,\n\n mix.dh_map.len(),\n\n ),\n", "file_path": "src/directory/state.rs", "rank": 76, "score": 35675.54542171138 }, { "content": " round_duration: Duration::from_secs(7),\n\n round_waiting: Duration::from_secs(13),\n\n testbed_nat_addr: Vec::default(),\n\n testbed_nat_base_port: 9000,\n\n }\n\n }\n\n}\n\n\n\nimpl Config {\n\n /// TODO code: getter macro?\n\n pub fn phase_duration(&self) -> Duration {\n\n self.number_of_rounds * (self.round_duration + self.round_waiting)\n\n }\n\n\n\n pub fn epochs_in_advance(&self) -> u8 {\n\n self.epochs_in_advance\n\n }\n\n\n\n pub fn path_len(&self) -> u8 {\n\n self.path_len\n", "file_path": "src/directory/state.rs", "rank": 77, "score": 35674.53369891009 }, { "content": " }\n\n }\n\n let epoch_info = EpochInfo {\n\n epoch_no,\n\n setup_start_time: setup_start_time.as_secs(),\n\n communication_start_time: communication_start_time.as_secs(),\n\n 
round_duration: cfg.round_duration.as_secs_f64(),\n\n round_waiting: cfg.round_waiting.as_secs_f64(),\n\n number_of_rounds: cfg.number_of_rounds,\n\n path_length: cfg.path_len.into(),\n\n mixes,\n\n contact_service_addr: crate::net::ip_addr_to_vec(&self.contact_service_addr.ip()),\n\n contact_service_port: self.contact_service_addr.port() as u32,\n\n };\n\n epoch_queue.push_back(epoch_info);\n\n epoch_no += 1;\n\n setup_start_time += phase_duration;\n\n communication_start_time += phase_duration;\n\n }\n\n }\n", "file_path": "src/directory/state.rs", "rank": 78, "score": 35666.400951677366 }, { "content": " }\n\n\n\n pub fn round_duration(&self) -> Duration {\n\n self.round_duration\n\n }\n\n\n\n pub fn round_waiting(&self) -> Duration {\n\n self.round_waiting\n\n }\n\n\n\n pub fn testbed_nat_addr(&self) -> &[u8] {\n\n &self.testbed_nat_addr\n\n }\n\n\n\n pub fn testbed_nat_base_port(&self) -> u16 {\n\n self.testbed_nat_base_port\n\n }\n\n\n\n fn is_valid(&self) -> bool {\n\n self.phase_duration().subsec_nanos() == 0\n", "file_path": "src/directory/state.rs", "rank": 79, "score": 35666.34654531613 }, { "content": " delayed_cells: RwLock<BTreeMap<RoundNo, Cell>>,\n\n inject_queue: RwLock<VecDeque<Cell>>,\n\n max_round_no: RoundNo,\n\n last_upstream_round_no: RwLock<Option<RoundNo>>,\n\n last_downstream_round_no: RwLock<Option<RoundNo>>,\n\n}\n\n\n\nimpl Circuit {\n\n /// Creates the circuit (if everything is ok). 
Furthermore, it either returns the next setup\n\n /// packet (with destination) or the set of tokens to subscribe to (last layer)\n\n pub fn new(\n\n pkt: SetupPacketWithPrev,\n\n ephemeral_sk: &Key,\n\n rendezvous_map: Arc<RendezvousMap>,\n\n layer: u32,\n\n max_round_no: RoundNo,\n\n ) -> Result<(Self, NextSetupStep), Error> {\n\n let downstream_hop = pkt.previous_hop();\n\n if downstream_hop.is_none() && layer > 0 {\n\n return Err(Error::InputError(\n", "file_path": "src/mix/circuit.rs", "rank": 80, "score": 35169.43024508362 }, { "content": " /// Create a new circuit for epoch `epoch_no`, using the given `path` and subscribe to the\n\n /// given `tokens` (filled up by random dummy tokens if necessary).\n\n /// If `circuit_id` is `None`, a random id will be selected.\n\n /// Returns the circuit context and the setup packet.\n\n pub fn new(\n\n epoch_no: EpochNo,\n\n path: &[MixInfo],\n\n tokens: Vec<Token>,\n\n circuit_id: Option<CircuitId>,\n\n ) -> Result<(Circuit, PacketWithNextHop<SetupPacket>), Error> {\n\n let first_mix = path\n\n .first()\n\n .ok_or_else(|| Error::SizeMismatch(\"Expected path with length >= 1\".to_string()))?;\n\n let first_hop = first_mix.relay_address().ok_or_else(|| {\n\n Error::InputError(\"First mix does not have a valid relay address\".to_string())\n\n })?;\n\n\n\n assert_as_size_err!(\n\n tokens.len() <= SETUP_TOKENS,\n\n \"Cannot subscribe to {} tokens at once\",\n", "file_path": "src/client/circuit.rs", "rank": 81, "score": 35163.16659915856 }, { "content": " let aes = Aes256Gcm::new(aes_key);\n\n match aes.decrypt(\n\n nonce,\n\n &setup_pkt.onion,\n\n &mut decrypted,\n\n None,\n\n &setup_pkt.auth_tag,\n\n ) {\n\n Ok(_) => (),\n\n Err(e) => {\n\n warn!(\"Decryption of setup packet failed: {}\", e);\n\n warn!(\".. client_pk = 0x{}\", hex::encode(client_pk.borrow_raw()));\n\n warn!(\n\n \".. shared_dh_secret = 0x{}\",\n\n hex::encode(master_key.borrow_raw())\n\n );\n\n warn!(\".. 
aes_key = 0x{}\", hex::encode(aes.key().borrow_raw()));\n\n warn!(\".. nonce = 0x{}\", hex::encode(nonce));\n\n warn!(\".. auth_tag = 0x{}\", hex::encode(&setup_pkt.auth_tag));\n\n return Err(e);\n", "file_path": "src/mix/circuit.rs", "rank": 82, "score": 35159.222292438804 }, { "content": " }\n\n }\n\n\n\n let ttl = setup_pkt\n\n .ttl()\n\n .ok_or_else(|| Error::InputError(\"Should have been filtered by gRPC\".to_string()))?;\n\n\n\n let upstream_id = match ttl {\n\n 0 => {\n\n // rendezvous only uses 32bit of the circuit id\n\n let small_id: u32 = thread_cprng().gen();\n\n small_id.into()\n\n }\n\n _ => thread_cprng().gen(),\n\n };\n\n\n\n let mut circuit = Circuit {\n\n rendezvous_map,\n\n layer,\n\n downstream_id: setup_pkt.circuit_id,\n", "file_path": "src/mix/circuit.rs", "rank": 83, "score": 35157.86760832639 }, { "content": " cell.onion_mut()[0] = n;\n\n if (n as usize) < dropped {\n\n warn!(\"Have to drop {} cells -> NACK not big enough\", dropped);\n\n }\n\n\n\n let mut dropped_tokens: Vec<Token> = inject_queue_guard.iter().map(|c| c.token()).collect();\n\n if let Some(t) = additional_token {\n\n dropped_tokens.push(t);\n\n }\n\n let mut i = 8;\n\n for token in dropped_tokens {\n\n match cell.onion_mut().get_mut(i..i + 8) {\n\n Some(buf) => LittleEndian::write_u64(buf, token),\n\n None => {\n\n break;\n\n }\n\n }\n\n i += 8;\n\n }\n\n cell\n\n }\n\n}\n", "file_path": "src/mix/circuit.rs", "rank": 84, "score": 35156.57273027558 }, { "content": " cell.randomize();\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n /// Use `additional_token` if the NACK replaces a cell that is not inserted into the queue\n\n /// before.\n\n fn create_nack(&self, additional_token: Option<Token>) -> Cell {\n\n let mut cell: Cell = vec![0u8; CELL_LEN].try_into().unwrap();\n\n cell.set_circuit_id(self.downstream_id);\n\n cell.set_round_no(self.max_round_no);\n\n let inject_queue_guard = self.inject_queue.read().expect(\"Lock poisoned\");\n\n let dropped = inject_queue_guard.len()\n\n + match 
additional_token {\n\n Some(_) => 1,\n\n None => 0,\n\n };\n\n let n = cmp::min((ONION_LEN - 8) / 8, dropped) as u8;\n", "file_path": "src/mix/circuit.rs", "rank": 85, "score": 35156.10637245381 }, { "content": " None => {\n\n cell.set_round_no(PUBLISH_ROUND_NO);\n\n let rendezvous_addr = self\n\n .rendezvous_map\n\n .rendezvous_address(&cell.token(), false)\n\n .expect(\"Checked this at circuit creation\");\n\n NextCellStep::Rendezvous(PacketWithNextHop::new(cell, rendezvous_addr))\n\n }\n\n }\n\n }\n\n CellDirection::Downstream => match self.downstream_hop {\n\n Some(hop) => NextCellStep::Relay(PacketWithNextHop::new(cell, hop)),\n\n None => NextCellStep::Deliver(cell),\n\n },\n\n }\n\n }\n\n\n\n /// Use an injected cell or create a dummy cell to pad the circuit if necessary.\n\n /// Returns `None` if no padding is necessary.\n\n pub fn pad(&self, round_no: RoundNo, direction: CellDirection) -> Option<NextCellStep> {\n", "file_path": "src/mix/circuit.rs", "rank": 86, "score": 35152.94113015117 }, { "content": "\n\n /// Onion encryption/decryption.\n\n fn handle_onion(&self, cell: &mut Cell, direction: CellDirection) {\n\n match direction {\n\n CellDirection::Upstream => {\n\n let tweak_src = 24 * cell.round_no() as u64;\n\n match self.threefish.decrypt(tweak_src, cell.onion_mut()) {\n\n Ok(()) => (),\n\n Err(e) => {\n\n warn!(\"Onion decryption failed, randomizing instead: {}\", e);\n\n cell.randomize();\n\n }\n\n }\n\n }\n\n CellDirection::Downstream => {\n\n let tweak_src = 24 * cell.round_no() as u64 + 12;\n\n match self.threefish.encrypt(tweak_src, cell.onion_mut()) {\n\n Ok(()) => (),\n\n Err(e) => {\n\n warn!(\"Onion encryption failed, randomizing instead: {}\", e);\n", "file_path": "src/mix/circuit.rs", "rank": 87, "score": 35152.31661662622 }, { "content": "use log::*;\n\nuse rand::seq::SliceRandom;\n\nuse rand::Rng;\n\n\n\nuse std::net::{IpAddr, SocketAddr};\n\n\n\nuse crate::assert_as_size_err;\n\nuse crate::crypto::aes::Aes256Gcm;\n\nuse 
crate::crypto::cprng::thread_cprng;\n\nuse crate::crypto::key::{hkdf_sha256, Key};\n\nuse crate::crypto::threefish::Threefish2048;\n\nuse crate::crypto::{x25519, x448};\n\nuse crate::defs::{\n\n tokens_to_byte_vec, CircuitId, RoundNo, Token, SETUP_AUTH_LEN, SETUP_NONCE_LEN, SETUP_TOKENS,\n\n};\n\nuse crate::epoch::EpochNo;\n\nuse crate::error::Error;\n\nuse crate::net::PacketWithNextHop;\n\nuse crate::tonic_directory::MixInfo;\n\nuse crate::tonic_mix::SetupPacket;\n\n\n\n/// Derive AES and Threefish key from shared secret.\n", "file_path": "src/client/circuit.rs", "rank": 88, "score": 35151.600205647985 }, { "content": " tokens.len()\n\n );\n\n\n\n let mut rng = thread_cprng();\n\n let circuit_id = circuit_id.unwrap_or_else(|| rng.gen());\n\n\n\n struct Hop {\n\n nonce: Vec<u8>,\n\n aes: Aes256Gcm,\n\n pk: Vec<u8>,\n\n ip: Vec<u8>,\n\n port: Vec<u8>,\n\n }\n\n\n\n let mut hops = Vec::new();\n\n let mut threefishies = Vec::new();\n\n\n\n for mix in path {\n\n let mix_pk = Key::clone_from_slice(&mix.public_dh);\n\n let (pk, shared_key) = match mix_pk.len() {\n", "file_path": "src/client/circuit.rs", "rank": 89, "score": 35151.473175233514 }, { "content": " circuit_id, circuit.first_hop\n\n );\n\n let extend = PacketWithNextHop::new(setup_pkt, circuit.first_hop);\n\n Ok((circuit, extend))\n\n }\n\n\n\n /// Client circuit only has an upstream id\n\n pub fn circuit_id(&self) -> CircuitId {\n\n self.circuit_id\n\n }\n\n\n\n pub fn first_hop(&self) -> &SocketAddr {\n\n &self.first_hop\n\n }\n\n\n\n pub fn tokens(&self) -> &[Token] {\n\n &self.tokens\n\n }\n\n\n\n pub fn dummy_tokens(&self) -> &[Token] {\n", "file_path": "src/client/circuit.rs", "rank": 90, "score": 35151.31180178375 }, { "content": " \"Expected downstream hop information\".to_string(),\n\n ));\n\n }\n\n let setup_pkt = pkt.into_inner();\n\n let client_pk = Key::clone_from_slice(&setup_pkt.public_dh);\n\n let master_key = match ephemeral_sk.len() {\n\n x25519::KEY_LEN => 
x25519::generate_shared_secret(&client_pk, ephemeral_sk)?,\n\n x448::KEY_LEN => x448::generate_shared_secret(&client_pk, ephemeral_sk)?,\n\n _ => {\n\n return Err(Error::InputError(\n\n \"Our ephemeral sk has a strange size\".to_string(),\n\n ))\n\n }\n\n };\n\n let nonce = &setup_pkt.nonce;\n\n let (aes_key, onion_key) = derive_keys(&master_key, &nonce)?;\n\n let threefish = Threefish2048::new(onion_key)?;\n\n\n\n // decrypt onion part\n\n let mut decrypted = vec![0u8; setup_pkt.onion.len()];\n", "file_path": "src/mix/circuit.rs", "rank": 91, "score": 35151.14118117025 }, { "content": " let extend_info = ExtendInfo::new(next_setup_pkt, next_setup_hop);\n\n\n\n // TODO security: this should not be logged in production ...\n\n debug!(\n\n \"Created relay circuit with downstream id {} and upstream id {} in layer {}\",\n\n circuit.downstream_id, circuit.upstream_id, circuit.layer\n\n );\n\n Ok((circuit, NextSetupStep::Extend(extend_info)))\n\n }\n\n }\n\n\n\n /// circuit id used on the link towards the client (upstream rx, downstream tx)\n\n pub fn downstream_id(&self) -> CircuitId {\n\n self.downstream_id\n\n }\n\n\n\n /// circuit id used on the link towards the rendezvous node (upstream tx, downstream rx)\n\n pub fn upstream_id(&self) -> CircuitId {\n\n self.upstream_id\n\n }\n", "file_path": "src/mix/circuit.rs", "rank": 92, "score": 35151.0071286988 }, { "content": "use byteorder::{ByteOrder, LittleEndian};\n\nuse log::*;\n\nuse rand::Rng;\n\nuse std::cmp::{self, Ordering};\n\nuse std::collections::{BTreeMap, VecDeque};\n\nuse std::convert::TryInto;\n\nuse std::net::{IpAddr, Ipv4Addr, SocketAddr};\n\nuse std::sync::{Arc, RwLock};\n\n\n\npub type ExtendInfo = PacketWithNextHop<SetupPacket>;\n\n\n\npub enum NextSetupStep {\n\n Extend(ExtendInfo),\n\n Rendezvous(Vec<Token>),\n\n}\n\n\n\npub enum NextCellStep {\n\n Relay(PacketWithNextHop<Cell>),\n\n Rendezvous(PacketWithNextHop<Cell>),\n\n Deliver(Cell),\n", "file_path": "src/mix/circuit.rs", "rank": 93, "score": 
35151.00384819172 }, { "content": " Some(CellCmd::Delay(_)) => {\n\n warn!(\"Somebody used the delay cmd, but it is not implemented yet\")\n\n }\n\n Some(CellCmd::Subscribe(n_tokens)) if self.is_exit() => {\n\n let slice_upper = (n_tokens as usize) * std::mem::size_of::<Token>();\n\n let tokens = crate::defs::tokens_from_bytes(&cell.onion()[0..slice_upper]);\n\n match tokens.len() {\n\n 0 => (),\n\n 1 => sub_collector.collect_subscription(tokens[0], self.upstream_id),\n\n _ => sub_collector.collect_subscriptions(tokens, self.upstream_id),\n\n }\n\n // this cell has no end-to-end payload -> drop\n\n return NextCellStep::Drop;\n\n }\n\n // remaining commands are not for upstream mixes\n\n _ => (),\n\n }\n\n\n\n match self.upstream_hop {\n\n Some(hop) => NextCellStep::Relay(PacketWithNextHop::new(cell, hop)),\n", "file_path": "src/mix/circuit.rs", "rank": 94, "score": 35149.97309128123 }, { "content": " CellDirection::Upstream => match self.upstream_hop {\n\n Some(hop) => Some(NextCellStep::Relay(PacketWithNextHop::new(cell, hop))),\n\n None => {\n\n cell.set_round_no(PUBLISH_ROUND_NO);\n\n let rendezvous_addr = self\n\n .rendezvous_map\n\n .rendezvous_address(&cell.token(), false)\n\n .expect(\"Checked at circuit setup\");\n\n Some(NextCellStep::Rendezvous(PacketWithNextHop::new(\n\n cell,\n\n rendezvous_addr,\n\n )))\n\n }\n\n },\n\n CellDirection::Downstream => match self.downstream_hop {\n\n Some(hop) => Some(NextCellStep::Relay(PacketWithNextHop::new(cell, hop))),\n\n None => Some(NextCellStep::Deliver(cell)),\n\n },\n\n }\n\n }\n", "file_path": "src/mix/circuit.rs", "rank": 95, "score": 35149.640401334545 }, { "content": " &self.dummy_tokens\n\n }\n\n\n\n pub fn onion_encrypt(&self, round_no: RoundNo, onion: &mut [u8]) -> Result<(), Error> {\n\n let tweak_src = 24 * round_no as u64;\n\n for tf in self.threefishies.iter().rev() {\n\n tf.encrypt(tweak_src, onion)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn onion_decrypt(&self, round_no: RoundNo, onion: &mut [u8]) -> 
Result<(), Error> {\n\n let tweak_src = 24 * round_no as u64 + 12;\n\n for tf in self.threefishies.iter() {\n\n tf.decrypt(tweak_src, onion)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/client/circuit.rs", "rank": 96, "score": 35148.76310450892 }, { "content": " x25519::KEY_LEN => {\n\n let (pk, sk) = x25519::generate_keypair();\n\n (pk, x25519::generate_shared_secret(&mix_pk, &sk)?)\n\n }\n\n x448::KEY_LEN => {\n\n let (pk, sk) = x448::generate_keypair();\n\n (pk, x448::generate_shared_secret(&mix_pk, &sk)?)\n\n }\n\n _ => {\n\n return Err(Error::InputError(\n\n \"Mix does not have public key with valid length\".to_string(),\n\n ))\n\n }\n\n };\n\n let mut nonce = vec![0u8; SETUP_NONCE_LEN];\n\n let mut rng = thread_cprng();\n\n rng.fill(nonce.as_mut_slice());\n\n let (aes_key, onion_key) = derive_keys(&shared_key, &nonce)?;\n\n let aes = Aes256Gcm::new(aes_key);\n\n let sock_addr = mix.relay_address().ok_or_else(|| {\n", "file_path": "src/client/circuit.rs", "rank": 97, "score": 35148.52980284154 }, { "content": " .encrypt(&hop.nonce, &plaintext, &mut ciphertext, None, &mut auth_tag)?;\n\n let layer_combined = vec![\n\n hop.ip.clone(),\n\n hop.port.clone(),\n\n hop.pk.clone(),\n\n hop.nonce.clone(),\n\n auth_tag,\n\n ciphertext,\n\n ];\n\n plaintext = layer_combined.into_iter().flatten().collect();\n\n }\n\n\n\n // last encryption (first hop) can be done in place\n\n let mut setup_pkt = SetupPacket {\n\n epoch_no,\n\n circuit_id,\n\n public_dh: first_hop_info.pk.clone(),\n\n nonce: first_hop_info.nonce.clone(),\n\n auth_tag: vec![0u8; 16],\n\n onion: plaintext.clone(),\n", "file_path": "src/client/circuit.rs", "rank": 98, "score": 35147.3194856682 }, { "content": " let mut last_round_no_guard = match direction {\n\n CellDirection::Upstream => self.last_upstream_round_no.write().expect(\"Lock poisoned\"),\n\n CellDirection::Downstream => self\n\n .last_downstream_round_no\n\n .write()\n\n .expect(\"Lock poisoned\"),\n\n };\n\n\n\n let need_dummy = match 
*last_round_no_guard {\n\n Some(last_round_no) => round_no != last_round_no,\n\n None => true,\n\n };\n\n\n\n if !need_dummy {\n\n return None;\n\n }\n\n\n\n *last_round_no_guard = Some(round_no);\n\n\n\n let circuit_id = match direction {\n", "file_path": "src/mix/circuit.rs", "rank": 99, "score": 35147.3008032141 } ]
Rust
Project/src/main.rs
qarmin/gtk-rs-fuzzer
def0baaabe356998cef130f3a3aec6654bd5b78d
#![allow(dead_code)] #![allow(unused_imports)] #![allow(unused_must_use)] mod create_objects; mod enum_things; mod helpers; mod implementations; mod ziemniak; use crate::create_objects::*; use crate::ziemniak::{run_tests, SettingsTaker}; use gtk4::prelude::*; use gtk4::*; use std::fs; use std::fs::File; fn main() { let application = gtk4::Application::builder().build(); application.connect_activate(move |application| { let window = gtk4::ApplicationWindow::new(application); window.set_title(Some("Fuzzer gtk-rs")); window.show(); let sf = read_from_file(); if TEST == 0 { crashes(); } else { run_tests(sf); } }); application.run(); } const TEST: u64 = 1; fn crashes() { println!("TESTSTTSTSTSTSTSTST"); } fn read_from_file() -> SettingsTaker { let string: String = match fs::read_to_string("settings.txt") { Ok(t) => t, Err(_) => { println!("Missing settings.txt file"); return SettingsTaker { ignored_functions: vec![], allowed_functions: vec![], ignored_classes: vec![], allowed_classes: vec![], repeating_number: 3, all_repeating_number: 1, number_of_max_executed_function: -1, }; } }; let mut st: SettingsTaker = SettingsTaker { ignored_functions: vec![], allowed_functions: vec![], ignored_classes: vec![], allowed_classes: vec![], repeating_number: 3, all_repeating_number: 1, number_of_max_executed_function: -1, }; enum MODES { None, IgnoredFunctions, AllowedFunctions, IgnoredClasses, AllowedClasses, Repeating, AllRepeating, MaxExecutedFunction, } let mut current_mode: MODES = MODES::None; for line in string.split('\n').map(|e| e.to_string()).collect::<Vec<String>>() { let new_line = line.trim().to_string(); if new_line.starts_with("//") { continue; } if new_line == "ignored_functions:" { current_mode = MODES::IgnoredFunctions; } else if new_line == "allowed_functions:" { current_mode = MODES::AllowedFunctions; } else if new_line == "ignored_classes:" { current_mode = MODES::IgnoredClasses; } else if new_line == "allowed_classes:" { current_mode = MODES::AllowedClasses; } 
else if new_line == "repeating_number:" { current_mode = MODES::Repeating; } else if new_line == "all_repeating_number:" { current_mode = MODES::AllRepeating; } else if new_line == "number_of_max_executed_function:" { current_mode = MODES::MaxExecutedFunction; } else { if !new_line.is_empty() { match current_mode { MODES::IgnoredFunctions => st.ignored_functions.push(new_line), MODES::AllowedFunctions => st.allowed_functions.push(new_line), MODES::IgnoredClasses => st.ignored_classes.push(new_line), MODES::AllowedClasses => st.allowed_classes.push(new_line), MODES::Repeating => { if let Ok(number) = new_line.parse() { st.repeating_number = number; } } MODES::AllRepeating => { if let Ok(number) = new_line.parse() { st.all_repeating_number = number; } } MODES::MaxExecutedFunction => { if let Ok(number) = new_line.parse() { st.number_of_max_executed_function = number; } } MODES::None => println!("SETTING: Missing mode for {}", new_line), } } } } { println!("Start settings loading"); if !st.ignored_classes.is_empty() { println!("Ignored classes:"); for i in &st.ignored_classes { println!("{}", i); } } if !st.allowed_classes.is_empty() { println!("Allowed classes:"); for i in &st.allowed_classes { println!("{}", i); } } if !st.allowed_functions.is_empty() { println!("Allowed functions:"); for i in &st.allowed_functions { println!("{}", i); } } if !st.ignored_functions.is_empty() { println!("Ignored functions:"); for i in &st.ignored_functions { println!("{}", i); } } println!("Repeating - {}", st.repeating_number); println!("All Repeating - {}", st.all_repeating_number); println!("Max Executed Functions - {}", st.number_of_max_executed_function); println!("End settings loading"); } st }
#![allow(dead_code)] #![allow(unused_imports)] #![allow(unused_must_use)] mod create_objects; mod enum_things; mod helpers; mod implementations; mod ziemniak; use crate::create_objects::*; use crate::ziemniak::{run_tests, SettingsTaker}; use gtk4::prelude::*; use gtk4::*; use std::fs; use std::fs::File; fn main() { let application = gtk4::Application::builder().build(); application.connect_activate(move |application| { let window = gtk4::ApplicationWindow::new(application); window.set_title(Some("Fuzzer gtk-rs")); window.show(); let sf = read_from_file(); if TEST == 0 { crashes(); } else { run_tests(sf); } }); application.run(); } const TEST: u64 = 1; fn crashes() { println!("TESTSTTSTSTSTSTSTST"); } fn read_from_file() -> SettingsTaker { let string: String = match fs::read_to_string("settings.txt") { Ok(t) => t, Err(_) => { println!("Missing settings.txt file"); return SettingsTaker { ignored_functions: vec![], allowed_functions: vec![], ignored_classes: vec![], allowed_classes: vec![], repeating_number: 3, all_repeating_number: 1, number_of_max_executed_function: -1, }; } }; let mut st: SettingsTaker = SettingsTaker { ignored_functions: vec![], allowed_functions: vec![], ignored_classes: vec![], allowed_classes: vec![], repeating_number: 3, all_repeating_number: 1, number_of_max_executed_function: -1, }; enum MODES { None, IgnoredFunctions, AllowedFunctions, IgnoredClasses, AllowedClasses, Repeating, AllRepeating, MaxExecutedFunction, } let mut current_mode: MODES = MODES::None; for line in string.split('\n').map(|e| e.to_string()).collect::<Vec<String>>() { let new_line = line.trim().to_string(); if new_line.starts_with("//") { continue; } if new_line == "ignored_functions:" { current_mode = MODES::IgnoredFunctions; } else if new_line == "allowed_functions:" { current_mode = MODES::AllowedFunctions; } else if new_line == "ignored_classes:" { current_mode = MODES::IgnoredClasses; } else if new_line == "allowed_classes:" { current_mode = MODES::AllowedClasses; } 
else if new_line == "repeating_number:" { current_mode = MODES::Repeating; } else if new_line == "all_repeating_number:" { current_mode = MODES::AllRepeating; } else if new_line == "number_of_max_executed_function:" { current_mode = MODES::MaxExecutedFunction; } else { if !new_line.is_empty() { match current_mode { MODES::IgnoredFunctions => st.ignored_functions.push(new_line), MODES::AllowedFunctions => st.allowed_functions.push(new_line), MODES::IgnoredClasses => st.ignored_classes.push(new_line), MODES::AllowedClasses => st.allowed_classes.push(new_line), MODES::Repeating => { if let Ok(number) = new_line.parse() { st.repeating_number = number; } } MODES::AllRepeating => { if let Ok(number) = new_line.parse() { st.all_repeating_number = number; } } MODES::MaxExecutedFunction => { if let Ok(number) = new_line.parse() { st.number_of_max_executed_function = number; } } MODES::None => println!("SETTING: Missing mode for {}", new_line), } } } } { println!("Start settings loading"); if !st.ignored_classes.is_empty() { println!("Ignored classes:"); for i in &st.ignored_classes { println!("{}", i); } }
if !st.allowed_functions.is_empty() { println!("Allowed functions:"); for i in &st.allowed_functions { println!("{}", i); } } if !st.ignored_functions.is_empty() { println!("Ignored functions:"); for i in &st.ignored_functions { println!("{}", i); } } println!("Repeating - {}", st.repeating_number); println!("All Repeating - {}", st.all_repeating_number); println!("Max Executed Functions - {}", st.number_of_max_executed_function); println!("End settings loading"); } st }
if !st.allowed_classes.is_empty() { println!("Allowed classes:"); for i in &st.allowed_classes { println!("{}", i); } }
if_condition
[ { "content": "pub fn take_vec_string() -> Vec<String> {\n\n let mut to_return = Vec::new();\n\n\n\n for _i in 0..thread_rng().gen_range(0..10) {\n\n to_return.push(take_string());\n\n }\n\n\n\n debug_printing_vec(&to_return);\n\n to_return\n\n}\n\n\n", "file_path": "Project/src/helpers.rs", "rank": 0, "score": 171640.24025093165 }, { "content": "pub fn get_vector_str_from_string(fff: &Vec<String>) -> Vec<&str> {\n\n let mut vec: Vec<&str> = Vec::new();\n\n for i in fff {\n\n vec.push(i);\n\n }\n\n vec\n\n}\n", "file_path": "Project/src/helpers.rs", "rank": 1, "score": 158223.89913243754 }, { "content": "pub fn debug_printing_vec(what_to_print: &Vec<String>) {\n\n if DEBUG_PRINTING {\n\n println!(\"Using argument \\\"{:?}\\\"(text length {})\", what_to_print, what_to_print.len());\n\n }\n\n}\n\n\n", "file_path": "Project/src/helpers.rs", "rank": 2, "score": 154655.61465260247 }, { "content": "pub fn run_tests(st: SettingsTaker) {\n\n let mut file = OpenOptions::new().write(true).truncate(true).create(true).open(\"things.txt\").unwrap();\n\n\n\n let all_classes: [(fn(&mut File, &SettingsTaker) -> (), &str); <<number_of_functions>>] = [<<function_arguments>>];\n\n \n\n for _i in 0..st.all_repeating_number {\n\n if st.allowed_classes.is_empty() {\n\n for (function, name) in all_classes {\n\n if !st.ignored_classes.contains(&name.to_string()) {\n\n function(&mut file, &st);\n\n }\n\n }\n\n } else {\n\n for (function, name) in all_classes {\n\n if st.allowed_classes.contains(&name.to_string()) {\n\n function(&mut file, &st);\n\n }\n\n }\n\n }\n\n }\n", "file_path": "src/creating_functions.rs", "rank": 3, "score": 126568.1718369009 }, { "content": "pub fn take_u64() -> u64 {\n\n let to_return: u64 = thread_rng().gen_range(0..100000);\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n", "file_path": "Project/src/helpers.rs", "rank": 4, "score": 118403.34021944884 }, { "content": "pub fn take_string() -> String {\n\n let to_return;\n\n\n\n if 
rand::random::<bool>() {\n\n to_return = \"\".to_string();\n\n } else {\n\n to_return = thread_rng().gen_range(-100000..100000).to_string();\n\n }\n\n debug_printing(&to_return);\n\n to_return\n\n}\n\n\n", "file_path": "Project/src/helpers.rs", "rank": 5, "score": 118073.69754946872 }, { "content": "fn main() {\n\n let (class_info, class_functions, traits, enums, children_of_class) = collect_things();\n\n create_enums_file(&class_info, &class_functions, &traits, &enums, &children_of_class);\n\n create_implementation_file(&class_info, &class_functions, &traits, &enums, &children_of_class);\n\n create_project_file(class_info, class_functions, traits, enums, children_of_class)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 104604.6427591211 }, { "content": "pub fn stek_<<type_lowercase>>() -> (<<type>>, String) {\n\n let to_return = ENUM_<<type_uppercase>>_TYPE.choose(&mut rand::thread_rng()).unwrap();\n\n let to_return = (to_return.0, to_return.1.to_string());\n\n\n\n debug_printing(&to_return.1);\n\n return to_return;\n\n}\n\n\"#####;\n\n\n\n for (name_of_enum, constant_list) in enums {\n\n if IGNORED_ENUMS.contains(&name_of_enum.as_str()) {\n\n continue;\n\n }\n\n\n\n let mut to_write = single_enum_template\n\n .replace(\"<<type>>\", name_of_enum)\n\n .replace(\"<<number>>\", &constant_list.len().to_string())\n\n .replace(\"<<type_lowercase>>\", &name_of_enum.to_lowercase())\n\n .replace(\"<<type_uppercase>>\", &name_of_enum.to_uppercase());\n\n let mut arguments = \"\".to_string();\n", "file_path": "src/creating_enum.rs", "rank": 9, "score": 98582.71209323677 }, { "content": "pub fn stek_glib_type() -> (glib::Type, String) {\n\n let to_return = S_LIB_TYPE.choose(&mut rand::thread_rng()).unwrap();\n\n let to_return = (to_return.0, to_return.1.to_string());\n\n\n\n debug_printing(&to_return.1);\n\n return to_return;\n\n}\n\n\n", "file_path": "Project/src/helpers.rs", "rank": 10, "score": 95830.97092657257 }, { "content": "pub fn 
create_implementation_file(\n\n _class_info: &BTreeMap<String, Vec<String>>,\n\n _class_functions: &BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n _traits: &BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n _enums: &BTreeMap<String, Vec<String>>,\n\n children_of_class: &BTreeMap<String, Vec<String>>,\n\n) {\n\n let _ = fs::remove_file(PATH_TO_IMPLEMENTATIONS);\n\n\n\n let file = OpenOptions::new().write(true).truncate(true).create(true).open(PATH_TO_IMPLEMENTATIONS).unwrap();\n\n let mut file = BufWriter::new(file);\n\n\n\n let enum_start = r#####\"\n\nuse crate::create_objects::*;\n\nuse crate::helpers::*;\n\nuse gtk4::prelude::*;\n\nuse gtk4::*;\n\nuse std::fs;\n\nuse std::fs::{File, OpenOptions};\n\nuse rand::prelude::*;\n\nuse std::io::Write;\"#####;\n\n writeln!(file, \"{}\", enum_start).unwrap();\n\n\n\n // type - base type e.g. Widget (ImplAs<Widget>)\n\n // type_lowercase - base type lowercase\n\n // number_of_records - number of records\n\n // items -\n\n let single_impl_template = r#####\"\n", "file_path": "src/creating_implementation.rs", "rank": 12, "score": 90657.76448548005 }, { "content": "pub fn create_enums_file(\n\n _class_info: &BTreeMap<String, Vec<String>>,\n\n _class_functions: &BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n _traits: &BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n enums: &BTreeMap<String, Vec<String>>,\n\n _children_of_class: &BTreeMap<String, Vec<String>>,\n\n) {\n\n let _ = fs::remove_file(PATH_TO_ENUM_FILE);\n\n\n\n let file = OpenOptions::new().write(true).truncate(true).create(true).open(PATH_TO_ENUM_FILE).unwrap();\n\n let mut file = BufWriter::new(file);\n\n\n\n let enum_start = r#####\"\n\nuse crate::create_objects::*;\n\nuse crate::helpers::*;\n\nuse gtk4::prelude::*;\n\nuse gtk4::*;\n\nuse std::fs;\n\nuse std::fs::{File, OpenOptions};\n\nuse rand::prelude::*;\n", "file_path": "src/creating_enum.rs", "rank": 13, "score": 90635.22751112166 }, { "content": "pub fn gget_window() -> (Window, &'static 
str) {\n\n (Window::new(), r###\"Window::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 14, "score": 81073.40622554079 }, { "content": "fn collect_things() -> (\n\n BTreeMap<String, Vec<String>>,\n\n BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n BTreeMap<String, Vec<String>>,\n\n BTreeMap<String, Vec<String>>,\n\n) {\n\n // Do not modify result of this variable\n\n let mut class_info: BTreeMap<String, Vec<String>> = Default::default(); // Class + what extends e.g. Label -> [Widget, LabelExt]\n\n // Can be removed\n\n let mut class_functions: BTreeMap<String, BTreeMap<String, Vec<String>>> = Default::default(); // Class + functions + arguments e.g. Label -> new -> [&str]\n\n\n\n let mut traits: BTreeMap<String, BTreeMap<String, Vec<String>>> = Default::default();\n\n let mut enums: BTreeMap<String, Vec<String>> = Default::default();\n\n\n\n let mut children_of_class: BTreeMap<String, Vec<String>> = Default::default();\n\n\n\n let mut number_of_ignored_functions: u32 = 0;\n\n let mut number_of_ignored_gio_etc_functions: u32 = 0;\n\n\n", "file_path": "src/main.rs", "rank": 15, "score": 59833.51239150467 }, { "content": "fn count_objects(\n\n class_functions: &BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n traits: &BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n enums: &BTreeMap<String, Vec<String>>,\n\n what: &str,\n\n) {\n\n let mut counter_class = 0;\n\n let mut counter_methods = 0;\n\n let mut counter_arguments = 0;\n\n let traits_number = traits.len();\n\n let all_traits: usize = traits.iter().map(|(_e, b)| b.len()).sum();\n\n let enums_number = enums.len();\n\n let all_enums: usize = enums.iter().map(|(_e, b)| b.len()).sum();\n\n for function_list in class_functions.values() {\n\n counter_class += 1;\n\n for arguments in function_list.values() {\n\n counter_methods += 1;\n\n counter_arguments += arguments.len();\n\n }\n\n }\n\n println!(\n\n \"{} - Class: {}, 
Methods: {}, Arguments: {}, Traits: {}({}), Enums: {}({})\",\n\n what, counter_class, counter_methods, counter_arguments, traits_number, all_traits, enums_number, all_enums\n\n );\n\n}\n", "file_path": "src/main.rs", "rank": 16, "score": 59833.51239150467 }, { "content": "pub fn gget_settings() -> (Settings, &'static str) {\n\n (SettingsBuilder::build(Default::default()), r###\"SettingsBuilder::build(Default::default())\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 17, "score": 56634.362183632285 }, { "content": "pub fn create_project_file(\n\n _class_info: BTreeMap<String, Vec<String>>,\n\n class_functions: BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n _traits: BTreeMap<String, BTreeMap<String, Vec<String>>>,\n\n enums: BTreeMap<String, Vec<String>>,\n\n children_of_class: BTreeMap<String, Vec<String>>,\n\n) {\n\n let _ = fs::remove_file(PATH_TO_PROJECT_FILE);\n\n\n\n let file = OpenOptions::new().write(true).truncate(true).create(true).open(PATH_TO_PROJECT_FILE).unwrap();\n\n let mut file = BufWriter::new(file);\n\n\n\n let start_text = r#####\"\n\nuse crate::create_objects::*;\n\nuse crate::helpers::*;\n\nuse crate::enum_things::*;\n\nuse crate::implementations::*;\n\nuse gtk4::prelude::*;\n\nuse gtk4::*;\n\nuse std::fs;\n", "file_path": "src/creating_functions.rs", "rank": 18, "score": 53201.439022986655 }, { "content": "pub fn take_bool() -> bool {\n\n let to_return: bool = rand::random::<bool>();\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n\n\n\nconst S_LIB_TYPE: [(glib::Type, &str); 22] = [\n\n (glib::Type::INVALID, \"glib::Type::INVALID\"),\n\n (glib::Type::UNIT, \"glib::Type::UNIT\"),\n\n (glib::Type::I8, \"glib::Type::I8\"),\n\n (glib::Type::U8, \"glib::Type::U8\"),\n\n (glib::Type::BOOL, \"glib::Type::BOOL\"),\n\n (glib::Type::I32, \"glib::Type::I32\"),\n\n (glib::Type::U32, \"glib::Type::U32\"),\n\n (glib::Type::I_LONG, \"glib::Type::I_LONG\"),\n\n (glib::Type::U_LONG, \"glib::Type::U_LONG\"),\n\n 
(glib::Type::I64, \"glib::Type::I64\"),\n\n (glib::Type::U64, \"glib::Type::U64\"),\n\n (glib::Type::F32, \"glib::Type::F32\"),\n", "file_path": "Project/src/helpers.rs", "rank": 19, "score": 52747.34855261227 }, { "content": "pub fn take_i64() -> i64 {\n\n let to_return: i64 = thread_rng().gen_range(-100000..100000);\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n", "file_path": "Project/src/helpers.rs", "rank": 20, "score": 52747.34855261227 }, { "content": "pub fn take_usize() -> usize {\n\n let to_return: usize = thread_rng().gen_range(0..100000);\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n", "file_path": "Project/src/helpers.rs", "rank": 21, "score": 52747.34855261227 }, { "content": "pub fn take_f32() -> f32 {\n\n let to_return: f32 = thread_rng().gen_range(-100000.0..100000.0);\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n", "file_path": "Project/src/helpers.rs", "rank": 22, "score": 52747.34855261227 }, { "content": "pub fn take_u32() -> u32 {\n\n let to_return: u32 = thread_rng().gen_range(0..100000);\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n", "file_path": "Project/src/helpers.rs", "rank": 23, "score": 52747.34855261227 }, { "content": "pub fn take_i32() -> i32 {\n\n let to_return: i32 = thread_rng().gen_range(-100000..100000);\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n", "file_path": "Project/src/helpers.rs", "rank": 24, "score": 52747.34855261227 }, { "content": "pub fn take_char() -> char {\n\n let to_return: char = thread_rng().gen_range(0..127) as u8 as char;\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n\n\n", "file_path": "Project/src/helpers.rs", "rank": 25, "score": 52747.34855261227 }, { "content": "pub fn take_f64() -> f64 {\n\n let to_return: f64 = thread_rng().gen_range(-100000.0..100000.0);\n\n\n\n debug_printing(&to_return.to_string());\n\n to_return\n\n}\n", "file_path": "Project/src/helpers.rs", 
"rank": 26, "score": 52747.34855261227 }, { "content": "pub fn debug_printing(what_to_print: &str) {\n\n if DEBUG_PRINTING {\n\n println!(\"Using argument \\\"{}\\\"(text length {})\", what_to_print, what_to_print.len());\n\n }\n\n}\n", "file_path": "Project/src/helpers.rs", "rank": 27, "score": 50569.69557869656 }, { "content": "pub fn imple_<<type_lowercase>>() -> (<<type>>, &'static str) {\n\n let number_of_records = <<number_of_records>>;\n\n\n\n match thread_rng().gen_range(0 as usize..number_of_records as usize) {\n\n <<items>>\n\n }\n\n}\n\n\"#####;\n\n\n\n for (name_of_class, children_list) in children_of_class {\n\n if children_list.is_empty() || IGNORED_IMPLEMENTATIONS.contains(&name_of_class.as_str()) {\n\n continue;\n\n }\n\n\n\n let mut to_write = single_impl_template\n\n .replace(\"<<type>>\", name_of_class)\n\n .replace(\"<<type_lowercase>>\", &name_of_class.to_lowercase())\n\n .replace(\"<<number_of_records>>\", &children_list.len().to_string());\n\n let mut arguments = \"\".to_string();\n\n for (index, child_of_item) in children_list.iter().enumerate() {\n", "file_path": "src/creating_implementation.rs", "rank": 28, "score": 46928.27559606581 }, { "content": "pub fn gget_windowhandle() -> (WindowHandle, &'static str) {\n\n (WindowHandle::new(), r###\"WindowHandle::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 29, "score": 45585.78008230132 }, { "content": "pub fn gget_shortcutswindow() -> (ShortcutsWindow, &'static str) {\n\n (ShortcutsWindow::builder().build(), r###\"ShortcutsWindow::builder().build()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 30, "score": 45585.78008230132 }, { "content": "pub fn gget_scrolledwindow() -> (ScrolledWindow, &'static str) {\n\n (ScrolledWindow::new(), r###\"ScrolledWindow::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 31, "score": 45585.78008230132 }, { "content": "pub fn gget_windowcontrols() -> (WindowControls, &'static str) {\n\n 
(WindowControls::default(), r###\"WindowControls::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 32, "score": 45585.78008230132 }, { "content": "pub fn gget_mediafile() -> (MediaFile, &'static str) {\n\n (MediaFile::new(), r###\"MediaFile::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 33, "score": 45528.08343732784 }, { "content": "pub fn gget_filefilter() -> (FileFilter, &'static str) {\n\n (FileFilter::new(), r###\"FileFilter::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 34, "score": 45528.08343732784 }, { "content": "pub fn gget_stringfilter() -> (StringFilter, &'static str) {\n\n (StringFilter::default(), r###\"StringFilter::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 35, "score": 45513.60082523477 }, { "content": "pub fn gget_stringlist() -> (StringList, &'static str) {\n\n (StringList::default(), r###\"StringList::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 36, "score": 45513.60082523477 }, { "content": "pub fn gget_stringobject() -> (StringObject, &'static str) {\n\n (StringObject::new(&take_string()), r###\"StringObject::new(&take_string())\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 37, "score": 45513.60082523477 }, { "content": "pub fn gget_stringsorter() -> (StringSorter, &'static str) {\n\n (StringSorter::default(), r###\"StringSorter::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 38, "score": 45513.60082523477 }, { "content": "pub fn gget_printsettings() -> (PrintSettings, &'static str) {\n\n (PrintSettings::new(), r###\"PrintSettings::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 39, "score": 45425.83258272963 }, { "content": "pub fn gget_filechooserwidget() -> (FileChooserWidget, &'static str) {\n\n (FileChooserWidget::default(), r###\"FileChooserWidget::default()\"###)\n\n}\n", "file_path": 
"Project/src/create_objects.rs", "rank": 40, "score": 44102.85618129913 }, { "content": "pub fn gget_filechooserdialog() -> (FileChooserDialog, &'static str) {\n\n (FileChooserDialog::default(), r###\"FileChooserDialog::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 41, "score": 44102.85618129913 }, { "content": "pub fn gget_filechoosernative() -> (FileChooserNative, &'static str) {\n\n (FileChooserNative::default(), r###\"FileChooserNative::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 42, "score": 44102.85618129913 }, { "content": "use std::ops::Range;\n\n\n\npub const PATH_TO_GTK_RS: &str = \"/home/rafal/Downloads/gtk4-rs/gtk4/src\";\n\npub const PATH_TO_GTK_RS_AUTO: &str = \"/home/rafal/Downloads/gtk4-rs/gtk4/src/auto\";\n\n\n\npub const PATH_TO_PROJECT_FILE: &str = \"/home/rafal/Projekty/Rust/gtk_rs_fuzzer/Project/src/ziemniak.rs\";\n\npub const PATH_TO_ENUM_FILE: &str = \"/home/rafal/Projekty/Rust/gtk_rs_fuzzer/Project/src/enum_things.rs\";\n\npub const PATH_TO_IMPLEMENTATIONS: &str = \"/home/rafal/Projekty/Rust/gtk_rs_fuzzer/Project/src/implementations.rs\";\n\n\n\npub const USE_PARENT_ITEMS: bool = true;\n\npub const USE_TRAIT_ITEMS: bool = true;\n\n// pub const NUMBER_OF_REPEATS: u32 = 2; // How many time repeat function executing to be sure that this function cause problems\n\n\n\npub const RANGE_OF_USED_CLASSES: Range<usize> = 0..200;\n\n\n\npub const IGNORED_FUNCTIONS: &[&str] = &[\n\n // Non existent functions(probably bug in parsing files or this will be available in 4.6 + versions)\n\n \"set_show_arrow\", // DropDown\n\n \"add_suffix\", // FileFilter\n\n \"set_always_show_arrow\", // MenuButton\n", "file_path": "src/settings.rs", "rank": 43, "score": 34716.685455209394 }, { "content": " \"set_icons\",\n\n \"end_widget\",\n\n \"add_mnemonic_label\",\n\n \"insert_text\", // TODO - needs mut as second argument\n\n \"add_objects_from_string\",\n\n // TODO not sure why argument number is 
wrong, maybe GTK 4.6 thing?\n\n \"pack_end\",\n\n \"pack_start\",\n\n \"set_child\",\n\n \"prepend\",\n\n \"append\",\n\n // TODO why this static functions were found?\n\n \"new_constant\",\n\n];\n\n\n\npub const FUNCTIONS_TO_USE: &[&str] = &[];\n\n\n\n// List of classes which will be used(IGNORED_CLASSES NOT ignore classes from this array).\n\n// If empty, all classes which are not present in IGNORED_CLASSES are used\n\npub const CLASSES_TO_USE: &[&str] = &[];\n", "file_path": "src/settings.rs", "rank": 44, "score": 34714.18869410697 }, { "content": "\n\npub const IGNORED_ENUMS: &[&str] = &[\n\n \"SymbolicColor\",\n\n \"NaturalWrapMode\",\n\n \"State\",\n\n \"EventControllerScrollFlags\", // Bitflags\n\n \"InputHints\", // Bitflags\n\n \"StateFlags\", // BitFlags\n\n \"PickFlags\", // Bitflags\n\n \"FontChooserLevel\", // Bitflags\n\n \"IconLookupFlags\", // BitFlags\n\n \"DialogFlags\", // Bitflags\n\n];\n\n\n\npub const IGNORED_IMPLEMENTATIONS: &[&str] = &[\"SymbolicPaintable\"];\n\n\n\npub const IGNORED_CLASSES: &[&str] = &[\n\n \"PageSetupUnixDialog\", // Not works on Windows\n\n \"PrintUnixDialog\", // Not works on Windows\n\n // Classes, which objects I can't create\n", "file_path": "src/settings.rs", "rank": 45, "score": 34711.49044087464 }, { "content": " \"PasswordEntryBuffer\",\n\n \"Tooltip\",\n\n \"PrintCapabilities\",\n\n \"Printer\",\n\n \"PrintJob\",\n\n \"Allocation\",\n\n \"TreeIter\",\n\n \"ListBase\",\n\n \"SymbolicPaintable\",\n\n \"CellArea\",\n\n \"Range\",\n\n \"MultiFilter\",\n\n \"Gesture\",\n\n \"GestureSingle\",\n\n \"ConstraintLayoutChild\",\n\n \"CallbackAction\",\n\n \"TreeModelFilter\",\n\n // Other\n\n \"ApplicationWindow\", // Only one ApplicationWindows can be created\n\n \"FileChooserWidget\", // Create a lot of warnings(and possibly also crashes) \"Too many open files\"\n", "file_path": "src/settings.rs", "rank": 46, "score": 34710.95228442862 }, { "content": " \"set_gtk_xft_dpi\",\n\n \"set_gtk_xft_hinting\",\n\n \"\",\n\n 
\"\",\n\n \"\",\n\n \"\",\n\n \"\",\n\n // Reported, but not fixed in used GTK/GTK-rs version\n\n \"emit_escape\", // https://github.com/gtk-rs/gtk4-rs/issues/870\n\n \"im_context\", // https://github.com/gtk-rs/gtk4-rs/issues/874\n\n \"insert_prefix\", // https://github.com/gtk-rs/gtk4-rs/issues/873\n\n \"drag_dest_item\", // https://github.com/gtk-rs/gtk-rs-core/issues/537\n\n \"print_settings\", // https://github.com/gtk-rs/gtk4-rs/issues/880\n\n \"selected_printer\", // https://github.com/gtk-rs/gtk4-rs/issues/882\n\n \"renderer\", // https://github.com/gtk-rs/gtk4-rs/issues/886\n\n \"surface\", // https://github.com/gtk-rs/gtk4-rs/issues/886\n\n \"content_type\", // https://github.com/gtk-rs/gtk4-rs/issues/887\n\n \"set_current_page\", // https://github.com/gtk-rs/gtk4-rs/issues/888\n\n \"request_mode\", // https://github.com/gtk-rs/gtk4-rs/issues/889\n\n \"current_path_string\", // https://github.com/gtk-rs/gtk4-rs/issues/890\n", "file_path": "src/settings.rs", "rank": 47, "score": 34710.645673591804 }, { "content": " \"set_primary\", // MenuButton\n\n \"set_gtk_hint_font_metrics\", // Settings\n\n \"set_indent_for_icon\", // TreeExpander\n\n \"emit_activate\", // AppChooserButton\n\n \"shows_arrow\", // DropDown\n\n \"current_drop\", // DropTarget\n\n \"natural_wrap_mode\", // Label\n\n \"is_primary\", // MenuButton\n\n \"is_gtk_hint_font_metrics\", // Settings\n\n \"must_always_show_arrow\", // MenuButton\n\n \"child\", // MenuButton\n\n \"is_indent_for_icon\", // TreeExpander\n\n \"compute_cursor_extents\", // Text\n\n // Other\n\n \"set_direction\", // Some parsing problem\n\n // Rust error, multiple applicable items in scope\n\n \"alignment\",\n\n \"font_map\",\n\n \"cursor\",\n\n \"set_alignment\",\n", "file_path": "src/settings.rs", "rank": 48, "score": 34708.75404610685 }, { "content": " // Reported, but crashes etc. 
are expected\n\n \"NamedAction\", // https://github.com/gtk-rs/gtk4-rs/issues/875\n\n \"SignalAction\", // Error same as in NamedAction\n\n \"ListStore\", // https://github.com/gtk-rs/gtk4-rs/issues/878\n\n \"TreeStore\", // https://github.com/gtk-rs/gtk4-rs/issues/878\n\n // Reported, but not fixed in used version GTK/GTK-rs\n\n \"PrintJob\", // https://github.com/gtk-rs/gtk4-rs/issues/881\n\n \"Printer\", // https://github.com/gtk-rs/gtk4-rs/issues/883\n\n \"SingleSelection\", // https://github.com/gtk-rs/gtk-rs-core/issues/539\n\n \"TreeListModel\", // https://github.com/gtk-rs/gtk-rs-core/issues/539\n\n \"ColumnViewColumn\", // https://github.com/gtk-rs/gtk4-rs/issues/885\n\n // TODO\n\n \"Popover\", // show cause crash\n\n];\n", "file_path": "src/settings.rs", "rank": 49, "score": 34708.491092193304 }, { "content": " \"set_pulse\",\n\n \"set_value\",\n\n \"set_digits\",\n\n \"set_climb_rate\",\n\n \"set_max_height\",\n\n \"set_max_width\",\n\n \"set_min_width\",\n\n \"set_min_height\",\n\n \"set_nat_height\",\n\n \"set_nat_width\",\n\n \"set_page\",\n\n \"set_gtk_cursor_aspect_ratio\",\n\n \"set_gtk_cursor_blink_time\",\n\n \"set_gtk_cursor_theme_size\",\n\n \"set_gtk_cursor_blink_timeout\",\n\n \"set_gtk_dnd_drag_threshold\",\n\n \"set_gtk_double_click_time\",\n\n \"set_gtk_recent_files_max_age\",\n\n \"set_gtk_double_click_distance\",\n\n \"set_gtk_xft_antialias\",\n", "file_path": "src/settings.rs", "rank": 50, "score": 34707.935669978266 }, { "content": " // Not reported, but panic are self describing\n\n \"set_height_request\",\n\n \"set_width_request\",\n\n \"set_day\",\n\n \"set_month\",\n\n \"set_year\",\n\n \"set_height\",\n\n \"set_max_width_chars\",\n\n \"set_scale\",\n\n \"set_weight\",\n\n \"set_size\",\n\n \"set_size_points\",\n\n \"set_width\",\n\n \"set_width_chars\",\n\n \"set_wrap_width\",\n\n \"set_xalign\",\n\n \"set_yalign\",\n\n \"set_text_column\",\n\n \"set_text_xalign\",\n\n \"set_text_yalign\",\n", "file_path": "src/settings.rs", 
"rank": 51, "score": 34706.71362509289 }, { "content": " \"set_visible\",\n\n \"show\", // Block instead a Popover\n\n \"set_language\",\n\n \"emit_move_cursor\",\n\n \"to_node\",\n\n \"gl_shader_pop_texture\",\n\n \"pop\",\n\n \"ignored_functions:\",\n\n \"layout_child\",\n\n \"page\",\n\n \"add_objects_from_resource\",\n\n \"measure\",\n\n \"page_title\",\n\n \"allocate\",\n\n \"set_drag_dest_item\",\n\n \"set_parent\",\n\n \"add_custom_tab\",\n\n \"start_widget\",\n\n \"list_mnemonic_labels\",\n\n \"center_widget\",\n", "file_path": "src/settings.rs", "rank": 52, "score": 34705.917013276965 }, { "content": " \"edit_widget\",\n\n \"edited_cell\",\n\n \"focus_cell\",\n\n \"emit_popup\",\n\n \"name\",\n\n \"widget\",\n\n \"chars\",\n\n \"current_folder\",\n\n \"current_name\",\n\n \"font_features\",\n\n // Pack Report\n\n \"preview_text\",\n\n \"map\",\n\n \"theme_name\",\n\n \"uri\",\n\n \"set_detailed_action_name\",\n\n \"header_bar\",\n\n \"emit_cycle_handle_focus\",\n\n \"popup\",\n\n \"realize\",\n", "file_path": "src/settings.rs", "rank": 53, "score": 34704.71573606839 }, { "content": " \"ATContext\",\n\n \"AssistantPage\",\n\n \"BuilderListItemFactory\",\n\n \"DragIcon\",\n\n \"FixedLayoutChild\",\n\n \"GridLayoutChild\",\n\n \"ListItem\",\n\n \"MapListModel\",\n\n \"NotebookPage\",\n\n \"OverlayLayoutChild\",\n\n \"PrintContext\",\n\n \"StackPage\",\n\n \"TreeListRow\",\n\n \"TreeSelection\",\n\n \"Widget\",\n\n \"TreeModelSort\",\n\n \"ShortcutsSection\",\n\n \"Application\",\n\n \"ShortcutTrigger\",\n\n \"ShortcutAction\",\n", "file_path": "src/settings.rs", "rank": 54, "score": 34704.6521141525 }, { "content": " full_path = full_path.join(&name);\n\n\n\n let all_text = fs::read_to_string(full_path).unwrap();\n\n let lines = all_text.split(\"\\n\");\n\n\n\n let mut function_name: String = \"\".to_string();\n\n let mut previous_arguments: String = \"\".to_string();\n\n let mut current_class: String = \"\".to_string();\n\n let mut current_trait: String = 
\"\".to_string();\n\n let mut current_enum: String = \"\".to_string();\n\n let mut continue_function_declaration = false;\n\n let mut counter = 0; // If equal to 0, can be cleared, used because Object<ffi is inside different library\n\n for line in lines {\n\n let old_line = line;\n\n let line = old_line.trim();\n\n // GENERAL - continue when found ending\n\n if continue_function_declaration {\n\n if (!current_class.is_empty() && line.ends_with(\"{\")) || (!current_trait.is_empty() && line.ends_with(\";\")) {\n\n if line.contains(\")\") {\n\n previous_arguments.push_str(&line[..line.find(\")\").unwrap()]);\n", "file_path": "src/main.rs", "rank": 55, "score": 34273.198355503635 }, { "content": " } else {\n\n // println!(\"MISSING parent class: {}\", parent_class); // TODO why is this missing?\n\n }\n\n }\n\n } else {\n\n // println!(\"MISSING normal class: {}\", name_of_class); // TODO why is this missing?\n\n }\n\n }\n\n }\n\n\n\n // Extend classes with traits functions\n\n if USE_TRAIT_ITEMS {\n\n for (name_of_class, used_traits) in &class_info {\n\n if class_functions.contains_key(name_of_class) {\n\n for used_trait in used_traits {\n\n if traits.contains_key(used_trait) {\n\n // println!(\"I'm in {}, {}\", name_of_class, parent_class);\n\n class_functions.get_mut(name_of_class).unwrap().append(&mut traits.get(used_trait).unwrap().clone());\n\n } else {\n\n // println!(\"MISSING parent class: {}\", parent_class); // TODO why is this missing?\n", "file_path": "src/main.rs", "rank": 56, "score": 34271.68823720039 }, { "content": " current_trait = \"\".to_string();\n\n current_enum = \"\".to_string();\n\n }\n\n } else if !current_enum.is_empty() {\n\n // println!(\"Name - {}, line {}\", name, line);\n\n if !line.starts_with(\"#\") {\n\n let mut thing = line.to_string();\n\n thing.pop(); // Remove last comma\n\n if !thing.contains(\"(\") {\n\n enums.get_mut(&current_enum).unwrap().push(thing.to_string());\n\n counter = 1;\n\n }\n\n }\n\n }\n\n }\n\n\n\n // 
println!(\"Found proper {} file\", name);\n\n }\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 57, "score": 34271.334256309645 }, { "content": "#![allow(clippy::collapsible_else_if)]\n\n#![allow(clippy::type_complexity)]\n\n#![allow(clippy::single_char_pattern)]\n\n#![allow(clippy::needless_bool)]\n\n\n\nmod creating_enum;\n\nmod creating_functions;\n\nmod creating_implementation;\n\nmod settings;\n\n\n\nuse crate::creating_enum::*;\n\nuse crate::creating_functions::*;\n\nuse crate::creating_implementation::*;\n\nuse crate::settings::*;\n\nuse std::collections::BTreeMap;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n", "file_path": "src/main.rs", "rank": 58, "score": 34270.705612698745 }, { "content": " // Means that this is one line function\n\n if line.contains(\"(\") && line.contains(\")\") {\n\n let text_to_check = &line[line.find(\"(\").unwrap() + 1..line.find(\")\").unwrap()];\n\n if text_to_check.contains(\"::\") {\n\n continue_function_declaration = false;\n\n previous_arguments.clear();\n\n function_name.clear();\n\n number_of_ignored_gio_etc_functions += 1;\n\n continue; // Things like gio::Pango are not supported\n\n }\n\n if !(text_to_check.starts_with(\"&self\") || text_to_check.starts_with(\"&mut self\") || text_to_check.starts_with(\"self\")) {\n\n number_of_ignored_functions += 1;\n\n continue;\n\n }\n\n let parts = text_to_check\n\n .split(\",\")\n\n .map(|e| e.replace(\" \", \"\"))\n\n .filter_map(|e| {\n\n let split = e.split(\":\").map(|e| e.to_string()).collect::<Vec<String>>();\n\n if split.len() == 2 {\n", "file_path": "src/main.rs", "rank": 59, "score": 34269.64967245146 }, { "content": " if let Some(found_space) = new_temp_line.find(\"(\") {\n\n current_class = new_temp_line[..found_space].to_string();\n\n if !class_info.contains_key(&current_class) {\n\n class_info.insert(current_class.clone(), Vec::new());\n\n }\n\n\n\n let e_text = \"@extends \";\n\n let i_text = \"@implements \";\n\n let e_index = line.find(e_text);\n\n let i_index 
= line.find(i_text);\n\n if let Some(e_index) = e_index {\n\n if let Some(i_index) = i_index {\n\n let text_to_check = &line[e_index + e_text.len()..i_index];\n\n let parts = text_to_check\n\n .split(\",\")\n\n .map(|e| e.replace(\" \", \"\").replace(\",\", \"\"))\n\n .filter(|e| !e.is_empty() && e.chars().next().unwrap().is_uppercase())\n\n .collect::<Vec<String>>();\n\n class_info.get_mut(&current_class).unwrap().extend(parts);\n\n\n", "file_path": "src/main.rs", "rank": 60, "score": 34269.4897395235 }, { "content": " continue_function_declaration = true;\n\n }\n\n } else if old_line.starts_with(\"pub enum \") {\n\n current_class.clear();\n\n current_trait.clear();\n\n current_enum.clear();\n\n\n\n let t_help = \"pub enum \";\n\n let end_help = \" {\";\n\n let end_name = &line[t_help.len()..line.len() - end_help.len()];\n\n\n\n if !end_name.contains(\"<\") {\n\n current_enum = end_name.to_string();\n\n enums.insert(end_name.to_string(), Default::default());\n\n counter = 1;\n\n }\n\n } else if old_line.starts_with(\"}\") {\n\n counter -= 1;\n\n if counter == 0 {\n\n current_class = \"\".to_string();\n", "file_path": "src/main.rs", "rank": 61, "score": 34269.47865580524 }, { "content": " }\n\n let parts = text_to_check\n\n .split(\",\")\n\n .map(|e| e.replace(\" \", \"\"))\n\n .filter_map(|e| {\n\n let split = e.split(\":\").map(|e| e.to_string()).collect::<Vec<String>>();\n\n if split.len() == 2 {\n\n return Some(split[1].clone());\n\n }\n\n None\n\n })\n\n .collect::<Vec<String>>();\n\n\n\n class_functions.entry(current_class.clone()).or_insert_with(Default::default);\n\n class_functions.get_mut(&current_class).unwrap().insert(function_name_local.to_string(), parts.clone());\n\n\n\n // println!(\"Arguments for function {}, {:?}\", function_name_local, parts);\n\n } else {\n\n panic!(\"HMMMMMMMMMMMMM\");\n\n }\n", "file_path": "src/main.rs", "rank": 62, "score": 34269.37174309234 }, { "content": " return Some(split[1].clone());\n\n }\n\n None\n\n })\n\n 
.collect::<Vec<String>>();\n\n\n\n traits.entry(current_trait.clone()).or_insert_with(Default::default);\n\n traits.get_mut(&current_trait).unwrap().insert(function_name_local.to_string(), parts.clone());\n\n\n\n // println!(\"Arguments for function {}, {:?}\", function_name_local, parts);\n\n } else {\n\n panic!(\"HMMMMMMMMMMMMM\");\n\n }\n\n } else {\n\n if let Some(line_index) = line.find(\"(\") {\n\n previous_arguments.push_str(&line[line_index..]);\n\n } else {\n\n panic!(\"Big HMMMM\");\n\n }\n\n function_name = function_name_local.to_string();\n", "file_path": "src/main.rs", "rank": 63, "score": 34269.01817401737 }, { "content": " .split(\",\")\n\n .map(|e| e.replace(\" \", \"\").replace(\",\", \"\"))\n\n .filter(|e| !e.is_empty() && e.chars().next().unwrap().is_uppercase())\n\n .collect::<Vec<String>>();\n\n class_info.get_mut(&current_class).unwrap().extend(parts);\n\n }\n\n }\n\n counter = 2;\n\n }\n\n }\n\n }\n\n // Finds Ext\n\n else if line.starts_with(\"pub trait \") && line.contains(\"Ext\") && !line.contains(\"Manual\") {\n\n current_class.clear();\n\n current_trait.clear();\n\n current_enum.clear();\n\n\n\n let t_help = \"pub trait \";\n\n let temp_line = &line[t_help.len()..];\n\n if let Some(s_index) = temp_line.find(\" \") {\n", "file_path": "src/main.rs", "rank": 64, "score": 34268.95172138073 }, { "content": " } else {\n\n if let Some(line_index) = line.find(\"(\") {\n\n previous_arguments.push_str(&line[line_index..]);\n\n } else {\n\n panic!(\"Big HMMMM\");\n\n }\n\n function_name = function_name_local.to_string();\n\n continue_function_declaration = true;\n\n }\n\n\n\n // println!(\"found function \\\"{}\\\" for \\\"{}\\\"\", function_name_local, current_class);\n\n }\n\n // Finds\n\n else if line.contains(\"Object<ffi::\") {\n\n current_class.clear();\n\n current_trait.clear();\n\n current_enum.clear();\n\n let t_help = \"pub struct \";\n\n if let Some(found_item) = line.find(t_help) {\n\n let new_temp_line = &line[found_item + 
t_help.len()..];\n", "file_path": "src/main.rs", "rank": 65, "score": 34268.719250485236 }, { "content": " println!(\n\n \"Ignored functions(connect, static methods etc.) - {}, Ignored functions(gdk, gio etc. arguments) - {}\",\n\n number_of_ignored_functions, number_of_ignored_gio_etc_functions\n\n );\n\n\n\n count_objects(&class_functions, &traits, &enums, \"At start \");\n\n\n\n // Extend classes with parent functions\n\n if USE_PARENT_ITEMS {\n\n let base_functions = class_functions.clone(); // Needed to have same set of functions across all iterations\n\n\n\n for (name_of_class, parent_classes) in &class_info {\n\n if class_functions.contains_key(name_of_class) {\n\n for parent_class in parent_classes {\n\n if class_functions.contains_key(parent_class) {\n\n // println!(\"I'm in {}, {}\", name_of_class, parent_class);\n\n class_functions\n\n .get_mut(name_of_class)\n\n .unwrap()\n\n .append(&mut base_functions.get(parent_class).unwrap().clone());\n", "file_path": "src/main.rs", "rank": 66, "score": 34268.69007882629 }, { "content": " }\n\n }\n\n } else {\n\n // println!(\"MISSING normal class: {}\", name_of_class); // TODO why is this missing?\n\n }\n\n }\n\n }\n\n\n\n count_objects(&class_functions, &traits, &enums, \"After adding parents\");\n\n\n\n // Remove classes which won't be used\n\n if !CLASSES_TO_USE.is_empty() {\n\n let keys = class_functions.clone().into_keys();\n\n for used_class in keys {\n\n if !CLASSES_TO_USE.iter().any(|e| *e == used_class) {\n\n class_functions.remove(&used_class);\n\n }\n\n }\n\n } else {\n\n for ignored in IGNORED_CLASSES {\n", "file_path": "src/main.rs", "rank": 67, "score": 34268.64353602832 }, { "content": " if previous_arguments.contains(\"::\") {\n\n continue_function_declaration = false;\n\n previous_arguments.clear();\n\n function_name.clear();\n\n continue; // Things like gio::Pango are not supported\n\n }\n\n let parts = previous_arguments\n\n .split(\",\")\n\n .map(|e| e.replace(\" \", \"\"))\n\n .filter_map(|e| 
{\n\n let split = e.split(\":\").map(|e| e.to_string()).collect::<Vec<String>>();\n\n if split.len() == 2 {\n\n return Some(split[1].clone());\n\n }\n\n None\n\n })\n\n .collect::<Vec<String>>();\n\n\n\n if !current_class.is_empty() {\n\n class_functions.entry(current_class.clone()).or_insert_with(Default::default);\n", "file_path": "src/main.rs", "rank": 68, "score": 34268.48168831854 }, { "content": " let text_to_check = &line[i_index + i_text.len()..line.len() - 1];\n\n let parts = text_to_check\n\n .split(\",\")\n\n .map(|e| e.replace(\" \", \"\").replace(\",\", \"\"))\n\n .filter(|e| !e.is_empty() && e.chars().next().unwrap().is_uppercase())\n\n .collect::<Vec<String>>();\n\n class_info.get_mut(&current_class).unwrap().extend(parts);\n\n } else {\n\n let text_to_check = &line[e_index + e_text.len()..line.len() - 1];\n\n let parts = text_to_check\n\n .split(\",\")\n\n .map(|e| e.replace(\" \", \"\").replace(\",\", \"\"))\n\n .filter(|e| !e.is_empty() && e.chars().next().unwrap().is_uppercase())\n\n .collect::<Vec<String>>();\n\n class_info.get_mut(&current_class).unwrap().extend(parts);\n\n }\n\n } else {\n\n if let Some(i_index) = i_index {\n\n let text_to_check = &line[i_index + i_text.len()..line.len() - 1];\n\n let parts = text_to_check\n", "file_path": "src/main.rs", "rank": 69, "score": 34268.09579628473 }, { "content": " for (class, parents) in &class_info {\n\n if !IGNORED_CLASSES.contains(&class.as_str()) {\n\n for parent in parents {\n\n children_of_class.entry(parent.clone()).or_insert_with(Default::default);\n\n children_of_class.get_mut(parent).unwrap().push(class.to_string());\n\n }\n\n }\n\n }\n\n // TODO adds self class to implemented by things\n\n // for (class, children) in &mut children_of_class {\n\n // if !IGNORED_CLASSES.contains(&class.as_str()) {\n\n // children.push(class.clone());\n\n // }\n\n // }\n\n // for (parent, classes_begin) in children_of_class {\n\n // for begin in classes_begin {\n\n // println!(\"{}.{}\", parent, 
begin);\n\n // }\n\n // }\n\n\n", "file_path": "src/main.rs", "rank": 70, "score": 34267.584416480466 }, { "content": " class_functions.get_mut(&current_class).unwrap().insert(function_name.clone(), parts.clone());\n\n } else if !current_trait.is_empty() {\n\n traits.entry(current_trait.clone()).or_insert_with(Default::default);\n\n traits.get_mut(&current_trait).unwrap().insert(function_name.clone(), parts.clone());\n\n } else {\n\n panic!(\"\")\n\n }\n\n\n\n // println!(\"Arguments for multiline function {}, {:?}\", function_name, parts);\n\n continue_function_declaration = false;\n\n previous_arguments.clear();\n\n function_name.clear();\n\n }\n\n } else {\n\n previous_arguments.push_str(line);\n\n }\n\n }\n\n // Finds objects function\n\n else if line.starts_with(\"pub fn \") && line.contains(\"(\") && !current_class.is_empty() {\n\n // println!(\"{}\", line);\n", "file_path": "src/main.rs", "rank": 71, "score": 34267.51622938487 }, { "content": " class_functions.remove(&ignored.to_string());\n\n }\n\n }\n\n\n\n // Remove functions which won't be used\n\n if !FUNCTIONS_TO_USE.is_empty() {\n\n let keys = class_functions.clone();\n\n for (name_of_class, function_list) in keys {\n\n for (function, _) in function_list {\n\n if !FUNCTIONS_TO_USE.iter().any(|e| *e == function) {\n\n class_functions.get_mut(&name_of_class).unwrap().remove(&function);\n\n }\n\n }\n\n }\n\n } else {\n\n for ignored in IGNORED_FUNCTIONS {\n\n for functions in class_functions.values_mut() {\n\n functions.remove(&ignored.to_string());\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 72, "score": 34267.120370120334 }, { "content": " let ext_name = &temp_line[..s_index];\n\n let end_name = if ext_name.ends_with(':') {\n\n &ext_name[..ext_name.len() - 4]\n\n } else {\n\n &ext_name[..ext_name.len() - 3]\n\n };\n\n current_trait = end_name.to_string();\n\n traits.insert(end_name.to_string(), Default::default());\n\n counter = 1;\n\n }\n\n } else if line.starts_with(\"fn \") && 
line.contains(\"(\") && !current_trait.is_empty() {\n\n // println!(\"{}\", line);\n\n let text_to_check = \"fn \";\n\n let function_name_local = &line[text_to_check.len()..line.find(\"(\").unwrap()];\n\n if function_name_local.contains(\"connect_\") || function_name_local.contains(\"<\") {\n\n number_of_ignored_functions += 1;\n\n continue; // Connect function are not supported\n\n }\n\n\n\n if line.ends_with(\";\") {\n", "file_path": "src/main.rs", "rank": 73, "score": 34266.94537695291 }, { "content": " let text_to_check = \"pub fn \";\n\n let function_name_local = &line[text_to_check.len()..line.find(\"(\").unwrap()];\n\n if function_name_local.contains(\"connect_\") || function_name_local.contains(\"<\") {\n\n continue; // Connect function are not supported\n\n }\n\n\n\n if line.ends_with(\"{\") {\n\n // Means that this is one line function\n\n if line.contains(\"(\") && line.contains(\")\") {\n\n let text_to_check = &line[line.find(\"(\").unwrap() + 1..line.find(\")\").unwrap()];\n\n if text_to_check.contains(\"::\") {\n\n continue_function_declaration = false;\n\n previous_arguments.clear();\n\n function_name.clear();\n\n number_of_ignored_gio_etc_functions += 1;\n\n continue; // Things like gio::Pango are not supported\n\n }\n\n if !(text_to_check.starts_with(\"&self\") || text_to_check.starts_with(\"&mut self\") || text_to_check.starts_with(\"self\")) {\n\n number_of_ignored_functions += 1;\n\n continue;\n", "file_path": "src/main.rs", "rank": 74, "score": 34266.69474964969 }, { "content": " for path_dir in [PATH_TO_GTK_RS, PATH_TO_GTK_RS_AUTO] {\n\n let dir = fs::read_dir(path_dir).unwrap_or_else(|_| panic!(\"Cannot open dir {}\", path_dir));\n\n for entry in dir {\n\n let entry_data = match entry {\n\n Ok(t) => t,\n\n Err(_e) => {\n\n println!(\"Cannot read entries of {}\", path_dir);\n\n continue;\n\n }\n\n };\n\n let name = entry_data.file_name().to_string_lossy().to_string();\n\n if !name.ends_with(\".rs\") {\n\n continue;\n\n }\n\n // if name != 
\"label.rs\" {\n\n // continue;\n\n // }\n\n\n\n let mut full_path = PathBuf::new();\n\n full_path = full_path.join(&path_dir);\n", "file_path": "src/main.rs", "rank": 75, "score": 34266.33844154069 }, { "content": " // }\n\n // {\n\n // for (name_of_class, function_list) in &traits {\n\n // for (name_of_function, arguments) in function_list {\n\n // let mut what = format!(\"{}.{}(\", name_of_class, name_of_function);\n\n // for index in 0..arguments.len() {\n\n // what += \"\\\"\";\n\n // what += &arguments[index];\n\n // what += \"\\\"\";\n\n // if index != arguments.len() - 1 {\n\n // what += \",\";\n\n // }\n\n // }\n\n // what += \")\";\n\n //\n\n // println!(\"{}\", what);\n\n // }\n\n // }\n\n // }\n\n // {\n", "file_path": "src/main.rs", "rank": 76, "score": 34264.41817478948 }, { "content": " }\n\n\n\n // Print all classes + functions + arguments\n\n // {\n\n // for (name_of_class, function_list) in &class_functions {\n\n // for (name_of_function, arguments) in function_list {\n\n // let mut what = format!(\"{}.{}(\", name_of_class, name_of_function);\n\n // for index in 0..arguments.len() {\n\n // what += \"\\\"\";\n\n // what += &arguments[index];\n\n // what += \"\\\"\";\n\n // if index != arguments.len() - 1 {\n\n // what += \",\";\n\n // }\n\n // }\n\n // what += \")\";\n\n //\n\n // println!(\"{}\", what);\n\n // }\n\n // }\n", "file_path": "src/main.rs", "rank": 77, "score": 34264.1200454956 }, { "content": " // for (name_of_class, function_list) in &traits {\n\n // for (name_of_function, _arguments) in function_list {\n\n // println!(\"{}.{} --- Trait\", name_of_class, name_of_function)\n\n // }\n\n // }\n\n // }\n\n // {\n\n // for (name_of_class, function_list) in &class_functions {\n\n // for (name_of_function, _arguments) in function_list {\n\n // println!(\"{}.{}\", name_of_class, name_of_function)\n\n // }\n\n // }\n\n // }\n\n // {\n\n // for (name_of_class, similar_clases) in &class_info {\n\n // println!(\"{}.{:?}\", name_of_class, 
similar_clases)\n\n // }\n\n // }\n\n\n\n count_objects(&class_functions, &traits, &enums, \"End results \");\n\n\n\n (class_info, class_functions, traits, enums, children_of_class)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 78, "score": 34263.454851896604 }, { "content": "use crate::settings::*;\n\nuse std::collections::BTreeMap;\n\nuse std::fs;\n\nuse std::fs::OpenOptions;\n\nuse std::io::{BufWriter, Write};\n\n\n", "file_path": "src/creating_implementation.rs", "rank": 79, "score": 33278.78883256463 }, { "content": " arguments += &format!(\n\n \"{} => {{ let (l_r, r_r) = gget_{}(); (l_r.upcast::<{}>(),r_r) }}\",\n\n index,\n\n child_of_item.to_lowercase(),\n\n name_of_class\n\n );\n\n\n\n arguments += \"\\n\";\n\n if index != child_of_item.len() - 1 {\n\n arguments += \" \";\n\n }\n\n }\n\n arguments += \"_ => panic!()\";\n\n to_write = to_write.replace(\"<<items>>\", &arguments);\n\n assert!(!to_write.contains(\"<<\"));\n\n writeln!(file, \"{}\", to_write).unwrap();\n\n }\n\n}\n", "file_path": "src/creating_implementation.rs", "rank": 80, "score": 33275.037459910105 }, { "content": "use std::io::Write;\"#####;\n\n writeln!(file, \"{}\", enum_start).unwrap();\n\n\n\n // enum_arguments - arguments\n\n // type - type\n\n // type_lowercase - type in lowercase\n\n // type_uppercase - type in uppercase\n\n // number - number of constants\n\n let single_enum_template = r#####\"\n\nconst ENUM_<<type_uppercase>>_TYPE: [(<<type>>, &str); <<number>>] = [\n\n <<enum_arguments>>\n\n];\n\n\n", "file_path": "src/creating_enum.rs", "rank": 81, "score": 33261.20233693716 }, { "content": "use crate::settings::*;\n\nuse std::collections::BTreeMap;\n\nuse std::fs;\n\nuse std::fs::OpenOptions;\n\nuse std::io::{BufWriter, Write};\n\n\n", "file_path": "src/creating_enum.rs", "rank": 82, "score": 33258.7687875026 }, { "content": " for (index, constant) in constant_list.iter().enumerate() {\n\n let th = format!(\"{}::{}\", name_of_enum, constant);\n\n arguments += 
&format!(\"({},\\\"{}\\\")\", th, th);\n\n if index != constant_list.len() - 1 {\n\n arguments += \",\";\n\n }\n\n arguments += \"\\n\";\n\n if index != constant_list.len() - 1 {\n\n arguments += \" \";\n\n }\n\n }\n\n to_write = to_write.replace(\"<<enum_arguments>>\", &arguments);\n\n assert!(!to_write.contains(\"<<\"));\n\n writeln!(file, \"{}\", to_write).unwrap();\n\n }\n\n}\n", "file_path": "src/creating_enum.rs", "rank": 83, "score": 33255.01452451497 }, { "content": "use gtk4::*;\n\nuse rand::prelude::SliceRandom;\n\nuse rand::{thread_rng, Rng};\n\n\n\nconst DEBUG_PRINTING: bool = true;\n", "file_path": "Project/src/helpers.rs", "rank": 84, "score": 33016.88711446764 }, { "content": " (glib::Type::F64, \"glib::Type::F64\"),\n\n (glib::Type::STRING, \"glib::Type::STRING\"),\n\n (glib::Type::POINTER, \"glib::Type::POINTER\"),\n\n (glib::Type::VARIANT, \"glib::Type::VARIANT\"),\n\n (glib::Type::INTERFACE, \"glib::Type::INTERFACE\"),\n\n (glib::Type::ENUM, \"glib::Type::ENUM\"),\n\n (glib::Type::FLAGS, \"glib::Type::FLAGS\"),\n\n (glib::Type::BOXED, \"glib::Type::BOXED\"),\n\n (glib::Type::PARAM_SPEC, \"glib::Type::PARAM_SPEC\"),\n\n (glib::Type::OBJECT, \"glib::Type::OBJECT\"),\n\n];\n\n\n", "file_path": "Project/src/helpers.rs", "rank": 85, "score": 33015.214650906484 }, { "content": "//pub fn gget_pagesetupunixdialog() -> (PageSetupUnixDialog, &'static str) {\n\n// (PageSetupUnixDialog::default(), r###\"PageSetupUnixDialog::default()\"###)\n\n//}\n\npub fn gget_paned() -> (Paned, &'static str) {\n\n (Paned::default(), r###\"Paned::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 92, "score": 22248.88578286462 }, { "content": "pub fn gget_anyfilter() -> (AnyFilter, &'static str) {\n\n (AnyFilter::default(), r###\"AnyFilter::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 93, "score": 22246.493734421998 }, { "content": "pub fn gget_builder() -> (Builder, &'static str) {\n\n (Builder::new(), 
r###\"Builder::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 94, "score": 22246.493734421998 }, { "content": "pub fn gget_assistant() -> (Assistant, &'static str) {\n\n (Assistant::new(), r###\"Assistant::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 95, "score": 22246.493734421998 }, { "content": "pub fn gget_box() -> (Box, &'static str) {\n\n (Box::new(stek_orientation().0, take_i32()), r###\"Box::new(stek_orientation().0, take_i32())\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 96, "score": 22246.493734421998 }, { "content": "pub fn gget_adjustment() -> (Adjustment, &'static str) {\n\n (Adjustment::default(), r###\"Adjustment::default()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 97, "score": 22246.493734421998 }, { "content": "pub fn gget_bitset() -> (Bitset, &'static str) {\n\n (Bitset::new_empty(), r###\"Bitset::new_empty()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 98, "score": 22246.493734421998 }, { "content": "pub fn gget_aboutdialog() -> (AboutDialog, &'static str) {\n\n (AboutDialog::new(), r###\"AboutDialog::new()\"###)\n\n}\n", "file_path": "Project/src/create_objects.rs", "rank": 99, "score": 22246.493734421998 } ]
Rust
src/gl33/buffer.rs
phaazon/luminance-gl-rs
73e156ce276b3882335201bde5ca31bc6afa4ba5
use gl; use gl::types::*; use gl33::token::GL33; use luminance::buffer; use std::cmp::Ordering::*; use std::mem; use std::os::raw::c_void; use std::ptr; use std::slice; pub type Buffer<T> = buffer::Buffer<GL33, T>; pub type BufferSlice<'a, T> = buffer::BufferSlice<'a, GL33, T>; pub type BufferSliceMut<'a, T> = buffer::BufferSliceMut<'a, GL33, T>; #[derive(Debug, Clone, Eq, PartialEq)] pub struct GLBuffer { pub handle: GLuint, pub bytes: usize } unsafe impl buffer::HasBuffer for GL33 { type ABuffer = GLBuffer; fn new(size: usize) -> Self::ABuffer { let mut buffer: GLuint = 0; unsafe { gl::GenBuffers(1, &mut buffer); gl::BindBuffer(gl::ARRAY_BUFFER, buffer); gl::BufferData(gl::ARRAY_BUFFER, size as isize, ptr::null(), gl::STREAM_DRAW); gl::BindBuffer(gl::ARRAY_BUFFER, 0); } GLBuffer { handle: buffer, bytes: size } } fn free(buffer: &mut Self::ABuffer) { unsafe { gl::DeleteBuffers(1, &buffer.handle) } } fn write_whole<T>(buffer: &Self::ABuffer, values: &[T]) -> Result<(), buffer::BufferError> { let bytes = values.len() * mem::size_of::<T>(); let (warning, bytes) = match bytes.cmp(&buffer.bytes) { Less => (Some(buffer::BufferError::TooFewValues), bytes), Greater => (Some(buffer::BufferError::TooManyValues), buffer.bytes), _ => (None, bytes) }; unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); let ptr = gl::MapBuffer(gl::ARRAY_BUFFER, gl::WRITE_ONLY); ptr::copy_nonoverlapping(values.as_ptr() as *const c_void, ptr, bytes); let _ = gl::UnmapBuffer(gl::ARRAY_BUFFER); gl::BindBuffer(gl::ARRAY_BUFFER, 0); } match warning { Some(w) => Err(w), None => Ok(()) } } fn write<T>(buffer: &Self::ABuffer, off: usize, x: T) -> Result<(), buffer::BufferError> where T: Copy { if off >= buffer.bytes { return Err(buffer::BufferError::Overflow); } unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); let ptr = gl::MapBuffer(gl::ARRAY_BUFFER, gl::WRITE_ONLY); *(ptr.offset(off as isize) as *mut T) = x; let _ = gl::UnmapBuffer(gl::ARRAY_BUFFER); gl::BindBuffer(gl::ARRAY_BUFFER, 0); 
} Ok(()) } fn read_whole<T>(buffer: &Self::ABuffer, nb: usize) -> Vec<T> where T: Copy { unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); let ptr = gl::MapBuffer(gl::ARRAY_BUFFER, gl::READ_ONLY) as *const T; let values = Vec::from(slice::from_raw_parts(ptr, nb)); let _ = gl::UnmapBuffer(gl::ARRAY_BUFFER); gl::BindBuffer(gl::ARRAY_BUFFER, 0); values } } fn read<T>(buffer: &Self::ABuffer, off: usize) -> Option<T> where T: Copy { if off >= buffer.bytes { return None; } unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); let ptr = gl::MapBuffer(gl::ARRAY_BUFFER, gl::READ_ONLY); let x = *(ptr.offset(off as isize) as *const T); let _ = gl::UnmapBuffer(gl::ARRAY_BUFFER); gl::BindBuffer(gl::ARRAY_BUFFER, 0); Some(x) } } fn map<T>(buffer: &mut Self::ABuffer) -> *const T { unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); gl::MapBuffer(gl::ARRAY_BUFFER, gl::READ_ONLY) as *const T } } fn map_mut<T>(buffer: &mut Self::ABuffer) -> *mut T { unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); gl::MapBuffer(gl::ARRAY_BUFFER, gl::READ_WRITE) as *mut T } } fn unmap(buffer: &mut Self::ABuffer) { unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); gl::UnmapBuffer(gl::ARRAY_BUFFER); } } }
use gl; use gl::types::*; use gl33::token::GL33; use luminance::buffer; use std::cmp::Ordering::*; use std::mem; use std::os::raw::c_void; use std::ptr; use std::slice; pub type Buffer<T> = buffer::Buffer<GL33, T>; pub type BufferSlice<'a, T> = buffer::BufferSlice<'a, GL33, T>; pub type BufferSliceMut<'a, T> = buffer::BufferSliceMut<'a, GL33, T>; #[derive(Debug, Clone, Eq, PartialEq)] pub struct GLBuffer { pub handle: GLuint, pub bytes: usize } unsafe impl buffer::HasBuffer for GL33 { type ABuffer = GLBuffer; fn new(size: usize) -> Self::ABuffer { let mut buffer: GLuint = 0; unsafe { gl::GenBuffers(1, &mut buffer); gl::BindBuffer(gl::ARRAY_BUFFER, buffer); gl::BufferData(gl::ARRAY_BUFFER, size as isize, ptr::null(), gl::STREAM_DRAW); gl::BindBuffer(gl::ARRAY_BUFFER, 0); } GLBuffer { handle: buffer, bytes: size } } fn free(buffer: &mut Self::ABuffer) { unsafe { gl::DeleteBuffers(1, &buffer.handle) } } fn write_whole<T>(buffer: &Self::ABuffer, values: &[T]) -> Result<(), buffer::BufferError> { let bytes = values.len() * mem::size_of::<T>(); let (warning, bytes) = match bytes.cmp(&buffer.bytes) { Less => (Some(buffer::BufferError::TooFewValues), bytes), Greater => (Some(buffer::BufferError::TooManyValues), buffer.bytes), _ => (None, bytes) }; unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); let ptr = gl::MapBuffer(gl::ARRAY_BUFFER, gl::WRITE_ONLY); ptr::copy_nonoverlapping(values.as_ptr() as *const c_void, ptr, bytes); let _ = gl::UnmapBuffer(gl::ARRAY_BUFFER); gl::BindBuffer(gl::ARRAY_BUFFER, 0); } match warning { Some(w) => Err(w), None => Ok(()) } } fn write<T>(buffer: &Self::ABuffer, off: usize, x: T) -> Result<(), buffer::BufferError> where T: Copy { if off >= buffer.bytes {
fn read_whole<T>(buffer: &Self::ABuffer, nb: usize) -> Vec<T> where T: Copy { unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); let ptr = gl::MapBuffer(gl::ARRAY_BUFFER, gl::READ_ONLY) as *const T; let values = Vec::from(slice::from_raw_parts(ptr, nb)); let _ = gl::UnmapBuffer(gl::ARRAY_BUFFER); gl::BindBuffer(gl::ARRAY_BUFFER, 0); values } } fn read<T>(buffer: &Self::ABuffer, off: usize) -> Option<T> where T: Copy { if off >= buffer.bytes { return None; } unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); let ptr = gl::MapBuffer(gl::ARRAY_BUFFER, gl::READ_ONLY); let x = *(ptr.offset(off as isize) as *const T); let _ = gl::UnmapBuffer(gl::ARRAY_BUFFER); gl::BindBuffer(gl::ARRAY_BUFFER, 0); Some(x) } } fn map<T>(buffer: &mut Self::ABuffer) -> *const T { unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); gl::MapBuffer(gl::ARRAY_BUFFER, gl::READ_ONLY) as *const T } } fn map_mut<T>(buffer: &mut Self::ABuffer) -> *mut T { unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); gl::MapBuffer(gl::ARRAY_BUFFER, gl::READ_WRITE) as *mut T } } fn unmap(buffer: &mut Self::ABuffer) { unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); gl::UnmapBuffer(gl::ARRAY_BUFFER); } } }
return Err(buffer::BufferError::Overflow); } unsafe { gl::BindBuffer(gl::ARRAY_BUFFER, buffer.handle); let ptr = gl::MapBuffer(gl::ARRAY_BUFFER, gl::WRITE_ONLY); *(ptr.offset(off as isize) as *mut T) = x; let _ = gl::UnmapBuffer(gl::ARRAY_BUFFER); gl::BindBuffer(gl::ARRAY_BUFFER, 0); } Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn create_texture<L, D>(target: GLenum, size: D::Size, mipmaps: usize, pf: PixelFormat, sampler: &Sampler) -> Result<()>\n\n where L: Layerable,\n\n D: Dimensionable,\n\n D::Size: Copy {\n\n set_texture_levels(target, mipmaps);\n\n\n\n apply_sampler_to_texture(target, sampler);\n\n\n\n create_texture_storage::<L, D>(size, mipmaps, pf)\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 0, "score": 152199.90806647402 }, { "content": "fn create_texture_storage<L, D>(size: D::Size, mipmaps: usize, pf: PixelFormat) -> Result<()>\n\n where L: Layerable,\n\n D: Dimensionable,\n\n D::Size: Copy {\n\n match gl_pixel_format(pf) {\n\n Some(glf) => {\n\n let (format, iformat, encoding) = glf;\n\n\n\n match (L::layering(), D::dim()) {\n\n // 1D texture\n\n (Layering::Flat, Dim::Dim1) => {\n\n create_texture_1d_storage(format, iformat, encoding, D::width(size), mipmaps);\n\n Ok(())\n\n },\n\n // 2D texture\n\n (Layering::Flat, Dim::Dim2) => {\n\n create_texture_2d_storage(format, iformat, encoding, D::width(size), D::height(size), mipmaps);\n\n Ok(())\n\n },\n\n // 3D texture\n", "file_path": "src/gl33/texture.rs", "rank": 1, "score": 137463.67535917994 }, { "content": "fn component_type_weight(t: &Type) -> usize {\n\n match *t {\n\n Type::Integral => mem::size_of::<i32>(),\n\n Type::Unsigned => mem::size_of::<u32>(),\n\n Type::Floating => mem::size_of::<f32>(),\n\n Type::Boolean => mem::size_of::<bool>()\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 2, "score": 121957.84811243309 }, { "content": "pub fn debug_gl() {\n\n let e = unsafe { gl::GetError() };\n\n\n\n match e {\n\n gl::NO_ERROR => println!(\"no error\"),\n\n gl::INVALID_ENUM => println!(\"invalid enum\"),\n\n gl::INVALID_VALUE => println!(\"invalid value\"),\n\n gl::INVALID_OPERATION => println!(\"invalid operation\"),\n\n gl::INVALID_FRAMEBUFFER_OPERATION => println!(\"invalid frameuffer operation\"),\n\n gl::OUT_OF_MEMORY => println!(\"out of memory\"),\n\n _ => 
println!(\"unknown error: {}\", e)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 3, "score": 113275.97316723605 }, { "content": "// Return something if no match can be established.\n\nfn uniform_type_match(program: GLuint, name: &str, ty: Type, dim: Dim) -> Option<String> {\n\n let mut size: GLint = 0;\n\n let mut typ: GLuint = 0;\n\n\n\n unsafe {\n\n // get the index of the uniform\n\n let mut index = 0;\n\n gl::GetUniformIndices(program, 1, [name.as_ptr() as *const i8].as_ptr(), &mut index);\n\n // get its size and type\n\n gl::GetActiveUniform(program, index, 0, null_mut(), &mut size, &mut typ, null_mut());\n\n }\n\n\n\n // FIXME\n\n // early-return if array – we don’t support them yet\n\n if size != 1 {\n\n return None;\n\n }\n\n\n\n match (ty, dim) {\n\n (Type::Integral, Dim::Dim1) if typ != gl::INT => Some(\"requested int doesn't match\".to_owned()),\n", "file_path": "src/gl33/shader/program.rs", "rank": 4, "score": 110356.70397070028 }, { "content": "fn from_type(t: &Type) -> GLenum {\n\n match *t {\n\n Type::Integral | Type::Boolean => gl::INT,\n\n Type::Unsigned => gl::UNSIGNED_INT,\n\n Type::Floating => gl::FLOAT,\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 5, "score": 99280.79454111819 }, { "content": "// Return the number of components.\n\npub fn pixel_components(pf: PixelFormat) -> usize {\n\n match pf.format {\n\n Format::RGB(_, _, _) => 3,\n\n Format::RGBA(_, _, _, _) => 4,\n\n Format::Depth(_) => 1,\n\n _ => panic!(\"unsupported pixel format\")\n\n }\n\n}\n", "file_path": "src/pixel.rs", "rank": 6, "score": 97923.26249915318 }, { "content": "// Retrieve the uniform location.\n\nfn get_uniform_location(program: GLuint, name: &str, ty: Type, dim: Dim) -> (Location, Option<UniformWarning>) {\n\n let c_name = CString::new(name.as_bytes()).unwrap();\n\n let location = if ty == Type::BufferBinding {\n\n let index = unsafe { gl::GetUniformBlockIndex(program, c_name.as_ptr() as *const GLchar) };\n\n\n\n if index == 
gl::INVALID_INDEX {\n\n return (Location::UniformBlock(-1), Some(UniformWarning::Inactive(name.to_owned())));\n\n }\n\n\n\n Location::UniformBlock(index as GLint)\n\n } else {\n\n let location = unsafe { gl::GetUniformLocation(program, c_name.as_ptr() as *const GLchar) };\n\n\n\n if location == -1 {\n\n return (Location::Uniform(-1), Some(UniformWarning::Inactive(name.to_owned())));\n\n }\n\n\n\n Location::Uniform(location)\n\n };\n\n\n\n if let Some(err) = uniform_type_match(program, name, ty, dim) {\n\n return (location, Some(UniformWarning::TypeMismatch(name.to_owned(), err)));\n\n }\n\n\n\n (location, None)\n\n}\n\n\n", "file_path": "src/gl33/shader/program.rs", "rank": 7, "score": 97831.90359138565 }, { "content": "// FIXME: check for GL_ARB_tessellation_shader extension if we need tessellation shaders\n\nfn from_shader_type(t: Type) -> GLenum {\n\n match t {\n\n Type::TessellationControlShader => gl::TESS_CONTROL_SHADER,\n\n Type::TessellationEvaluationShader => gl::TESS_EVALUATION_SHADER,\n\n Type::VertexShader => gl::VERTEX_SHADER,\n\n Type::GeometryShader => gl::GEOMETRY_SHADER,\n\n Type::FragmentShader => gl::FRAGMENT_SHADER\n\n }\n\n}\n\n\n", "file_path": "src/gl33/shader/stage.rs", "rank": 8, "score": 95678.29039653708 }, { "content": "fn set_point_line_size(mode: Mode, size: Option<f32>) {\n\n let computed = size.unwrap_or(1.);\n\n\n\n match mode {\n\n Mode::Point => unsafe { gl::PointSize(computed) },\n\n Mode::Line | Mode::LineStrip => unsafe { gl::LineWidth(computed) },\n\n _ => {}\n\n }\n\n}\n", "file_path": "src/gl33/tessellation.rs", "rank": 9, "score": 88068.46937390757 }, { "content": "fn set_point_line_size(mode: Mode, size: Option<f32>) {\n\n let computed = size.unwrap_or(1.);\n\n\n\n match mode {\n\n Mode::Point => unsafe { gl::PointSize(computed) },\n\n Mode::Line | Mode::LineStrip => unsafe { gl::LineWidth(computed) },\n\n _ => {}\n\n }\n\n}\n\n\n\n// Align an offset.\n", "file_path": "src/gl33/tess.rs", "rank": 10, "score": 
88068.4693739076 }, { "content": "fn opengl_sized_type(f: &VertexComponentFormat) -> GLenum {\n\n match (f.comp_type, f.unit_size) {\n\n (Type::Integral, 1) => gl::BYTE,\n\n (Type::Integral, 2) => gl::SHORT,\n\n (Type::Integral, 4) => gl::INT,\n\n (Type::Unsigned, 1) | (Type::Boolean, 1) => gl::UNSIGNED_BYTE,\n\n (Type::Unsigned, 2) => gl::UNSIGNED_SHORT,\n\n (Type::Unsigned, 4) => gl::UNSIGNED_INT,\n\n (Type::Floating, 4) => gl::FLOAT,\n\n _ => panic!(\"unsupported vertex component format: {:?}\", f)\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 11, "score": 85857.94062099382 }, { "content": "pub fn to_target(l: Layering, d: Dim) -> GLenum {\n\n match l {\n\n Layering::Flat => match d {\n\n Dim::Dim1 => gl::TEXTURE_1D,\n\n Dim::Dim2 => gl::TEXTURE_2D,\n\n Dim::Dim3 => gl::TEXTURE_3D,\n\n Dim::Cubemap => gl::TEXTURE_CUBE_MAP\n\n },\n\n Layering::Layered => match d {\n\n Dim::Dim1 => gl::TEXTURE_1D_ARRAY,\n\n Dim::Dim2 => gl::TEXTURE_2D_ARRAY,\n\n Dim::Dim3 => panic!(\"3D textures array not supported\"),\n\n Dim::Cubemap => gl::TEXTURE_CUBE_MAP_ARRAY\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 12, "score": 83825.14041548813 }, { "content": "// Return the format, internal sized-format and type.\n\npub fn gl_pixel_format(pf: PixelFormat) -> Option<(GLenum, GLenum, GLenum)> {\n\n match (pf.format, pf.encoding) {\n\n (Format::RGB(8, 8, 8), Type::Unsigned) => Some((gl::RGB_INTEGER, gl::RGB8UI, gl::UNSIGNED_BYTE)),\n\n (Format::RGBA(8, 8, 8, 8), Type::Unsigned) => Some((gl::RGBA_INTEGER, gl::RGBA8UI, gl::UNSIGNED_BYTE)),\n\n (Format::RGB(32, 32, 32), Type::Floating) => Some((gl::RGB, gl::RGB32F, gl::FLOAT)),\n\n (Format::RGBA(32, 32, 32, 32), Type::Floating) => Some((gl::RGBA, gl::RGBA32F, gl::FLOAT)),\n\n (Format::Depth(32), Type::Floating) => Some((gl::DEPTH_COMPONENT, gl::DEPTH_COMPONENT32F, gl::FLOAT)),\n\n _ => panic!(\"unsupported pixel format\")\n\n }\n\n}\n\n\n", "file_path": "src/pixel.rs", "rank": 13, "score": 
82286.72766777339 }, { "content": "#[inline]\n\nfn off_align(off: usize, align: usize) -> usize {\n\n let a = align - 1;\n\n (off + a) & !a\n\n}\n", "file_path": "src/gl33/tess.rs", "rank": 14, "score": 78404.180568166 }, { "content": "// Upload texels into the texture’s memory. Becareful of the type of texels you send down.\n\nfn upload_texels<L, D, P, T>(target: GLenum, off: D::Offset, size: D::Size, texels: &[T])\n\n where L: Layerable,\n\n D: Dimensionable,\n\n D::Offset: Copy,\n\n D::Size: Copy,\n\n P: Pixel {\n\n let pf = P::pixel_format();\n\n\n\n match gl_pixel_format(pf) {\n\n Some((format, _, encoding)) => {\n\n match L::layering() {\n\n Layering::Flat => {\n\n match D::dim() {\n\n Dim::Dim1 => unsafe { gl::TexSubImage1D(target, 0, D::x_offset(off) as GLint, D::width(size) as GLsizei, format, encoding, texels.as_ptr() as *const c_void) },\n\n Dim::Dim2 => unsafe { gl::TexSubImage2D(target, 0, D::x_offset(off) as GLint, D::y_offset(off) as GLint, D::width(size) as GLsizei, D::height(size) as GLsizei, format, encoding, texels.as_ptr() as *const c_void) },\n\n Dim::Dim3 => unsafe { gl::TexSubImage3D(target, 0, D::x_offset(off) as GLint, D::y_offset(off) as GLint, D::z_offset(off) as GLint, D::width(size) as GLsizei, D::height(size) as GLsizei, D::depth(size) as GLsizei, format, encoding, texels.as_ptr() as *const c_void) },\n\n Dim::Cubemap => unsafe { gl::TexSubImage3D(target, 0, D::x_offset(off) as GLint, D::y_offset(off) as GLint, (gl::TEXTURE_CUBE_MAP_POSITIVE_X + D::z_offset(off)) as GLint, D::width(size) as GLsizei, D::width(size) as GLsizei, 1, format, encoding, texels.as_ptr() as *const c_void) }\n\n }\n\n },\n\n Layering::Layered => panic!(\"Layering::Layered not implemented yet\")\n\n }\n\n },\n\n None => panic!(\"unknown pixel format\")\n\n }\n\n}\n", "file_path": "src/gl33/texture.rs", "rank": 15, "score": 77751.86385437354 }, { "content": "fn dim_as_size(d: &Dim) -> GLint {\n\n match *d {\n\n Dim::Dim1 => 1,\n\n Dim::Dim2 => 2,\n\n Dim::Dim3 => 
3,\n\n Dim::Dim4 => 4\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 16, "score": 68255.71622150062 }, { "content": "// Weight in bytes of a vertex component.\n\nfn component_weight(f: &VertexComponentFormat) -> usize {\n\n dim_as_size(&f.dim) as usize * f.unit_size\n\n}\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 17, "score": 65989.58130544996 }, { "content": "fn component_weight(f: &VertexComponentFormat) -> usize {\n\n from_dim(&f.dim) as usize * component_type_weight(&f.component_type)\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 18, "score": 65986.45480198531 }, { "content": "// Weight in bytes of a single vertex, taking into account padding so that the vertex stay correctly\n\n// aligned.\n\nfn offset_based_vertex_weight(formats: &[VertexComponentFormat], offsets: &[usize]) -> usize {\n\n if formats.is_empty() || offsets.is_empty() {\n\n return 0;\n\n }\n\n\n\n off_align(offsets[offsets.len() - 1] + component_weight(&formats[formats.len() - 1]), formats[0].align)\n\n}\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 19, "score": 65481.76433502783 }, { "content": "fn vertex_weight(formats: &[VertexComponentFormat]) -> usize {\n\n formats.iter().fold(0, |a, f| a + component_weight(f))\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 20, "score": 64256.758811453125 }, { "content": "fn set_texture_levels(target: GLenum, mipmaps: usize) {\n\n unsafe {\n\n gl::TexParameteri(target, gl::TEXTURE_BASE_LEVEL, 0);\n\n gl::TexParameteri(target, gl::TEXTURE_MAX_LEVEL, mipmaps as GLint - 1);\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 21, "score": 62230.28015892773 }, { "content": "// Compute offsets for all the vertex components according to the alignments provided.\n\nfn aligned_offsets(formats: &[VertexComponentFormat]) -> Vec<usize> {\n\n let mut offsets = Vec::with_capacity(formats.len());\n\n let mut off = 0;\n\n\n\n // compute offsets\n\n for f in formats {\n\n off = off_align(off, f.align); // keep 
the current component format aligned\n\n offsets.push(off);\n\n off += component_weight(f); // increment the offset by the pratical size of the component\n\n }\n\n\n\n offsets\n\n}\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 22, "score": 62230.28015892773 }, { "content": "fn set_component_format(i: u32, stride: GLsizei, off: usize, f: &VertexComponentFormat) {\n\n match f.comp_type {\n\n Type::Floating => {\n\n unsafe {\n\n gl::VertexAttribPointer(i as GLuint, dim_as_size(&f.dim), opengl_sized_type(&f), gl::FALSE, stride, ptr::null::<GLvoid>().offset(off as isize));\n\n }\n\n },\n\n Type::Integral | Type::Unsigned | Type::Boolean => {\n\n unsafe {\n\n gl::VertexAttribIPointer(i as GLuint, dim_as_size(&f.dim), opengl_sized_type(&f), stride, ptr::null::<GLvoid>().offset(off as isize));\n\n }\n\n }\n\n }\n\n\n\n unsafe {\n\n gl::EnableVertexAttribArray(i as GLuint);\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 23, "score": 56608.05038801663 }, { "content": "fn create_texture_1d_storage(format: GLenum, iformat: GLenum, encoding: GLenum, w: u32, mipmaps: usize) {\n\n for level in 0..mipmaps {\n\n let w = w / 2u32.pow(level as u32);\n\n\n\n unsafe { gl::TexImage1D(gl::TEXTURE_1D, level as GLint, iformat as GLint, w as GLsizei, 0, format, encoding, ptr::null()) };\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 24, "score": 51281.565156492055 }, { "content": "fn create_cubemap_storage(format: GLenum, iformat: GLenum, encoding: GLenum, s: u32, mipmaps: usize) {\n\n for level in 0..mipmaps {\n\n let s = s / 2u32.pow(level as u32);\n\n\n\n unsafe { gl::TexImage2D(gl::TEXTURE_CUBE_MAP, level as GLint, iformat as GLint, s as GLsizei, s as GLsizei, 0, format, encoding, ptr::null()) };\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 25, "score": 51281.565156492055 }, { "content": "fn create_texture_2d_storage(format: GLenum, iformat: GLenum, encoding: GLenum, w: u32, h: u32, mipmaps: usize) {\n\n for level in 0..mipmaps {\n\n let 
div = 2u32.pow(level as u32);\n\n let w = w / div;\n\n let h = h / div;\n\n\n\n unsafe { gl::TexImage2D(gl::TEXTURE_2D, level as GLint, iformat as GLint, w as GLsizei, h as GLsizei, 0, format, encoding, ptr::null()) };\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 26, "score": 49800.98308368525 }, { "content": "fn create_texture_3d_storage(format: GLenum, iformat: GLenum, encoding: GLenum, w: u32, h: u32, d: u32, mipmaps: usize) {\n\n for level in 0..mipmaps {\n\n let div = 2u32.pow(level as u32);\n\n let w = w / div;\n\n let h = h / div;\n\n let d = d / div;\n\n\n\n unsafe { gl::TexImage3D(gl::TEXTURE_3D, level as GLint, iformat as GLint, w as GLsizei, h as GLsizei, d as GLsizei, 0, format, encoding, ptr::null()) };\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 27, "score": 48424.71771991589 }, { "content": "fn get_status() -> Option<String> {\n\n let status = unsafe { gl::CheckFramebufferStatus(gl::FRAMEBUFFER) };\n\n\n\n match status {\n\n gl::FRAMEBUFFER_COMPLETE => None,\n\n gl::FRAMEBUFFER_UNDEFINED => Some(String::from(\"framebuffer undefined\")),\n\n gl::FRAMEBUFFER_INCOMPLETE_ATTACHMENT => Some(String::from(\"incomplete attachment\")),\n\n gl::FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT => Some(String::from(\"incomplete missing attachment\")),\n\n gl::FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER => Some(String::from(\"incomplete draw buffer\")),\n\n gl::FRAMEBUFFER_INCOMPLETE_READ_BUFFER => Some(String::from(\"incomplete read buffer\")),\n\n gl::FRAMEBUFFER_UNSUPPORTED => Some(String::from(\"unsupported\")),\n\n gl::FRAMEBUFFER_INCOMPLETE_MULTISAMPLE => Some(String::from(\"incomplete multisample\")),\n\n gl::FRAMEBUFFER_INCOMPLETE_LAYER_TARGETS => Some(String::from(\"incomplete layer targets\")),\n\n _ => Some(String::from(\"unknown\"))\n\n }\n\n}\n", "file_path": "src/gl33/framebuffer.rs", "rank": 28, "score": 44896.2857485088 }, { "content": "fn from_dim(d: &Dim) -> GLint {\n\n match *d {\n\n Dim::Dim1 => 1,\n\n Dim::Dim2 => 2,\n\n 
Dim::Dim3 => 3,\n\n Dim::Dim4 => 4\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 36, "score": 44454.88135046601 }, { "content": "fn set_depth_test(test: bool) {\n\n unsafe {\n\n if test {\n\n gl::Enable(gl::DEPTH_TEST);\n\n } else {\n\n gl::Disable(gl::DEPTH_TEST);\n\n }\n\n }\n\n}\n", "file_path": "src/gl33/pipeline.rs", "rank": 37, "score": 43738.152085433554 }, { "content": "fn from_mode(mode: Mode) -> GLenum {\n\n match mode {\n\n Mode::Point => gl::POINTS,\n\n Mode::Line => gl::LINES,\n\n Mode::LineStrip => gl::LINE_STRIP,\n\n Mode::Triangle => gl::TRIANGLES,\n\n Mode::TriangleFan => gl::TRIANGLE_FAN,\n\n Mode::TriangleStrip => gl::TRIANGLE_STRIP\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 38, "score": 43296.74768739076 }, { "content": "fn from_filter(filter: Filter) -> GLenum {\n\n match filter {\n\n Filter::Nearest => gl::NEAREST,\n\n Filter::Linear => gl::LINEAR\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 39, "score": 43296.74768739076 }, { "content": "fn from_wrap(wrap: Wrap) -> GLenum {\n\n match wrap {\n\n Wrap::ClampToEdge => gl::CLAMP_TO_EDGE,\n\n Wrap::Repeat => gl::REPEAT,\n\n Wrap::MirroredRepeat => gl::MIRRORED_REPEAT\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 40, "score": 43296.74768739076 }, { "content": "fn opengl_mode(mode: Mode) -> GLenum {\n\n match mode {\n\n Mode::Point => gl::POINTS,\n\n Mode::Line => gl::LINES,\n\n Mode::LineStrip => gl::LINE_STRIP,\n\n Mode::Triangle => gl::TRIANGLES,\n\n Mode::TriangleFan => gl::TRIANGLE_FAN,\n\n Mode::TriangleStrip => gl::TRIANGLE_STRIP\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 41, "score": 42209.62396514593 }, { "content": "// Give OpenGL types information on the content of the VBO by setting vertex formats and pointers\n\n// to buffer memory.\n\nfn set_vertex_pointers(formats: &[VertexComponentFormat]) {\n\n let offsets = aligned_offsets(formats);\n\n let vertex_weight = 
offset_based_vertex_weight(formats, &offsets) as GLsizei;\n\n\n\n for (i, (format, off)) in formats.iter().zip(offsets).enumerate() {\n\n set_component_format(i as u32, vertex_weight, off, format);\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 42, "score": 41635.55968608058 }, { "content": "fn set_vertex_pointers(formats: &[VertexComponentFormat]) {\n\n let vertex_weight = vertex_weight(formats) as GLsizei;\n\n let mut offset = 0;\n\n\n\n for (i, format) in formats.iter().enumerate() {\n\n set_component_format(i as u32, vertex_weight, offset, format);\n\n offset += component_weight(format) as u32;\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 43, "score": 41628.57797592653 }, { "content": "fn glsl_pragma_src(src: &str) -> String {\n\n let mut pragma = String::from(GLSL_PRAGMA);\n\n pragma.push_str(src);\n\n\n\n pragma\n\n}\n\n\n\nconst GLSL_PRAGMA: &'static str = \"\\\n\n#version 330 core\\n\\\n\n#extension GL_ARB_separate_shader_objects : require\\n\";\n", "file_path": "src/gl33/shader/stage.rs", "rank": 44, "score": 41187.17357788375 }, { "content": "fn from_depth_comparison(fun: DepthComparison) -> GLenum {\n\n match fun {\n\n DepthComparison::Never => gl::NEVER,\n\n DepthComparison::Always => gl::ALWAYS,\n\n DepthComparison::Equal => gl::EQUAL,\n\n DepthComparison::NotEqual => gl::NOTEQUAL,\n\n DepthComparison::Less => gl::LESS,\n\n DepthComparison::LessOrEqual => gl::LEQUAL,\n\n DepthComparison::Greater => gl::GREATER,\n\n DepthComparison::GreaterOrEqual => gl::GEQUAL\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 45, "score": 41187.17357788375 }, { "content": "fn from_blending_factor(factor: blending::Factor) -> GLenum {\n\n match factor {\n\n blending::Factor::One => gl::ONE,\n\n blending::Factor::Zero => gl::ZERO,\n\n blending::Factor::SrcColor => gl::SRC_COLOR,\n\n blending::Factor::SrcColorComplement => gl::ONE_MINUS_SRC_COLOR,\n\n blending::Factor::DestColor => gl::DST_COLOR,\n\n 
blending::Factor::DestColorComplement => gl::ONE_MINUS_DST_COLOR,\n\n blending::Factor::SrcAlpha => gl::SRC_ALPHA,\n\n blending::Factor::SrcAlphaComplement => gl::ONE_MINUS_SRC_ALPHA,\n\n blending::Factor::DstAlpha => gl::DST_ALPHA,\n\n blending::Factor::DstAlphaComplement => gl::ONE_MINUS_DST_ALPHA,\n\n blending::Factor::SrcAlphaSaturate => gl::SRC_ALPHA_SATURATE\n\n }\n\n}\n\n\n", "file_path": "src/gl33/pipeline.rs", "rank": 46, "score": 40790.46312099986 }, { "content": "fn from_blending_equation(equation: blending::Equation) -> GLenum {\n\n match equation {\n\n blending::Equation::Additive => gl::FUNC_ADD,\n\n blending::Equation::Subtract => gl::FUNC_SUBTRACT,\n\n blending::Equation::ReverseSubtract => gl::FUNC_REVERSE_SUBTRACT,\n\n blending::Equation::Min => gl::MIN,\n\n blending::Equation::Max => gl::MAX\n\n }\n\n}\n\n\n", "file_path": "src/gl33/pipeline.rs", "rank": 47, "score": 40790.46312099986 }, { "content": "fn apply_sampler_to_texture(target: GLenum, sampler: &Sampler) {\n\n unsafe {\n\n gl::TexParameteri(target, gl::TEXTURE_WRAP_R, from_wrap(sampler.wrap_r) as GLint);\n\n gl::TexParameteri(target, gl::TEXTURE_WRAP_S, from_wrap(sampler.wrap_s) as GLint);\n\n gl::TexParameteri(target, gl::TEXTURE_WRAP_T, from_wrap(sampler.wrap_t) as GLint);\n\n gl::TexParameteri(target, gl::TEXTURE_MIN_FILTER, from_filter(sampler.minification) as GLint);\n\n gl::TexParameteri(target, gl::TEXTURE_MAG_FILTER, from_filter(sampler.minification) as GLint);\n\n match sampler.depth_comparison {\n\n Some(fun) => {\n\n gl::TexParameteri(target, gl::TEXTURE_COMPARE_FUNC, from_depth_comparison(fun) as GLint);\n\n gl::TexParameteri(target, gl::TEXTURE_COMPARE_MODE, gl::COMPARE_REF_TO_TEXTURE as GLint);\n\n },\n\n None => {\n\n gl::TexParameteri(target, gl::TEXTURE_COMPARE_MODE, gl::NONE as GLint);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 48, "score": 39827.081618179494 }, { "content": "fn run_render_command<'a>(program: &Program, piped: &Pipe<'a, 
RenderCommand<'a>>) {\n\n let update_program = &piped.update_program;\n\n let render_cmd = &piped.next;\n\n\n\n update_program(program);\n\n\n\n set_blending(render_cmd.blending);\n\n set_depth_test(render_cmd.depth_test);\n\n\n\n for piped_tess in &render_cmd.tessellations {\n\n let tess_update_program = &piped_tess.update_program;\n\n let tess = &piped_tess.next;\n\n\n\n tess_update_program(program);\n\n\n\n (tess.repr.render)(render_cmd.rasterization_size, render_cmd.instances);\n\n }\n\n}\n\n\n", "file_path": "src/gl33/pipeline.rs", "rank": 49, "score": 37664.99150708597 }, { "content": "fn set_blending(blending: Option<(blending::Equation, blending::Factor, blending::Factor)>) {\n\n match blending {\n\n Some((equation, src_factor, dest_factor)) => {\n\n unsafe {\n\n gl::Enable(gl::BLEND);\n\n gl::BlendEquation(from_blending_equation(equation));\n\n gl::BlendFunc(from_blending_factor(src_factor), from_blending_factor(dest_factor));\n\n }\n\n },\n\n None => {\n\n unsafe { gl::Disable(gl::BLEND) };\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gl33/pipeline.rs", "rank": 50, "score": 36805.366001655384 }, { "content": "fn set_component_format(i: u32, stride: GLsizei, off: u32, cf: &VertexComponentFormat) {\n\n unsafe {\n\n gl::VertexAttribPointer(i as GLuint, from_dim(&cf.dim), from_type(&cf.component_type), gl::FALSE, stride, ptr::null().offset(off as isize));\n\n gl::EnableVertexAttribArray(i as GLuint);\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 51, "score": 35219.670085251186 }, { "content": "## 0.13.1\n\n\n\n- **Deprecated.**\n\n\n\n# 0.13\n\n\n\n- Vertices (`Vertex`) are now aligned based on what decides the Rust compiler. This is very\n\n important, especially because of the version 0.15.0 adding non-32-bit vertex components: alignment\n\n and padding is now completely handled for you and you have nothing to care about.\n\n- Changed the meaning of the semantic maps (uniforms). 
It is now required to provide a `Uniform` to\n\n build a new `Sem`. This is an improvement in the sense that the *unsafe* zone is restricted to the\n\n declaration of uniforms for a given program. This *unsafe* zone will be covered in a next release\n\n by a macro to make it safe.\n\n\n\n# 0.12\n\n\n\n- Support for luminance-0.15.0 (yeah, lazy changelog line, sorry :D).\n\n\n\n# 0.11\n\n\n\n- `UniformWarning::TypeMismatch` now includes the name of the uniform which type mismatches with the\n\n requested on.\n\n\n\n# 0.10\n\n\n\n- Changed the pipeline workflow by introducing `Pipe` objects.\n\n- Removed strong typing in shader programs (`Program<T>` is now `Program`).\n\n- Removed strong typing in shader stages (`Stage<T>` is now `Stage`).\n\n\n\n## 0.9.1\n\n\n\n- Fixed segfault when a Tessellation with no bound buffer gets dropped.\n\n\n\n# 0.9\n\n\n\n- Added attribute-less tessellations.\n\n- Enhanced shader-related documentation.\n\n- Removed `Slot`.\n\n\n\n# 0.8\n\n\n\n- Support of texture / uniform buffer sets.\n\n\n\n## 0.7.1\n\n\n\n# 0.7\n\n\n\n- Textures support in shaders.\n\n\n\n## 0.6.2\n\n\n\n- Replaced some internal references to `Vec` by slices.\n\n\n\n## 0.6.1\n\n\n\n- Fixed memory corruption in new_shader / new_program.\n\n\n\n# 0.6\n\n\n\n- Uniform warnings.\n\n\n\n## 0.5.6\n\n\n\n- Fixed runtime reification of uniforms.\n\n\n\n## 0.5.5\n\n\n\n- Support for runtime reification of uniforms.\n\n\n\n## 0.5.4\n\n\n\n- Added support for getting textures’ texels.\n\n\n\n## 0.5.3\n\n\n\n- Support for raw texture uploads.\n\n\n\n## 0.5.2\n\n\n\n- Added documentation link.\n\n\n\n## 0.5.1\n\n\n\n- Fixed vertex input offsets. That issue makes all prior versions fail when trying to handle\n\n multi-attributes vertices. 
You are very advised to upgrade to this version then.\n\n\n", "file_path": "CHANGELOG.md", "rank": 52, "score": 21063.10744229308 }, { "content": "# 0.5\n\n\n\n- Fixed viewport issue.\n\n- Removed the need of **core**.\n\n- Removed `UniformName`.\n\n- Fixed the `update_textures` function regarding **luminance**.\n\n- Using `AsRef` for `update_textures`.\n\n- Adapted mipmaps as `usize`.\n\n- Panic if unknown pixel format.\n\n\n\n## 0.4.3\n\n\n\n- Implemented `Uniform` for `Texture`.\n\n\n\n## 0.4.2\n\n\n\n- Fixed `HasFramebuffer::free_framebuffer`.\n\n\n\n## 0.4.1\n\n\n\n- Crate fixed because of *0.4.0* being broken then yanked.\n\n\n\n# 0.4\n\n\n\n- Implemented existential quantification for `Pipeline`.\n\n- Added travis CI support.\n\n\n\n## 0.3.2\n\n\n\n- Added `ProgramProxy` in the export list of lib.rs.\n\n\n\n## 0.3.1\n\n\n\n- Added `ProgramProxy` alias.\n\n\n\n# 0.3\n\n\n\n- `Program` now has its *uniform interface* tagged in the type.\n\n- Added support for `luminance-0.4.0`.\n\n\n\n## 0.2.1\n\n\n\n- Removed `W` use from `Buffer` as it was removed in `luminance-0.3.0`.\n\n\n\n# 0.2\n\n\n\n- Added support for `luminance-0.2.0`.\n\n\n\n# 0.1\n\n\n\n- Initial revision.\n", "file_path": "CHANGELOG.md", "rank": 53, "score": 21061.273316218187 }, { "content": "# This project is deprecated\n\n\n\nThis crate is now deprecated. If you were using it and would like to update, please visit the page\n\nof [luminance].\n\n\n\n> Why did this crate get deprecated?\n\n\n\nMostly because [luminance] is now a standalone project (no backend involved). 
It enables the library\n\nto be more concise by narrowing its scope and provide a well-defined interface instead of a blurred\n\nset of mixins.\n\n\n\n[luminance]: https://crates.io/crates/luminance\n", "file_path": "README.md", "rank": 54, "score": 21057.093839430272 }, { "content": "use gl;\n\nuse gl::types::*;\n\nuse gl33::buffer::Buffer;\n\nuse gl33::token::GL33;\n\nuse luminance::tessellation::{self, HasTessellation, Mode};\n\nuse luminance::vertex::{Dim, Type, Vertex, VertexComponentFormat};\n\nuse std::mem;\n\nuse std::ptr;\n\n\n\npub type Tessellation = tessellation::Tessellation<GL33>;\n\n\n\npub struct GLTess {\n\n // closure taking the point / line size and the number of instances to render\n\n pub render: Box<Fn(Option<f32>, u32)>,\n\n vao: GLenum,\n\n buffers: Vec<GLenum>\n\n}\n\n\n\nimpl HasTessellation for GL33 {\n\n type Tessellation = GLTess;\n", "file_path": "src/gl33/tessellation.rs", "rank": 55, "score": 15003.809264709495 }, { "content": "use gl;\n\nuse gl::types::*;\n\nuse luminance::tess::{self, HasTess, Mode};\n\nuse luminance::vertex::{Dim, Type, Vertex, VertexComponentFormat, VertexFormat};\n\nuse std::mem;\n\nuse std::ptr;\n\n\n\nuse gl33::buffer::{Buffer, GLBuffer};\n\nuse gl33::token::GL33;\n\n\n\npub type Tess = tess::Tess<GL33>;\n\n\n\npub struct GLTess {\n\n // closure taking the point / line size and the number of instances to render\n\n pub render: Box<Fn(Option<f32>, u32)>,\n\n vao: GLenum,\n\n vbo: Option<GLBuffer>,\n\n ibo: Option<GLBuffer>,\n\n vertex_format: VertexFormat,\n\n vert_nb: usize\n", "file_path": "src/gl33/tess.rs", "rank": 56, "score": 15003.310455199187 }, { "content": " pub fn new(handle: GLuint, target: GLenum) -> Self {\n\n GLTexture {\n\n handle: handle,\n\n target: target\n\n }\n\n }\n\n}\n\n\n\nimpl HasTexture for GL33 {\n\n type ATexture = GLTexture;\n\n\n\n fn new_texture<L, D, P>(size: D::Size, mipmaps: usize, sampler: &Sampler) -> Result<Self::ATexture>\n\n where L: Layerable,\n\n D: Dimensionable,\n\n 
D::Size: Copy,\n\n P: Pixel {\n\n let mut texture = 0;\n\n let target = to_target(L::layering(), D::dim());\n\n\n\n unsafe {\n", "file_path": "src/gl33/texture.rs", "rank": 57, "score": 15002.926835013033 }, { "content": "use gl;\n\nuse gl::types::*;\n\nuse gl33::token::GL33;\n\nuse luminance::texture::{self, DepthComparison, Dim, Dimensionable, Filter, HasTexture, Layerable,\n\n Layering, Result, Sampler, TextureError, Wrap, dim_capacity};\n\nuse luminance::pixel::{Pixel, PixelFormat};\n\nuse pixel::{gl_pixel_format, pixel_components};\n\nuse std::mem;\n\nuse std::os::raw::c_void;\n\nuse std::ptr;\n\n\n\npub type Texture<L, D, P> = texture::Texture<GL33, L, D, P>;\n\n\n\n// OpenGL texture representation.\n\npub struct GLTexture {\n\n pub handle: GLuint, // handle to GPU texture object\n\n pub target: GLenum // « type » of the texture; used for bindings\n\n}\n\n\n\nimpl GLTexture {\n", "file_path": "src/gl33/texture.rs", "rank": 58, "score": 14999.339942602906 }, { "content": "use gl;\n\nuse gl::types::*;\n\nuse gl33::texture::{GLTexture, create_texture, to_target};\n\nuse gl33::token::GL33;\n\nuse luminance::framebuffer::{self, ColorSlot, DepthSlot, FramebufferError, HasFramebuffer, Result};\n\nuse luminance::texture::{Dimensionable, Layerable};\n\nuse std::default::Default;\n\n\n\npub type Framebuffer<L, D, CS, DS> = framebuffer::Framebuffer<GL33, L, D, CS, DS>;\n\n\n\npub struct GLFramebuffer {\n\n pub handle: GLuint,\n\n pub renderbuffer: Option<GLuint>,\n\n pub w: u32,\n\n pub h: u32,\n\n}\n\n\n\nimpl HasFramebuffer for GL33 {\n\n type Framebuffer = GLFramebuffer;\n\n\n", "file_path": "src/gl33/framebuffer.rs", "rank": 59, "score": 14998.951153736634 }, { "content": "use gl;\n\nuse gl::types::*;\n\nuse gl33::token::GL33;\n\nuse luminance::blending;\n\nuse luminance::framebuffer::{ColorSlot, DepthSlot};\n\nuse luminance::pipeline::{self, HasPipeline};\n\nuse luminance::texture::{Dimensionable, Layerable};\n\n\n\nuse gl33::shader::program::Program;\n\n\n\npub 
type Pipeline<'a, L, D, CS, DS> = pipeline::Pipeline<'a, GL33, L, D, CS, DS>;\n\npub type Pipe<'a, T> = pipeline::Pipe<'a, GL33, T>;\n\npub type ShadingCommand<'a> = pipeline::ShadingCommand<'a, GL33>;\n\npub type RenderCommand<'a> = pipeline::RenderCommand<'a, GL33>;\n\n\n\nimpl HasPipeline for GL33 {\n\n fn run_pipeline<L, D, CS, DS>(cmd: &Pipeline<L, D, CS, DS>)\n\n where L: Layerable,\n\n D: Dimensionable,\n\n D::Size: Copy,\n", "file_path": "src/gl33/pipeline.rs", "rank": 60, "score": 14997.353320674807 }, { "content": " gl::GenTextures(1, &mut texture);\n\n gl::BindTexture(target, texture);\n\n }\n\n \n\n create_texture::<L, D>(target, size, mipmaps, P::pixel_format(), sampler)?;\n\n\n\n // FIXME: maybe we can get rid of this\n\n unsafe {\n\n gl::BindTexture(target, 0);\n\n }\n\n\n\n Ok(GLTexture::new(texture, target))\n\n }\n\n\n\n fn free(texture: &mut Self::ATexture) {\n\n unsafe { gl::DeleteTextures(1, &texture.handle) }\n\n }\n\n\n\n fn clear_part<L, D, P>(texture: &Self::ATexture, gen_mipmaps: bool, off: D::Offset, size: D::Size, pixel: P::Encoding)\n\n where L: Layerable, D: Dimensionable, D::Offset: Copy, D::Size: Copy, P: Pixel, P::Encoding: Copy {\n", "file_path": "src/gl33/texture.rs", "rank": 61, "score": 14996.428917668765 }, { "content": "\n\n gl::BindTexture(target, 0);\n\n\n\n let mut gl_framebuffer = GLFramebuffer {\n\n handle: framebuffer,\n\n renderbuffer: depth_renderbuffer,\n\n w: D::width(size),\n\n h: D::height(size)\n\n };\n\n\n\n match get_status() {\n\n Some(incomplete) => {\n\n gl::BindFramebuffer(gl::FRAMEBUFFER, 0);\n\n\n\n Self::free_framebuffer(&mut gl_framebuffer);\n\n\n\n Err(FramebufferError::Incomplete(incomplete))\n\n },\n\n None => {\n\n gl::BindFramebuffer(gl::FRAMEBUFFER, 0);\n", "file_path": "src/gl33/framebuffer.rs", "rank": 62, "score": 14995.689331672991 }, { "content": " fn new_framebuffer<L, D, CS, DS>(size: D::Size, mipmaps: usize) -> Result<(Self::Framebuffer, Vec<Self::ATexture>, Option<Self::ATexture>)>\n\n 
where L: Layerable,\n\n D: Dimensionable,\n\n D::Size: Copy,\n\n CS: ColorSlot<Self, L, D>,\n\n DS: DepthSlot<Self, L, D> {\n\n let mut framebuffer: GLuint = 0;\n\n let color_formats = CS::color_formats();\n\n let depth_format = DS::depth_format();\n\n let target = to_target(L::layering(), D::dim());\n\n let mut textures: Vec<GLuint> = vec![0; color_formats.len() + if depth_format.is_some() { 1 } else { 0 }]; // FIXME: remove that (inference)\n\n let mut depth_texture: Option<GLuint> = None;\n\n let mut depth_renderbuffer: Option<GLuint> = None;\n\n\n\n unsafe {\n\n gl::GenFramebuffers(1, &mut framebuffer);\n\n\n\n gl::BindFramebuffer(gl::FRAMEBUFFER, framebuffer);\n\n\n\n // generate all the required textures with the correct formats\n", "file_path": "src/gl33/framebuffer.rs", "rank": 63, "score": 14995.478374115222 }, { "content": "\n\n fn default_framebuffer<D>(size: D::Size) -> Self::Framebuffer\n\n where D: Dimensionable,\n\n D::Size: Copy {\n\n GLFramebuffer {\n\n handle: 0,\n\n renderbuffer: None,\n\n w: D::width(size),\n\n h: D::height(size)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gl33/framebuffer.rs", "rank": 64, "score": 14994.979270816893 }, { "content": "pub mod buffer;\n\npub mod framebuffer;\n\npub mod pipeline;\n\npub mod shader;\n\npub mod tess;\n\npub mod texture;\n\npub mod token;\n\n\n\npub use self::buffer::{Buffer, BufferSlice, BufferSliceMut};\n\npub use self::framebuffer::Framebuffer;\n\npub use self::pipeline::{Pipe, Pipeline, RenderCommand, ShadingCommand};\n\npub use self::shader::program::{Program, Uniform};\n\npub use self::shader::stage::Stage;\n\npub use self::tess::Tess;\n\npub use self::texture::Texture;\n\npub use self::token::*;\n", "file_path": "src/gl33/mod.rs", "rank": 65, "score": 14994.842462841041 }, { "content": "}\n\n\n\nimpl HasTess for GL33 {\n\n type Tess = GLTess;\n\n\n\n fn new_tess<T>(mode: Mode, vertices: &[T], indices: Option<&[u32]>) -> Self::Tess where T: Vertex {\n\n let mut vao: GLuint = 0;\n\n let vert_nb = 
vertices.len();\n\n\n\n unsafe {\n\n gl::GenVertexArrays(1, &mut vao);\n\n\n\n gl::BindVertexArray(vao);\n\n\n\n // vertex buffer\n\n let vertex_buffer = Buffer::new(vert_nb);\n\n vertex_buffer.fill(vertices);\n\n\n\n // once the vertex buffer is filled, we get its internal representation and we leak it so that\n\n // it’s not dropped at the end of the scope\n", "file_path": "src/gl33/tess.rs", "rank": 66, "score": 14994.608286106257 }, { "content": " set_point_line_size(mode, size);\n\n\n\n if instances == 1 {\n\n gl::DrawArrays(from_mode(mode), 0, vert_nb as GLsizei);\n\n } else if instances > 1 {\n\n gl::DrawArraysInstanced(from_mode(mode), 0, vert_nb as GLsizei, instances as GLsizei);\n\n } else {\n\n panic!(\"cannot render 0 instance\");\n\n }\n\n }),\n\n vao: vao,\n\n buffers: vec![vbo]\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn destroy(tessellation: &mut Self::Tessellation) {\n\n // delete vertex array and all bound buffers\n\n unsafe {\n\n gl::DeleteVertexArrays(1, &tessellation.vao);\n\n gl::DeleteBuffers(tessellation.buffers.len() as GLsizei, tessellation.buffers.as_ptr());\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 67, "score": 14994.266777356372 }, { "content": " let mut w = 0;\n\n let mut h = 0;\n\n\n\n gl::BindTexture(texture.target, texture.handle);\n\n\n\n // retrieve the size of the texture (w and h)\n\n gl::GetTexLevelParameteriv(texture.target, 0, gl::TEXTURE_WIDTH, &mut w);\n\n gl::GetTexLevelParameteriv(texture.target, 0, gl::TEXTURE_HEIGHT, &mut h);\n\n\n\n // resize the vec to allocate enough space to host the returned texels\n\n texels.resize((w * h) as usize * pixel_components(pf), mem::uninitialized());\n\n\n\n gl::GetTexImage(texture.target, 0, format, ty, texels.as_mut_ptr() as *mut c_void);\n\n\n\n gl::BindTexture(texture.target, 0);\n\n }\n\n\n\n texels\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 68, "score": 14993.914864639888 }, { "content": "\n\n fn new<T: 'static>(mode: Mode, vertices: 
&[T], indices: Option<&[u32]>) -> Self::Tessellation where T: Vertex {\n\n let mut vao: GLuint = 0;\n\n let vert_nb = vertices.len();\n\n\n\n unsafe {\n\n gl::GenVertexArrays(1, &mut vao);\n\n\n\n gl::BindVertexArray(vao);\n\n\n\n // vertex buffer\n\n let vertex_buffer = Buffer::new(vert_nb);\n\n vertex_buffer.fill(vertices);\n\n\n\n // once the vertex buffer is filled, we get its internal representation’s handle and we leak\n\n // it so that it’s not dropped at the end of the scope\n\n let vbo = vertex_buffer.repr.handle;\n\n mem::forget(vertex_buffer);\n\n\n\n gl::BindBuffer(gl::ARRAY_BUFFER, vbo);\n", "file_path": "src/gl33/tessellation.rs", "rank": 69, "score": 14993.700196840286 }, { "content": " vertex_format: T::vertex_format(),\n\n vert_nb: vert_nb\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn destroy_tess(tess: &mut Self::Tess) {\n\n // delete vertex array and all bound buffers\n\n unsafe {\n\n gl::DeleteVertexArrays(1, &tess.vao);\n\n\n\n if let &Some(ref vbo) = &tess.vbo {\n\n gl::DeleteBuffers(1, &vbo.handle);\n\n }\n\n\n\n if let &Some(ref ibo) = &tess.ibo {\n\n gl::DeleteBuffers(1, &ibo.handle);\n\n }\n\n }\n", "file_path": "src/gl33/tess.rs", "rank": 70, "score": 14993.638763939185 }, { "content": " }\n\n\n\n fn attributeless(mode: Mode, vert_nb: usize) -> Self::Tess {\n\n let mut vao = 0;\n\n\n\n unsafe {\n\n gl::GenVertexArrays(1, &mut vao);\n\n\n\n gl::BindVertexArray(vao);\n\n gl::BindVertexArray(0);\n\n\n\n GLTess {\n\n render: Box::new(move |size, instances| {\n\n gl::BindVertexArray(vao);\n\n\n\n set_point_line_size(mode, size);\n\n\n\n if instances == 1 {\n\n gl::DrawArrays(opengl_mode(mode), 0, vert_nb as GLsizei);\n\n } else if instances > 1 {\n", "file_path": "src/gl33/tess.rs", "rank": 71, "score": 14993.1855708832 }, { "content": " Self::upload_part::<L, D, P>(texture, gen_mipmaps, off, size, &vec![pixel; dim_capacity::<D>(size) as usize])\n\n }\n\n\n\n fn upload_part<L, D, P>(texture: &Self::ATexture, gen_mipmaps: bool, off: D::Offset, size: 
D::Size, texels: &[P::Encoding])\n\n where L: Layerable, D::Offset: Copy, D::Size: Copy, D: Dimensionable, P: Pixel {\n\n unsafe {\n\n gl::BindTexture(texture.target, texture.handle);\n\n\n\n upload_texels::<L, D, P, P::Encoding>(texture.target, off, size, texels);\n\n\n\n if gen_mipmaps {\n\n gl::GenerateMipmap(texture.target);\n\n }\n\n\n\n gl::BindTexture(texture.target, 0);\n\n }\n\n }\n\n\n\n fn upload_part_raw<L, D, P>(texture: &Self::ATexture, gen_mipmaps: bool, off: D::Offset, size: D::Size, texels: &[P::RawEncoding])\n\n where L: Layerable, D::Offset: Copy, D::Size: Copy, D: Dimensionable, P: Pixel {\n", "file_path": "src/gl33/texture.rs", "rank": 72, "score": 14993.12842341957 }, { "content": " gl::GenTextures((textures.len()) as GLint, textures.as_mut_ptr());\n\n\n\n // color textures\n\n if color_formats.is_empty() {\n\n gl::DrawBuffer(gl::NONE);\n\n } else {\n\n for (i, (format, texture)) in color_formats.iter().zip(&textures).enumerate() {\n\n gl::BindTexture(target, *texture);\n\n create_texture::<L, D>(target, size, mipmaps, *format, &Default::default()).map_err(FramebufferError::TextureError)?;\n\n gl::FramebufferTexture(gl::FRAMEBUFFER, gl::COLOR_ATTACHMENT0 + i as GLenum, *texture, 0);\n\n }\n\n\n\n // specify the list of color buffers to draw to\n\n let color_buf_nb = color_formats.len() as GLsizei;\n\n let color_buffers: Vec<_> = (gl::COLOR_ATTACHMENT0..gl::COLOR_ATTACHMENT0 + color_buf_nb as GLenum).collect();\n\n\n\n gl::DrawBuffers(color_buf_nb, color_buffers.as_ptr());\n\n }\n\n\n\n // depth texture, if exists\n", "file_path": "src/gl33/framebuffer.rs", "rank": 73, "score": 14993.095690066637 }, { "content": " let vbo = vertex_buffer.repr.clone();\n\n mem::forget(vertex_buffer);\n\n\n\n gl::BindBuffer(gl::ARRAY_BUFFER, vbo.handle);\n\n set_vertex_pointers(&T::vertex_format());\n\n\n\n // in case of indexed render, create the required objects\n\n if let Some(indices) = indices {\n\n let ind_nb = indices.len();\n\n let index_buffer = 
Buffer::new(ind_nb);\n\n index_buffer.fill(indices);\n\n\n\n // same than vertex buffer, once the index buffer is filled, we leak it to the void\n\n let ibo = index_buffer.repr.clone();\n\n mem::forget(index_buffer);\n\n\n\n gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, ibo.handle);\n\n\n\n gl::BindVertexArray(0);\n\n\n", "file_path": "src/gl33/tess.rs", "rank": 74, "score": 14992.947879624142 }, { "content": "\n\n let textures = textures.into_iter().map(|t| GLTexture::new(t, target)).collect();\n\n let depth_texture = depth_texture.map(|t| GLTexture::new(t, target));\n\n Ok((gl_framebuffer, textures, depth_texture))\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn free_framebuffer(framebuffer: &mut Self::Framebuffer) {\n\n unsafe {\n\n if let Some(renderbuffer) = framebuffer.renderbuffer {\n\n gl::DeleteRenderbuffers(1, &renderbuffer);\n\n }\n\n\n\n if framebuffer.handle != 0 {\n\n gl::DeleteFramebuffers(1, &framebuffer.handle);\n\n }\n\n }\n\n }\n", "file_path": "src/gl33/framebuffer.rs", "rank": 75, "score": 14992.921619817884 }, { "content": " gl::DrawArraysInstanced(opengl_mode(mode), 0, vert_nb as GLsizei, instances as GLsizei);\n\n } else {\n\n panic!(\"cannot render 0 instance\");\n\n }\n\n }),\n\n vao: vao,\n\n vbo: None,\n\n ibo: None,\n\n vertex_format: Vec::new(),\n\n vert_nb: vert_nb\n\n }\n\n }\n\n }\n\n\n\n fn vertex_format(tesse: &Self::Tess) -> &VertexFormat {\n\n &tesse.vertex_format\n\n }\n\n\n\n fn get_vertex_buffer_ref_mut(tess: &mut Self::Tess) -> Option<(&mut Self::ABuffer, usize)> {\n\n let vert_nb = tess.vert_nb;\n\n tess.vbo.as_mut().map(|vbo| (vbo, vert_nb))\n\n }\n\n}\n\n\n\n// Give OpenGL types information on the content of the VBO by setting vertex formats and pointers\n\n// to buffer memory.\n", "file_path": "src/gl33/tess.rs", "rank": 76, "score": 14992.790139505632 }, { "content": " unsafe {\n\n gl::BindTexture(texture.target, texture.handle);\n\n\n\n upload_texels::<L, D, P, P::RawEncoding>(texture.target, off, size, texels);\n\n\n\n if gen_mipmaps 
{\n\n gl::GenerateMipmap(texture.target);\n\n }\n\n\n\n gl::BindTexture(texture.target, 0);\n\n }\n\n }\n\n\n\n // FIXME: cubemaps?\n\n fn get_raw_texels<P>(texture: &Self::ATexture) -> Vec<P::RawEncoding> where P: Pixel, P::RawEncoding: Copy {\n\n let mut texels = Vec::new();\n\n let pf = P::pixel_format();\n\n let (format, _, ty) = gl_pixel_format(pf).unwrap();\n\n\n\n unsafe {\n", "file_path": "src/gl33/texture.rs", "rank": 77, "score": 14992.690494163515 }, { "content": " set_vertex_pointers(&T::vertex_format());\n\n\n\n // in case of indexed render, create the required objects\n\n if let Some(indices) = indices {\n\n let ind_nb = indices.len();\n\n let index_buffer = Buffer::new(ind_nb);\n\n index_buffer.fill(indices);\n\n\n\n // same than vertex buffer, once the index buffer is filled, we leak it to the void\n\n let ibo = index_buffer.repr.handle;\n\n mem::forget(index_buffer);\n\n\n\n gl::BindBuffer(gl::ELEMENT_ARRAY_BUFFER, ibo);\n\n\n\n gl::BindVertexArray(0);\n\n\n\n GLTess {\n\n render: Box::new(move |size, instances| {\n\n gl::BindVertexArray(vao);\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 78, "score": 14991.410894640747 }, { "content": " set_point_line_size(mode, size);\n\n\n\n if instances == 1 {\n\n gl::DrawElements(from_mode(mode), ind_nb as GLsizei, gl::UNSIGNED_INT, ptr::null());\n\n } else if instances > 1 {\n\n gl::DrawElementsInstanced(from_mode(mode), ind_nb as GLsizei, gl::UNSIGNED_INT, ptr::null(), instances as GLsizei);\n\n } else {\n\n panic!(\"cannot index-render 0 instance\");\n\n }\n\n }),\n\n vao: vao,\n\n buffers: vec![vbo, ibo]\n\n }\n\n } else {\n\n gl::BindVertexArray(0);\n\n\n\n GLTess {\n\n render: Box::new(move |size, instances| {\n\n gl::BindVertexArray(vao);\n\n\n", "file_path": "src/gl33/tessellation.rs", "rank": 79, "score": 14991.33163284047 }, { "content": " CS: ColorSlot<Self, L, D>,\n\n DS: DepthSlot<Self, L, D> {\n\n let clear_color = cmd.clear_color;\n\n\n\n unsafe {\n\n 
gl::BindFramebuffer(gl::FRAMEBUFFER, cmd.framebuffer.repr.handle);\n\n gl::Viewport(0, 0, cmd.framebuffer.repr.w as GLint, cmd.framebuffer.repr.h as GLint);\n\n gl::ClearColor(clear_color[0], clear_color[1], clear_color[2], clear_color[3]);\n\n gl::Clear(gl::COLOR_BUFFER_BIT | gl::DEPTH_BUFFER_BIT);\n\n\n\n // traverse the texture set and bind required textures\n\n for (unit, tex) in cmd.texture_set.iter().enumerate() {\n\n gl::ActiveTexture(gl::TEXTURE0 + unit as GLenum);\n\n gl::BindTexture(tex.repr.target, tex.repr.handle);\n\n }\n\n\n\n // traverse the buffer set and bind required buffers\n\n for (index, buf) in cmd.buffer_set.iter().enumerate() {\n\n gl::BindBufferBase(gl::UNIFORM_BUFFER, index as GLuint, buf.repr.handle);\n\n }\n", "file_path": "src/gl33/pipeline.rs", "rank": 80, "score": 14991.220951468838 }, { "content": " (Layering::Flat, Dim::Dim3) => {\n\n create_texture_3d_storage(format, iformat, encoding, D::width(size), D::height(size), D::depth(size), mipmaps);\n\n Ok(())\n\n },\n\n // cubemap\n\n (Layering::Flat, Dim::Cubemap) => {\n\n create_cubemap_storage(format, iformat, encoding, D::width(size), mipmaps);\n\n Ok(())\n\n },\n\n _ => Err(TextureError::TextureStorageCreationFailed(format!(\"unsupported texture OpenGL pixel format: {:?}\", glf)))\n\n }\n\n },\n\n None => Err(TextureError::TextureStorageCreationFailed(format!(\"unsupported texture pixel format: {:?}\", pf)))\n\n }\n\n}\n\n\n", "file_path": "src/gl33/texture.rs", "rank": 81, "score": 14990.689523917812 }, { "content": " if let Some(format) = depth_format {\n\n let texture = textures.pop().unwrap();\n\n\n\n gl::BindTexture(target, texture);\n\n create_texture::<L, D>(target, size, mipmaps, format, &Default::default()).map_err(FramebufferError::TextureError)?;\n\n gl::FramebufferTexture(gl::FRAMEBUFFER, gl::DEPTH_ATTACHMENT, texture, 0);\n\n\n\n depth_texture = Some(texture);\n\n } else {\n\n let mut renderbuffer: GLuint = 0;\n\n\n\n gl::GenRenderbuffers(1, &mut renderbuffer);\n\n 
gl::BindRenderbuffer(gl::RENDERBUFFER, renderbuffer);\n\n gl::RenderbufferStorage(gl::RENDERBUFFER, gl::DEPTH_COMPONENT32F, D::width(size) as GLsizei, D::height(size) as GLsizei);\n\n gl::BindRenderbuffer(gl::RENDERBUFFER, 0);\n\n\n\n gl::FramebufferRenderbuffer(gl::FRAMEBUFFER, gl::DEPTH_ATTACHMENT, gl::RENDERBUFFER, renderbuffer);\n\n\n\n depth_renderbuffer = Some(renderbuffer);\n\n }\n", "file_path": "src/gl33/framebuffer.rs", "rank": 82, "score": 14989.664817845643 }, { "content": " GLTess {\n\n render: Box::new(move |size, instances| {\n\n gl::BindVertexArray(vao);\n\n\n\n set_point_line_size(mode, size);\n\n\n\n if instances == 1 {\n\n gl::DrawElements(opengl_mode(mode), ind_nb as GLsizei, gl::UNSIGNED_INT, ptr::null());\n\n } else if instances > 1 {\n\n gl::DrawElementsInstanced(opengl_mode(mode), ind_nb as GLsizei, gl::UNSIGNED_INT, ptr::null(), instances as GLsizei);\n\n } else {\n\n panic!(\"cannot index-render 0 instance\");\n\n }\n\n }),\n\n vao: vao,\n\n vbo: Some(vbo),\n\n ibo: Some(ibo),\n\n vertex_format: T::vertex_format(),\n\n vert_nb: vert_nb\n\n }\n", "file_path": "src/gl33/tess.rs", "rank": 83, "score": 14989.631229687362 }, { "content": " } else {\n\n gl::BindVertexArray(0);\n\n\n\n GLTess {\n\n render: Box::new(move |size, instances| {\n\n gl::BindVertexArray(vao);\n\n\n\n set_point_line_size(mode, size);\n\n\n\n if instances == 1 {\n\n gl::DrawArrays(opengl_mode(mode), 0, vert_nb as GLsizei);\n\n } else if instances > 1 {\n\n gl::DrawArraysInstanced(opengl_mode(mode), 0, vert_nb as GLsizei, instances as GLsizei);\n\n } else {\n\n panic!(\"cannot render 0 instance\");\n\n }\n\n }),\n\n vao: vao,\n\n vbo: Some(vbo),\n\n ibo: None,\n", "file_path": "src/gl33/tess.rs", "rank": 84, "score": 14989.525065563153 }, { "content": "#[derive(Debug)]\n\npub struct GL33;\n", "file_path": "src/gl33/token.rs", "rank": 85, "score": 14989.307589164911 }, { "content": " }\n\n\n\n for piped_shading_cmd in &cmd.shading_commands {\n\n 
Self::run_shading_command(piped_shading_cmd);\n\n }\n\n }\n\n\n\n fn run_shading_command<'a>(piped: &Pipe<'a, ShadingCommand>) {\n\n let update_program = &piped.update_program;\n\n let shading_cmd = &piped.next;\n\n\n\n unsafe { gl::UseProgram(shading_cmd.program.0.id) };\n\n\n\n update_program(&shading_cmd.program);\n\n\n\n for piped_render_cmd in &shading_cmd.render_commands {\n\n run_render_command(&shading_cmd.program, piped_render_cmd);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/gl33/pipeline.rs", "rank": 86, "score": 14987.186003416226 }, { "content": "use gl;\n\nuse gl::types::*;\n\nuse gl33::token::GL33;\n\nuse luminance::shader::stage;\n\nuse luminance::shader::stage::{HasStage, StageError, Type};\n\nuse std::ffi::CString;\n\nuse std::ptr::{null, null_mut};\n\n\n\npub type Stage = stage::Stage<GL33>;\n\n\n\nimpl HasStage for GL33 {\n\n type AStage = GLuint;\n\n\n\n fn new_shader(shader_type: Type, src: &str) -> Result<Self::AStage, StageError> {\n\n unsafe {\n\n let src = CString::new(glsl_pragma_src(src).as_bytes()).unwrap();\n\n let handle = gl::CreateShader(from_shader_type(shader_type));\n\n\n\n if handle == 0 {\n\n return Err(StageError::CompilationFailed(shader_type, String::from(\"unable to create shader stage\")));\n", "file_path": "src/gl33/shader/stage.rs", "rank": 87, "score": 14461.619636090882 }, { "content": "use gl;\n\nuse gl::types::*;\n\nuse luminance::linear::{M22, M33, M44};\n\nuse luminance::shader::program::{self, Dim, HasProgram, ProgramError, Type, Sem, SemIndex,\n\n UniformWarning};\n\nuse std::collections::HashMap;\n\nuse std::ffi::CString;\n\nuse std::ptr::null_mut;\n\n\n\nuse gl33::token::GL33;\n\n\n\npub type Program = program::Program<GL33>;\n\n\n\npub struct GLProgram {\n\n pub id: GLuint, // OpenGL ID\n\n uni_sem_map: HashMap<SemIndex, GLint>, // mapping between user semantic (indexes) and OpenGL uniform locations\n\n ubo_sem_map: HashMap<SemIndex, GLint>, // mapping between user semantic (indexes) and OpenGL uniform block 
indexes\n\n}\n\n\n\nimpl HasProgram for GL33 {\n", "file_path": "src/gl33/shader/program.rs", "rank": 88, "score": 14456.468814207832 }, { "content": "use gl;\n\nuse gl::types::*;\n\nuse gl33::token::GL33;\n\nuse luminance::linear::*;\n\nuse luminance::shader::uniform;\n\n\n\npub type Uniform<T> = uniform::Uniform<GL33, T>;\n\npub type Uniformable = uniform::Uniformable<GL33>;\n\n\n\nimpl uniform::HasUniform for GL33 {\n\n type U = GLint;\n\n\n\n fn update1_i32(u: &Self::U, x: i32) {\n\n unsafe { gl::Uniform1i(*u, x) }\n\n }\n\n\n\n fn update2_i32(u: &Self::U, v: [i32; 2]) {\n\n unsafe { gl::Uniform2iv(*u, 1, &v as *const i32) }\n\n }\n\n\n", "file_path": "src/gl33/shader/uniform.rs", "rank": 89, "score": 14455.497109034173 }, { "content": " }\n\n\n\n gl::ShaderSource(handle, 1, [src.as_ptr()].as_ptr(), null());\n\n gl::CompileShader(handle);\n\n\n\n let mut compiled: GLint = gl::FALSE as GLint;\n\n gl::GetShaderiv(handle, gl::COMPILE_STATUS, &mut compiled);\n\n\n\n if compiled == (gl::TRUE as GLint) {\n\n Ok(handle)\n\n } else {\n\n let mut log_len: GLint = 0;\n\n gl::GetShaderiv(handle, gl::INFO_LOG_LENGTH, &mut log_len);\n\n\n\n let mut log: Vec<u8> = Vec::with_capacity(log_len as usize);\n\n gl::GetShaderInfoLog(handle, log_len, null_mut(), log.as_mut_ptr() as *mut GLchar);\n\n\n\n gl::DeleteShader(handle);\n\n\n\n log.set_len(log_len as usize);\n", "file_path": "src/gl33/shader/stage.rs", "rank": 90, "score": 14454.462149897858 }, { "content": " }\n\n }\n\n\n\n let gl_program = GLProgram {\n\n id: program,\n\n uni_sem_map: uni_sem_map,\n\n ubo_sem_map: ubo_sem_map,\n\n };\n\n\n\n Ok((gl_program, warnings))\n\n } else {\n\n let mut log_len: GLint = 0;\n\n gl::GetProgramiv(program, gl::INFO_LOG_LENGTH, &mut log_len);\n\n\n\n let mut log: Vec<u8> = Vec::with_capacity(log_len as usize);\n\n gl::GetProgramInfoLog(program, log_len, null_mut(), log.as_mut_ptr() as *mut GLchar);\n\n\n\n gl::DeleteProgram(program);\n\n\n\n log.set_len(log_len as usize);\n", 
"file_path": "src/gl33/shader/program.rs", "rank": 91, "score": 14452.029652531959 }, { "content": "\n\n Err(ProgramError::LinkFailed(String::from_utf8(log).unwrap()))\n\n }\n\n }\n\n }\n\n\n\n fn free_program(program: &mut Self::Program) {\n\n unsafe { gl::DeleteProgram(program.id) }\n\n }\n\n\n\n fn update_uniforms<F>(program: &Self::Program, f: F) where F: Fn() {\n\n unsafe { gl::UseProgram(program.id) };\n\n f();\n\n unsafe { gl::UseProgram(0) };\n\n }\n\n\n\n fn update1_i32(program: &Self::Program, u: SemIndex, x: i32) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform1i(program.uni_sem_map[&u], x) }\n\n }\n", "file_path": "src/gl33/shader/program.rs", "rank": 92, "score": 14451.951860192732 }, { "content": " (Type::Integral, Dim::Dim2) if typ != gl::INT_VEC2 => Some(\"requested ivec2 doesn't match\".to_owned()),\n\n (Type::Integral, Dim::Dim3) if typ != gl::INT_VEC3 => Some(\"requested ivec3 doesn't match\".to_owned()),\n\n (Type::Integral, Dim::Dim4) if typ != gl::INT_VEC4 => Some(\"requested ivec4 doesn't match\".to_owned()),\n\n (Type::Unsigned, Dim::Dim1) if typ != gl::UNSIGNED_INT => Some(\"requested uint doesn't match\".to_owned()),\n\n (Type::Unsigned, Dim::Dim2) if typ != gl::UNSIGNED_INT_VEC2 => Some(\"requested uvec2 doesn't match\".to_owned()),\n\n (Type::Unsigned, Dim::Dim3) if typ != gl::UNSIGNED_INT_VEC3 => Some(\"requested uvec3 doesn't match\".to_owned()),\n\n (Type::Unsigned, Dim::Dim4) if typ != gl::UNSIGNED_INT_VEC4 => Some(\"requested uvec4 doesn't match\".to_owned()),\n\n (Type::Floating, Dim::Dim1) if typ != gl::FLOAT => Some(\"requested float doesn't match\".to_owned()),\n\n (Type::Floating, Dim::Dim2) if typ != gl::FLOAT_VEC2 => Some(\"requested vec2 doesn't match\".to_owned()),\n\n (Type::Floating, Dim::Dim3) if typ != gl::FLOAT_VEC3 => Some(\"requested vec3 doesn't match\".to_owned()),\n\n (Type::Floating, Dim::Dim4) if typ != gl::FLOAT_VEC4 => Some(\"requested vec4 doesn't match\".to_owned()),\n\n 
(Type::Floating, Dim::Dim22) if typ != gl::FLOAT_MAT2 => Some(\"requested mat2 doesn't match\".to_owned()),\n\n (Type::Floating, Dim::Dim33) if typ != gl::FLOAT_MAT3 => Some(\"requested mat3 doesn't match\".to_owned()),\n\n (Type::Floating, Dim::Dim44) if typ != gl::FLOAT_MAT4 => Some(\"requested mat4 doesn't match\".to_owned()),\n\n (Type::Boolean, Dim::Dim1) if typ != gl::BOOL => Some(\"requested bool doesn't match\".to_owned()),\n\n (Type::Boolean, Dim::Dim2) if typ != gl::BOOL_VEC2 => Some(\"requested bvec2 doesn't match\".to_owned()),\n\n (Type::Boolean, Dim::Dim3) if typ != gl::BOOL_VEC3 => Some(\"requested bvec3 doesn't match\".to_owned()),\n\n (Type::Boolean, Dim::Dim4) if typ != gl::BOOL_VEC4 => Some(\"requested bvec4 doesn't match\".to_owned()),\n\n _ => None\n\n }\n\n}\n\n\n\npub type Uniform<T> = program::Uniform<GL33, T>;\n\npub type Uniformable = program::Uniformable<GL33>;\n", "file_path": "src/gl33/shader/program.rs", "rank": 93, "score": 14451.855747437363 }, { "content": "\n\n let mut linked: GLint = gl::FALSE as GLint;\n\n gl::GetProgramiv(program, gl::LINK_STATUS, &mut linked);\n\n\n\n if linked == (gl::TRUE as GLint) {\n\n let mut uni_sem_map = HashMap::new();\n\n let mut ubo_sem_map = HashMap::new();\n\n let mut warnings = Vec::new();\n\n\n\n for sem in sem_map {\n\n let (loc, warning) = get_uniform_location(program, sem.name(), sem.ty(), sem.dim());\n\n\n\n match loc {\n\n Location::Uniform(location) => uni_sem_map.insert(sem.index(), location),\n\n Location::UniformBlock(index) => ubo_sem_map.insert(sem.index(), index)\n\n };\n\n\n\n // if there’s a warning, add it to the list of warnings\n\n if let Some(warning) = warning {\n\n warnings.push(warning);\n", "file_path": "src/gl33/shader/program.rs", "rank": 94, "score": 14449.844710324049 }, { "content": " fn update2_slice_i32(program: &Self::Program, u: SemIndex, v: &[[i32; 2]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform2iv(program.uni_sem_map[&u], 
v.len() as GLsizei, v.as_ptr() as *const i32) }\n\n }\n\n\n\n fn update3_slice_i32(program: &Self::Program, u: SemIndex, v: &[[i32; 3]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform3iv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const i32) }\n\n }\n\n\n\n fn update4_slice_i32(program: &Self::Program, u: SemIndex, v: &[[i32; 4]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform4iv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const i32) }\n\n }\n\n\n\n fn update1_u32(program: &Self::Program, u: SemIndex, x: u32) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform1ui(program.uni_sem_map[&u], x) }\n\n }\n\n\n", "file_path": "src/gl33/shader/program.rs", "rank": 95, "score": 14448.920358406394 }, { "content": " fn update2_slice_u32(program: &Self::Program, u: SemIndex, v: &[[u32; 2]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform2uiv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const u32) }\n\n }\n\n\n\n fn update3_slice_u32(program: &Self::Program, u: SemIndex, v: &[[u32; 3]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform3uiv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const u32) }\n\n }\n\n\n\n fn update4_slice_u32(program: &Self::Program, u: SemIndex, v: &[[u32; 4]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform4uiv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const u32) }\n\n }\n\n\n\n fn update1_f32(program: &Self::Program, u: SemIndex, x: f32) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform1f(program.uni_sem_map[&u], x) }\n\n }\n\n\n", "file_path": "src/gl33/shader/program.rs", "rank": 96, "score": 14448.920358406394 }, { "content": " fn update3_slice_bool(program: &Self::Program, u: SemIndex, v: &[[bool; 3]]) {\n\n assert!((u as usize) < 
program.uni_sem_map.len());\n\n let v: Vec<_> = v.iter().map(|x| [x[0] as u32, x[1] as u32, x[2] as u32]).collect();\n\n unsafe { gl::Uniform3uiv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const u32) }\n\n }\n\n\n\n fn update4_slice_bool(program: &Self::Program, u: SemIndex, v: &[[bool; 4]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n let v: Vec<_> = v.iter().map(|x| [x[0] as u32, x[1] as u32, x[2] as u32, x[3] as u32]).collect();\n\n unsafe { gl::Uniform4uiv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const u32) }\n\n }\n\n\n\n fn update_texture_unit(program: &Self::Program, u: SemIndex, unit: u32) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform1i(program.uni_sem_map[&u], unit as GLint) }\n\n }\n\n\n\n fn update_buffer_binding(program: &Self::Program, u: SemIndex, binding: u32) {\n\n assert!((u as usize) < program.ubo_sem_map.len());\n\n unsafe { gl::UniformBlockBinding(program.id, program.ubo_sem_map[&u] as GLuint, binding as GLuint) }\n\n }\n\n}\n\n\n", "file_path": "src/gl33/shader/program.rs", "rank": 97, "score": 14448.760172605269 }, { "content": " fn update2_slice_f32(program: &Self::Program, u: SemIndex, v: &[[f32; 2]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform2fv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const f32) }\n\n }\n\n\n\n fn update3_slice_f32(program: &Self::Program, u: SemIndex, v: &[[f32; 3]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform3fv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const f32) }\n\n }\n\n\n\n fn update4_slice_f32(program: &Self::Program, u: SemIndex, v: &[[f32; 4]]) {\n\n assert!((u as usize) < program.uni_sem_map.len());\n\n unsafe { gl::Uniform4fv(program.uni_sem_map[&u], v.len() as GLsizei, v.as_ptr() as *const f32) }\n\n }\n\n\n\n fn update22_f32(program: &Self::Program, u: SemIndex, m: M22) {\n\n assert!((u as usize) < 
program.uni_sem_map.len());\n\n Self::update22_slice_f32(program, u, &[m])\n\n }\n\n\n", "file_path": "src/gl33/shader/program.rs", "rank": 98, "score": 14448.511051058986 }, { "content": "\n\n Err(StageError::CompilationFailed(shader_type, String::from_utf8(log).unwrap()))\n\n }\n\n }\n\n }\n\n\n\n fn free_shader(shader: &mut Self::AStage) {\n\n unsafe { gl::DeleteShader(*shader) }\n\n }\n\n}\n\n\n\n// FIXME: check for GL_ARB_tessellation_shader extension if we need tessellation shaders\n", "file_path": "src/gl33/shader/stage.rs", "rank": 99, "score": 14448.378509632463 } ]
Rust
src/test/instruction_tests/instr_vpsrldq.rs
ftilde/rust-x86asm
f6584b8cfe8e75d978bf7b83a67c69444fd3f161
use instruction_def::*; use test::run_test; use Operand::*; use Reg::*; use RegScale::*; use RegType::*; use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode}; #[test] fn vpsrldq_1() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: Some(Literal8(44)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 217, 115, 222, 44], OperandSize::Dword, ) } #[test] fn vpsrldq_2() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM1)), operand3: Some(Literal8(62)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 209, 115, 217, 62], OperandSize::Qword, ) } #[test] fn vpsrldq_3() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM3)), operand3: Some(Literal8(60)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 229, 115, 219, 60], OperandSize::Dword, ) } #[test] fn vpsrldq_4() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM7)), operand3: Some(Literal8(89)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 221, 115, 223, 89], OperandSize::Qword, ) } #[test] fn vpsrldq_5() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM2)), operand3: Some(Literal8(105)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 249, 115, 218, 105], OperandSize::Dword, ) } #[test] fn vpsrldq_6() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM7)), operand2: 
Some(IndirectScaledIndexedDisplaced( ESI, EDX, Two, 835075344, Some(OperandSize::Xmmword), None, )), operand3: Some(Literal8(110)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 69, 8, 115, 156, 86, 16, 61, 198, 49, 110], OperandSize::Dword, ) } #[test] fn vpsrldq_7() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM9)), operand2: Some(Direct(XMM17)), operand3: Some(Literal8(40)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 177, 53, 8, 115, 217, 40], OperandSize::Qword, ) } #[test] fn vpsrldq_8() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM16)), operand2: Some(IndirectScaledDisplaced( RDX, Four, 681780529, Some(OperandSize::Xmmword), None, )), operand3: Some(Literal8(26)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 125, 0, 115, 28, 149, 49, 37, 163, 40, 26], OperandSize::Qword, ) } #[test] fn vpsrldq_9() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM7)), operand3: Some(Literal8(53)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 229, 115, 223, 53], OperandSize::Dword, ) } #[test] fn vpsrldq_10() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM0)), operand2: Some(IndirectScaledIndexedDisplaced( EBX, ECX, Four, 1085957988, Some(OperandSize::Ymmword), None, )), operand3: Some(Literal8(100)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 125, 40, 115, 156, 139, 100, 103, 186, 64, 100], OperandSize::Dword, ) } #[test] fn vpsrldq_11() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM29)), operand2: 
Some(Direct(YMM18)), operand3: Some(Literal8(52)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 177, 21, 32, 115, 218, 52], OperandSize::Qword, ) } #[test] fn vpsrldq_12() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM26)), operand2: Some(Indirect(RSI, Some(OperandSize::Ymmword), None)), operand3: Some(Literal8(35)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 45, 32, 115, 30, 35], OperandSize::Qword, ) } #[test] fn vpsrldq_13() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(ZMM0)), operand2: Some(Direct(ZMM7)), operand3: Some(Literal8(123)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 125, 72, 115, 223, 123], OperandSize::Dword, ) } #[test] fn vpsrldq_14() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(ZMM1)), operand2: Some(IndirectScaledDisplaced( ECX, Four, 496895416, Some(OperandSize::Zmmword), None, )), operand3: Some(Literal8(116)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 117, 72, 115, 28, 141, 184, 5, 158, 29, 116], OperandSize::Dword, ) } #[test] fn vpsrldq_15() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM25)), operand3: Some(Literal8(44)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 145, 117, 72, 115, 217, 44], OperandSize::Qword, ) } #[test] fn vpsrldq_16() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(ZMM3)), operand2: Some(IndirectScaledDisplaced( RDI, Two, 260769911, Some(OperandSize::Zmmword), None, )), operand3: Some(Literal8(124)), operand4: None, lock: false, rounding_mode: 
None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 101, 72, 115, 28, 125, 119, 8, 139, 15, 124], OperandSize::Qword, ) }
use instruction_def::*; use test::run_test; use Operand::*; use Reg::*; use RegScale::*; use RegType::*; use {BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Re
erandSize::Xmmword), None, )), operand3: Some(Literal8(26)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 125, 0, 115, 28, 149, 49, 37, 163, 40, 26], OperandSize::Qword, ) } #[test] fn vpsrldq_9() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM7)), operand3: Some(Literal8(53)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 229, 115, 223, 53], OperandSize::Dword, ) } #[test] fn vpsrldq_10() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM0)), operand2: Some(IndirectScaledIndexedDisplaced( EBX, ECX, Four, 1085957988, Some(OperandSize::Ymmword), None, )), operand3: Some(Literal8(100)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 125, 40, 115, 156, 139, 100, 103, 186, 64, 100], OperandSize::Dword, ) } #[test] fn vpsrldq_11() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM29)), operand2: Some(Direct(YMM18)), operand3: Some(Literal8(52)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 177, 21, 32, 115, 218, 52], OperandSize::Qword, ) } #[test] fn vpsrldq_12() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM26)), operand2: Some(Indirect(RSI, Some(OperandSize::Ymmword), None)), operand3: Some(Literal8(35)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 45, 32, 115, 30, 35], OperandSize::Qword, ) } #[test] fn vpsrldq_13() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(ZMM0)), operand2: Some(Direct(ZMM7)), operand3: Some(Literal8(123)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: 
false, mask: None, broadcast: None, }, &[98, 241, 125, 72, 115, 223, 123], OperandSize::Dword, ) } #[test] fn vpsrldq_14() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(ZMM1)), operand2: Some(IndirectScaledDisplaced( ECX, Four, 496895416, Some(OperandSize::Zmmword), None, )), operand3: Some(Literal8(116)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 117, 72, 115, 28, 141, 184, 5, 158, 29, 116], OperandSize::Dword, ) } #[test] fn vpsrldq_15() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(ZMM1)), operand2: Some(Direct(ZMM25)), operand3: Some(Literal8(44)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 145, 117, 72, 115, 217, 44], OperandSize::Qword, ) } #[test] fn vpsrldq_16() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(ZMM3)), operand2: Some(IndirectScaledDisplaced( RDI, Two, 260769911, Some(OperandSize::Zmmword), None, )), operand3: Some(Literal8(124)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 241, 101, 72, 115, 28, 125, 119, 8, 139, 15, 124], OperandSize::Qword, ) }
g, RoundingMode}; #[test] fn vpsrldq_1() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM4)), operand2: Some(Direct(XMM6)), operand3: Some(Literal8(44)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 217, 115, 222, 44], OperandSize::Dword, ) } #[test] fn vpsrldq_2() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM5)), operand2: Some(Direct(XMM1)), operand3: Some(Literal8(62)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 209, 115, 217, 62], OperandSize::Qword, ) } #[test] fn vpsrldq_3() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM3)), operand2: Some(Direct(YMM3)), operand3: Some(Literal8(60)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 229, 115, 219, 60], OperandSize::Dword, ) } #[test] fn vpsrldq_4() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(YMM4)), operand2: Some(Direct(YMM7)), operand3: Some(Literal8(89)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 221, 115, 223, 89], OperandSize::Qword, ) } #[test] fn vpsrldq_5() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM0)), operand2: Some(Direct(XMM2)), operand3: Some(Literal8(105)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[197, 249, 115, 218, 105], OperandSize::Dword, ) } #[test] fn vpsrldq_6() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM7)), operand2: Some(IndirectScaledIndexedDisplaced( ESI, EDX, Two, 835075344, Some(OperandSize::Xmmword), None, )), operand3: Some(Literal8(110)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, 
sae: false, mask: None, broadcast: None, }, &[98, 241, 69, 8, 115, 156, 86, 16, 61, 198, 49, 110], OperandSize::Dword, ) } #[test] fn vpsrldq_7() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM9)), operand2: Some(Direct(XMM17)), operand3: Some(Literal8(40)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None, }, &[98, 177, 53, 8, 115, 217, 40], OperandSize::Qword, ) } #[test] fn vpsrldq_8() { run_test( &Instruction { mnemonic: Mnemonic::VPSRLDQ, operand1: Some(Direct(XMM16)), operand2: Some(IndirectScaledDisplaced( RDX, Four, 681780529, Some(Op
random
[ { "content": "fn encode64_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode64_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 0, "score": 280256.95362013153 }, { "content": "fn encode16_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode16_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 1, "score": 280256.95362013153 }, { "content": "fn encode32_helper2(mnemonic: Mnemonic, operand1: Operand, operand2: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(operand1),\n\n operand2: Some(operand2),\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode32_helper(&instr, expected);\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 2, "score": 280256.9536201316 }, { "content": "fn random_reg_of_size(size: OperandSize) -> Reg {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16(),\n\n OperandSize::Dword => random_reg_32(),\n\n OperandSize::Qword => random_reg_64(),\n\n _ => panic!(\"Invalid general register size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 3, "score": 250622.95876428366 }, { "content": "fn random_reg_of_size_no_stack(size: OperandSize) -> Reg {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16_no_stack(),\n\n OperandSize::Dword => random_reg_32_no_stack(),\n\n OperandSize::Qword => random_reg_64_no_stack(),\n\n _ => 
panic!(\"Invalid general register size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 4, "score": 247590.56002620317 }, { "content": "#[allow(dead_code)]\n\nfn encode32_helper1(mnemonic: Mnemonic, op1: Operand, expected: &Vec<u8>) {\n\n let instr = Instruction {\n\n mnemonic: mnemonic,\n\n operand1: Some(op1),\n\n operand2: None,\n\n operand3: None,\n\n operand4: None,\n\n ..Default::default()\n\n };\n\n encode32_helper(&instr, expected);\n\n}\n", "file_path": "src/test/mod.rs", "rank": 5, "score": 239111.0027203758 }, { "content": "fn make_rm(size: OperandSize, reg_type: RegType) -> InstructionToken {\n\n let vec = vec![InstructionToken::Reg(reg_type, size), InstructionToken::Mem(size)];\n\n InstructionToken::Set(vec)\n\n}\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 6, "score": 226741.94853232466 }, { "content": "fn random_reg(reg_type: RegType, size: OperandSize, addr_size: OperandSize, \n\n def: &InstructionDefinition) -> Reg {\n\n match reg_type {\n\n RegType::General => {\n\n match size {\n\n OperandSize::Byte => random_reg_8(),\n\n OperandSize::Word => random_reg_16(),\n\n OperandSize::Dword => random_reg_32(),\n\n OperandSize::Qword => random_reg_64(),\n\n OperandSize::Unsized => random_reg_of_size(addr_size),\n\n _ => panic!(\"Invalid general register size: {:?}.\", size)\n\n }\n\n },\n\n RegType::Avx => {\n\n let allow_all = if let Some(CompositePrefix::Evex {..}) = def.composite_prefix {\n\n addr_size == OperandSize::Qword } else { false };\n\n match size {\n\n OperandSize::Xmmword => random_xmm_reg(allow_all),\n\n OperandSize::Ymmword => random_ymm_reg(allow_all),\n\n OperandSize::Zmmword => random_zmm_reg(allow_all),\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 7, "score": 226276.24141309317 }, { "content": "fn parse_operand_encoding_opt(operand: &str) -> Option<(OperandEncoding, OperandAccess)> {\n\n if operand.len() != 0 {\n\n Some(parse_operand_encoding(operand.as_bytes()).unwrap().1)\n\n } else 
{\n\n None\n\n }\n\n}\n\n\n\nnamed!(instruction_sep, eat_separator!(&b\", \"[..]));\n\nnamed!(parse_token_list<Vec<Vec<InstructionToken>>>, separated_list!(instruction_sep, parse_instruction_part));\n\nnamed!(parse_instruction<&[u8], (String, Vec<InstructionToken>), u32>, do_parse!(\n\n mnemonic: alphanumeric >> opt!(instruction_sep) >>\n\n tokens: opt!(complete!(parse_token_list)) >>\n\n (build_result(mnemonic, tokens))\n\n )\n\n);\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 8, "score": 220171.35634447268 }, { "content": "fn write_operand<W: Write>(op: &Operand, instr_def: &InstructionDefinition, f: &mut W)\n\n -> io::Result<()> { \n\n match *op {\n\n Operand::Direct(reg) => write!(f, \"{}\", reg),\n\n Operand::Indirect(reg, size, seg) => \n\n write_indirect(f, Some(reg), None, None, None, size, seg, instr_def),\n\n Operand::IndirectDisplaced(reg, dsp, size, seg) =>\n\n write_indirect(f, Some(reg), None, None, Some(dsp), size, seg, instr_def),\n\n Operand::IndirectScaledIndexed(base, index, scale, size, seg) => \n\n write_indirect(f, Some(base), Some(index), Some(scale), None, size, seg, instr_def),\n\n Operand::IndirectScaledIndexedDisplaced(base, index, scale, dsp, size, seg) =>\n\n write_indirect(f, Some(base), Some(index), Some(scale), Some(dsp), size, seg,\n\n instr_def),\n\n Operand::IndirectScaledDisplaced(reg, scale, dsp, size, seg) =>\n\n write_indirect(f, Some(reg), None, Some(scale), Some(dsp), size, seg, instr_def),\n\n Operand::Memory(addr, size, seg) |\n\n Operand::Offset(addr, size, seg) => size_seg_helper(f, size, seg, |fmt| write!(fmt, \"[{}]\", addr)), // TODO Is this correct?\n\n Operand::Literal8(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal16(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal32(v) => write!(f, \"0x{:X}\", v),\n\n Operand::Literal64(v) => write!(f, \"0x{:X}\", v),\n\n Operand::MemoryAndSegment16(seg, addr) => write!(f, \"0x{:X}:0x{:X}\", seg, addr),\n\n Operand::MemoryAndSegment32(seg, addr) => write!(f, 
\"0x{:X}:0x{:X}\", seg, addr),\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 9, "score": 216796.06457283392 }, { "content": "fn random_mib(size: OperandSize, addr_size: OperandSize) -> Operand {\n\n Operand::IndirectScaledIndexed(\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_of_size_no_stack(addr_size),\n\n RegScale::One,\n\n Some(size), None)\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 10, "score": 210642.13609289666 }, { "content": "fn random_mem(size: OperandSize, addr_size: OperandSize) -> Operand {\n\n if addr_size != OperandSize::Word {\n\n match rand::random::<u32>() % 5 { // Select addressing mode\n\n 0 => { // Indirect - [EAX]\n\n Operand::Indirect(\n\n random_reg_of_size_no_stack(addr_size),\n\n Some(size), None)\n\n },\n\n 1 => { // Indirect Displaced - [EAX+5]\n\n Operand::IndirectDisplaced(\n\n random_reg_of_size_no_stack(addr_size),\n\n (rand::random::<u32>() as u64) & 0x7FFFFFFF,\n\n Some(size), None)\n\n },\n\n 2 => { // Indirect Scaled Indexed - [EAX+EBX*2]\n\n Operand::IndirectScaledIndexed(\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_of_size_no_stack(addr_size),\n\n random_reg_scale(),\n\n Some(size), None)\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 11, "score": 210642.13609289666 }, { "content": "fn random_imm(size: OperandSize) -> Operand {\n\n let mut gen = rand::thread_rng();\n\n match size {\n\n OperandSize::Byte => Operand::Literal8(gen.gen_range::<u8>(0, 128)),\n\n OperandSize::Word => Operand::Literal16(\n\n gen.gen_range::<u16>(u8::max_value() as u16 + 1, i16::max_value() as u16)),\n\n OperandSize::Dword => Operand::Literal32(\n\n gen.gen_range::<u32>(u16::max_value() as u32 + 1, i32::max_value() as u32)),\n\n OperandSize::Qword => Operand::Literal64(\n\n gen.gen_range::<u64>(u32::max_value() as u64 + 1, i64::max_value() as u64)),\n\n OperandSize::Far16 => Operand::MemoryAndSegment16(rand::random(), rand::random()),\n\n OperandSize::Far32 => 
Operand::MemoryAndSegment32(rand::random(), rand::random()),\n\n _ => panic!(\"Invalid immediate value size: {:?}.\", size)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 12, "score": 207766.85809683916 }, { "content": "fn random_fixed(fixed_op: FixedOperand) -> Operand {\n\n match fixed_op {\n\n FixedOperand::Reg(reg) => Operand::Direct(reg),\n\n FixedOperand::Constant(c) => Operand::Literal8(c as u8)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 13, "score": 205977.06770755706 }, { "content": "#[test]\n\nfn operand_type_mask_reg() {\n\n decode_helper(\n\n &vec![0x62, 0xF1, 0xED, 0x28, 0xC2, 0xDB, 0x05],\n\n Mode::Long,\n\n &Instruction::new4(\n\n Mnemonic::VCMPPD,\n\n Operand::Direct(Reg::K3),\n\n Operand::Direct(Reg::YMM2),\n\n Operand::Direct(Reg::YMM3),\n\n Operand::Literal8(5),\n\n ),\n\n ); // VCMPPD K3, YMM2, YMM3, 5\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 14, "score": 201370.6497836186 }, { "content": "fn instr_token_to_operand_type(token: &InstructionToken) -> (OperandType, Option<OperandSize>) {\n\n match *token {\n\n InstructionToken::Reg(reg_type, op_size)\n\n => (OperandType::Reg(reg_type), Some(op_size)),\n\n InstructionToken::Mem(op_size)\n\n => (OperandType::Mem(Some(op_size)), Some(op_size)),\n\n InstructionToken::Imm(op_size)\n\n => (OperandType::Imm, Some(op_size)),\n\n InstructionToken::Bcst(bcst_size)\n\n => (OperandType::Bcst(bcst_size), None),\n\n InstructionToken::Rel(op_size)\n\n => (OperandType::Rel(op_size), Some(op_size)),\n\n InstructionToken::Offset(op_size)\n\n => (OperandType::Offset, Some(op_size)),\n\n InstructionToken::FixedReg(reg)\n\n => (OperandType::Fixed(FixedOperand::Reg(reg)), Some(reg.size())),\n\n InstructionToken::Constant(val)\n\n => (OperandType::Fixed(FixedOperand::Constant(val)), Some(OperandSize::Unsized)),\n\n InstructionToken::Mib\n\n => (OperandType::Mib, Some(OperandSize::Unsized)),\n\n _ => panic!(\"Unsupported type: {:?}\", *token)\n\n 
}\n\n}\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 15, "score": 200097.0847270127 }, { "content": "fn compare_sizes(a: &[Option<OperandSize>; 4], b: &[Option<OperandSize>; 4]) -> bool {\n\n a.iter()\n\n .zip(b.iter())\n\n .all(|(m_s1, m_s2)| match (*m_s1, *m_s2) {\n\n (Some(s1), Some(s2)) => {\n\n s1 == s2 || s1 == OperandSize::Unsized || s2 == OperandSize::Unsized\n\n }\n\n (None, None) => true,\n\n _ => false,\n\n })\n\n}\n\n\n", "file_path": "src/instruction_def.rs", "rank": 16, "score": 192845.6586978265 }, { "content": "fn random_zmm_reg(use_all: bool) -> Reg { \n\n if use_all { random_of(&[\n\n Reg::ZMM0, Reg::ZMM1, Reg::ZMM2, Reg::ZMM3, Reg::ZMM4, Reg::ZMM5, Reg::ZMM6, Reg::ZMM7,\n\n Reg::ZMM8, Reg::ZMM9, Reg::ZMM10, Reg::ZMM11, Reg::ZMM12, Reg::ZMM13, Reg::ZMM14, Reg::ZMM15,\n\n Reg::ZMM16, Reg::ZMM17, Reg::ZMM18, Reg::ZMM19, Reg::ZMM20, Reg::ZMM21, Reg::ZMM22, Reg::ZMM23,\n\n Reg::ZMM24, Reg::ZMM25, Reg::ZMM26, Reg::ZMM27, Reg::ZMM28, Reg::ZMM29, Reg::ZMM30, Reg::ZMM31\n\n ]) } else { random_of(&[\n\n Reg::ZMM0, Reg::ZMM1, Reg::ZMM2, Reg::ZMM3, Reg::ZMM4, Reg::ZMM5, Reg::ZMM6, Reg::ZMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 17, "score": 191565.45545156102 }, { "content": "fn random_ymm_reg(use_all: bool) -> Reg { \n\n if use_all { random_of(&[\n\n Reg::YMM0, Reg::YMM1, Reg::YMM2, Reg::YMM3, Reg::YMM4, Reg::YMM5, Reg::YMM6, Reg::YMM7,\n\n Reg::YMM8, Reg::YMM9, Reg::YMM10, Reg::YMM11, Reg::YMM12, Reg::YMM13, Reg::YMM14, Reg::YMM15,\n\n Reg::YMM16, Reg::YMM17, Reg::YMM18, Reg::YMM19, Reg::YMM20, Reg::YMM21, Reg::YMM22, Reg::YMM23,\n\n Reg::YMM24, Reg::YMM25, Reg::YMM26, Reg::YMM27, Reg::YMM28, Reg::YMM29, Reg::YMM30, Reg::YMM31\n\n ]) } else { random_of(&[\n\n Reg::YMM0, Reg::YMM1, Reg::YMM2, Reg::YMM3, Reg::YMM4, Reg::YMM5, Reg::YMM6, Reg::YMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 18, "score": 191565.45545156102 }, { "content": "fn random_xmm_reg(use_all: bool) -> Reg { \n\n if 
use_all { random_of(&[\n\n Reg::XMM0, Reg::XMM1, Reg::XMM2, Reg::XMM3, Reg::XMM4, Reg::XMM5, Reg::XMM6, Reg::XMM7,\n\n Reg::XMM8, Reg::XMM9, Reg::XMM10, Reg::XMM11, Reg::XMM12, Reg::XMM13, Reg::XMM14, Reg::XMM15,\n\n Reg::XMM16, Reg::XMM17, Reg::XMM18, Reg::XMM19, Reg::XMM20, Reg::XMM21, Reg::XMM22, Reg::XMM23,\n\n Reg::XMM24, Reg::XMM25, Reg::XMM26, Reg::XMM27, Reg::XMM28, Reg::XMM29, Reg::XMM30, Reg::XMM31\n\n ]) } else { random_of(&[\n\n Reg::XMM0, Reg::XMM1, Reg::XMM2, Reg::XMM3, Reg::XMM4, Reg::XMM5, Reg::XMM6, Reg::XMM7,\n\n ]) }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 19, "score": 191565.45545156102 }, { "content": "fn make_operand_combinations(instr: &InstructionDefinition) -> Vec<Vec<OperandDefinition>> {\n\n let set_parts = instr.operands.iter().by_ref().filter_map(\n\n |maybe_op| maybe_op.as_ref().and_then(\n\n |op| if let OperandType::Set(ref items) = op.op_type {\n\n if instr.mnemonic.find(\"CVT\").is_none() {\n\n Some(items.clone())\n\n } else {\n\n Some(items.iter().filter(|i| if let OperandType::Bcst(_) = **i { false }\n\n else { true }).cloned().collect())\n\n }\n\n } else { None }\n\n )\n\n ).next();\n\n if let Some(parts) = set_parts { \n\n parts.iter().map(|part| instr.operands.iter().filter_map(\n\n |maybe_op| maybe_op.as_ref().map(|op| if let OperandType::Set(_) = op.op_type {\n\n OperandDefinition {\n\n encoding: op.encoding,\n\n access: op.access,\n\n size: op.size,\n\n op_type: part.clone()\n\n }\n\n } else { op.clone() }\n\n )).collect()).collect()\n\n } else {\n\n vec![instr.operands.iter().filter_map(|x| x.as_ref()).cloned().collect()]\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 20, "score": 185780.6062209554 }, { "content": "fn generate_implied_encoding(op_type: &OperandType, has_embedded_reg: bool) \n\n -> (OperandEncoding, OperandAccess) {\n\n match *op_type { // TODO Better implied operand access\n\n OperandType::Reg(reg_type) => {\n\n (OperandEncoding::ModRmRm, OperandAccess::Read)\n\n 
},\n\n OperandType::Mem(_) => (OperandEncoding::ModRmRm, OperandAccess::Read),\n\n OperandType::Imm => (OperandEncoding::Imm, OperandAccess::Read),\n\n OperandType::Constant => (OperandEncoding::Fixed, OperandAccess::Read),\n\n OperandType::Offset => (OperandEncoding::Imm, OperandAccess::Read),\n\n OperandType::Rel(_) => (OperandEncoding::Imm, OperandAccess::Read),\n\n OperandType::Mib => (OperandEncoding::ModRmRm, OperandAccess::Read),\n\n OperandType::Bcst(_) => (OperandEncoding::ModRmRm, OperandAccess::Read),\n\n OperandType::Fixed(_) => (OperandEncoding::Fixed, OperandAccess::Read),\n\n OperandType::Set(_) => (OperandEncoding::ModRmRm, OperandAccess::Read)\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 21, "score": 176909.59031000725 }, { "content": "fn get_op_sizes(def: &InstructionDefinition, instr: &Instruction) -> [Option<OperandSize>; 4] {\n\n let ops = instr.operands();\n\n let mut iter = def.operands.iter().zip(ops.iter()).map(|(def, op)| {\n\n def.as_ref()\n\n .map(|d| op.map_or(d.size, |o| d.get_real_size(&o)))\n\n });\n\n [\n\n iter.next().unwrap_or(None),\n\n iter.next().unwrap_or(None),\n\n iter.next().unwrap_or(None),\n\n iter.next().unwrap_or(None),\n\n ]\n\n}\n\n\n", "file_path": "src/instruction_def.rs", "rank": 22, "score": 169734.7716982148 }, { "content": "fn random_debug_reg() -> Reg { random_of(&[Reg::DR0, Reg::DR1, Reg::DR2, Reg::DR3, Reg::DR4, Reg::DR5]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 23, "score": 169042.74534991494 }, { "content": "fn random_segment_reg() -> Reg { random_of(&[Reg::CS, Reg::DS, Reg::ES, Reg::FS, Reg::GS, Reg::SS]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 24, "score": 169042.74534991494 }, { "content": "fn random_control_reg() -> Reg { random_of(&[Reg::CR0, Reg::CR1, Reg::CR2, Reg::CR3, Reg::CR4 ]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 25, "score": 167469.63867650472 }, { "content": "fn instr_tokens_to_operand<'a, I>(tokens: I, enc_info: 
Option<(OperandEncoding, OperandAccess)>,\n\n has_embedded_reg: bool) -> OperandDefinition where I : Iterator<Item=&'a InstructionToken> {\n\n let pairs: Vec<(OperandType, Option<OperandSize>)> = \n\n tokens.map(instr_token_to_operand_type).collect();\n\n let size = pairs.iter().filter_map(|p| p.1).filter(|s| *s != OperandSize::Unsized).next()\n\n .unwrap_or(OperandSize::Unsized);\n\n let op_type = if pairs.len() == 1 {\n\n pairs[0].0.clone()\n\n } else {\n\n OperandType::Set(pairs.iter().map(|i| i.0.clone()).collect())\n\n };\n\n\n\n let (enc, access) = enc_info.unwrap_or_else(\n\n || generate_implied_encoding(&op_type, has_embedded_reg));\n\n\n\n OperandDefinition {\n\n encoding: enc,\n\n access: access,\n\n size: size,\n\n op_type: op_type\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/main.rs", "rank": 26, "score": 166207.4125774673 }, { "content": "fn random_bound_reg() -> Reg { random_of(&[Reg::BND0, Reg::BND1, Reg::BND2, Reg::BND3]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 27, "score": 165368.3111783985 }, { "content": "fn parse_operand_parts_final(parts: Vec<InstructionToken>) -> InstructionToken {\n\n if parts.len() == 1 { parts.into_iter().next().unwrap() }\n\n else { InstructionToken::Set(parts) }\n\n}\n\n\n\nnamed!(type_suffix, complete!(alt_complete!(\n\n tag!(\"fp\") | \n\n tag!(\"int\") |\n\n tag!(\"dec\") |\n\n tag!(\"bcd\")\n\n)));\n\n\n\nnamed!(parse_operand_part<InstructionToken>, alt_complete!(\n\n tag!(\"imm8/r\") => { |_| InstructionToken::Imm(OperandSize::Byte) } |\n\n do_parse!(tag!(\"rel\") >> size: parse_size >> (make_sized(size, |s| InstructionToken::Rel(s))) ) |\n\n do_parse!(tag!(\"r/m\") >> size: parse_size >> (make_rm(size, RegType::General)) ) |\n\n do_parse!(tag!(\"imm\") >> size: parse_size >> (make_sized(size, |s| InstructionToken::Imm(s))) ) |\n\n do_parse!(tag!(\"moffs\") >> size: parse_size >> (make_sized(size,\n\n |s| InstructionToken::Offset(s))) ) |\n\n tag!(\"Sreg\") => { |_| 
InstructionToken::Reg(RegType::Segment, OperandSize::Word) } |\n", "file_path": "gen_defs/src/main.rs", "rank": 28, "score": 164802.63863106494 }, { "content": "fn random_reg_8() -> Reg { random_of(&[Reg::BL, Reg::CL, Reg::DL]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 29, "score": 162900.2938423208 }, { "content": "fn build_test_instructions(def: &InstructionDefinition, addr_size: OperandSize) -> Vec<Instruction> {\n\n let op_combinations = make_operand_combinations(def);\n\n op_combinations.into_iter().filter(filter_op_combination)\n\n .map(|op_c| build_test_instruction(def, op_c, addr_size)).collect()\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 30, "score": 162652.4451438027 }, { "content": "fn encode(instr: &Instruction, def: &InstructionDefinition, addr_size: OperandSize)\n\n -> io::Result<Vec<u8>> {\n\n // Write instruction to file\n\n let mut test_file = File::create(\"test.s\")?;\n\n write!(test_file, \".intel_syntax noprefix\\n\")?;\n\n write!(test_file, \".code{}\\n\", match addr_size {\n\n OperandSize::Word => \"16\",\n\n OperandSize::Dword => \"32\",\n\n OperandSize::Qword => \"64\",\n\n _ => panic!(\"Invalid addressing size.\")\n\n })?;\n\n write_instruction(instr, def, &mut test_file)?;\n\n write!(test_file, \"\\n\")?;\n\n\n\n // Run assembler\n\n let as_result = Command::new(\"as\")\n\n .args(&[\"test.s\", \"-o\", \"test.out\"])\n\n .spawn()?\n\n .wait()?;\n\n if !as_result.success() {\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 31, "score": 162273.9636502332 }, { "content": "fn build_test_operand(instr: &mut Instruction, instr_def: &InstructionDefinition,\n\n def: &OperandDefinition, addr_size: OperandSize) -> Operand {\n\n match def.op_type {\n\n OperandType::Reg(reg_type) =>\n\n Operand::Direct(random_reg(reg_type, def.size, addr_size, instr_def)),\n\n OperandType::Mem(size) => random_mem(size.unwrap_or(def.size), addr_size),\n\n OperandType::Imm => random_imm(def.size),\n\n OperandType::Offset => 
Operand::Offset(rand_value_of_size(def.size), Some(def.size), None),\n\n OperandType::Rel(op_size) => random_imm(op_size), // TODO Is this correct?\n\n OperandType::Mib => random_mib(def.size, addr_size),\n\n OperandType::Bcst(bcst_size) => random_mem(bcst_size, addr_size),\n\n OperandType::Fixed(fixed_op) => random_fixed(fixed_op),\n\n OperandType::Constant => unimplemented!(), // TODO What is this?\n\n _ => unreachable!() // Set(_) should be split apart already\n\n }\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 32, "score": 162273.9636502332 }, { "content": "fn build_test_instruction(def: &InstructionDefinition, op_defs: Vec<OperandDefinition>,\n\n addr_size: OperandSize) -> Instruction {\n\n\n\n let mut instr = Instruction {\n\n mnemonic: def.mnemonic.clone(),\n\n .. Default::default()\n\n };\n\n\n\n let first_op_not_mem = op_defs.iter().next().map(|o| !o.op_type.is_mem()).unwrap_or(true);\n\n if def.allow_mask && first_op_not_mem { instr.mask = Some(random_mask()); }\n\n if def.allow_merge_mode && first_op_not_mem { instr.merge_mode = Some(MergeMode::Zero) }\n\n\n\n if op_defs.iter().all(|d| !d.op_type.is_mem()) {\n\n if def.allow_rounding & op_defs.iter().all(\n\n |op_def| if let OperandType::Reg(_) = op_def.op_type { true } else { false })\n\n { instr.rounding_mode = Some(random_rounding_mode()); }\n\n else if def.allow_sae { instr.sae = true; }\n\n }\n\n\n\n let broadcast_size = op_defs.iter().filter_map(\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 33, "score": 159824.04682653173 }, { "content": "fn size_seg_helper<F, W: Write>(f: &mut W, size: Option<OperandSize>, seg: Option<SegmentReg>,\n\n action: F) -> io::Result<()> where F: Fn(&mut W) -> io::Result<()> {\n\n write_size(f, size)?;\n\n write_seg(f, seg)?;\n\n action(f)\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 34, "score": 157747.55135516648 }, { "content": "fn run_test(instr: &Instruction, expected: &[u8], addr_size: OperandSize) {\n\n let mut buffer = 
Cursor::new(Vec::new());\n\n instr\n\n .encode(&mut buffer, Mode::from_size(addr_size).unwrap())\n\n .expect(\"Encoding failed\");\n\n if &buffer.get_ref()[..] != expected {\n\n println!(\"Test failed.\");\n\n print!(\"Output: [\");\n\n output_hex_array(buffer.get_ref());\n\n println!(\"]\");\n\n print!(\"Expected: [\");\n\n output_hex_array(expected);\n\n println!(\"]\");\n\n panic!(\n\n \"Failure. Mode: {:?}.\\nInstruction: {:?}.\\n\",\n\n addr_size, instr\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/test/mod.rs", "rank": 35, "score": 157115.87695852754 }, { "content": "fn make_sized<F>(size: OperandSize, constructor: F) -> InstructionToken\n\n where F: Fn(OperandSize) -> InstructionToken {\n\n constructor(size)\n\n}\n\n \n\nnamed!(parse_opcode<Vec<OpcodeToken>>, many1!(parse_opcode_token));\n\n\n\nnamed!(parse_opcode_token<OpcodeToken>, ws!(alt_complete!(\n\n tag!(\"NP\") => { |_| OpcodeToken::NoPrefix } |\n\n alt_complete!(\n\n tag_no_case!(\"REX.W + \") |\n\n tag_no_case!(\"REX.W\" )\n\n ) => { |_| OpcodeToken::RexW } |\n\n alt_complete!(\n\n tag!(\"REX.R + \") | // TODO?\n\n tag!(\"REX + \") |\n\n tag!(\"REX\")\n\n ) => { |_| OpcodeToken::Rex } |\n\n alt_complete!(tag!(\"ib\") | tag!(\"imm8\")) => { |_| OpcodeToken::Imm(OperandSize::Byte) } |\n\n tag!(\"iw\") => { |_| OpcodeToken::Imm(OperandSize::Word) } |\n", "file_path": "gen_defs/src/main.rs", "rank": 36, "score": 154389.40443161488 }, { "content": "fn get_operand_size_prefix(behavior: OperandSizePrefixBehavior, mode: Mode) -> bool {\n\n match behavior {\n\n OperandSizePrefixBehavior::Always => true,\n\n OperandSizePrefixBehavior::Never => false,\n\n OperandSizePrefixBehavior::RealOnly => mode == Mode::Real,\n\n OperandSizePrefixBehavior::NotReal => mode != Mode::Real,\n\n }\n\n}\n\n\n", "file_path": "src/encoding.rs", "rank": 37, "score": 151618.58642173297 }, { "content": "use instruction::{Reg, RegScale, SegmentReg};\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug, Copy)]\n\npub enum Operand {\n\n 
Direct(Reg),\n\n Indirect(Reg, Option<OperandSize>, Option<SegmentReg>),\n\n IndirectDisplaced(Reg, u64, Option<OperandSize>, Option<SegmentReg>),\n\n IndirectScaledIndexed(Reg, Reg, RegScale, Option<OperandSize>, Option<SegmentReg>),\n\n IndirectScaledIndexedDisplaced(\n\n Reg,\n\n Reg,\n\n RegScale,\n\n u64,\n\n Option<OperandSize>,\n\n Option<SegmentReg>,\n\n ),\n\n IndirectScaledDisplaced(Reg, RegScale, u64, Option<OperandSize>, Option<SegmentReg>),\n\n Memory(u64, Option<OperandSize>, Option<SegmentReg>),\n\n Offset(u64, Option<OperandSize>, Option<SegmentReg>),\n\n Literal8(u8),\n", "file_path": "src/operand.rs", "rank": 38, "score": 148979.25732576803 }, { "content": " match *self {\n\n Operand::Direct(reg) => reg.is_sse(),\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_avx(&self) -> bool {\n\n match *self {\n\n Operand::Direct(reg) => reg.is_avx(),\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_far_pointer(&self) -> bool {\n\n match *self {\n\n Operand::MemoryAndSegment16(..) | Operand::MemoryAndSegment32(..) => true,\n\n _ => false,\n\n }\n\n }\n\n}\n", "file_path": "src/operand.rs", "rank": 39, "score": 148973.2114401971 }, { "content": " Operand::Direct(reg) => reg.is_fpu(),\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_flags(&self) -> bool {\n\n match *self {\n\n Operand::Direct(reg) => reg.is_flags(),\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_mmx(&self) -> bool {\n\n match *self {\n\n Operand::Direct(reg) => reg.is_mmx(),\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_sse(&self) -> bool {\n", "file_path": "src/operand.rs", "rank": 40, "score": 148972.92572047026 }, { "content": " Operand::Literal32(_) => Some(OperandSize::Dword),\n\n Operand::Literal64(_) => Some(OperandSize::Qword),\n\n Operand::MemoryAndSegment16(..) | Operand::MemoryAndSegment32(..) => None, // TODO?\n\n }\n\n }\n\n\n\n pub fn is_scaled_indexed(&self) -> bool {\n\n match *self {\n\n Operand::IndirectScaledIndexed(..) | Operand::IndirectScaledIndexedDisplaced(..) 
=> {\n\n true\n\n }\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn segment_reg(&self) -> Option<SegmentReg> {\n\n match *self {\n\n Operand::Indirect(_, _, seg)\n\n | Operand::IndirectDisplaced(_, _, _, seg)\n\n | Operand::IndirectScaledIndexed(_, _, _, _, seg)\n", "file_path": "src/operand.rs", "rank": 41, "score": 148972.435815647 }, { "content": " Literal16(u16),\n\n Literal32(u32),\n\n Literal64(u64),\n\n MemoryAndSegment16(u16, u16),\n\n MemoryAndSegment32(u16, u32),\n\n}\n\n\n\nimpl Operand {\n\n pub fn size(&self) -> Option<OperandSize> {\n\n match *self {\n\n Operand::Direct(reg) => Some(reg.size()),\n\n Operand::Indirect(_, size, _)\n\n | Operand::IndirectDisplaced(_, _, size, _)\n\n | Operand::IndirectScaledIndexed(_, _, _, size, _)\n\n | Operand::IndirectScaledIndexedDisplaced(_, _, _, _, size, _)\n\n | Operand::IndirectScaledDisplaced(_, _, _, size, _)\n\n | Operand::Memory(_, size, _)\n\n | Operand::Offset(_, size, _) => size,\n\n Operand::Literal8(_) => Some(OperandSize::Byte),\n\n Operand::Literal16(_) => Some(OperandSize::Word),\n", "file_path": "src/operand.rs", "rank": 42, "score": 148972.07722323175 }, { "content": " pub fn bits(&self) -> u32 {\n\n match *self {\n\n OperandSize::Unsized => 0, // TODO?\n\n OperandSize::Byte => 8,\n\n OperandSize::Word => 16,\n\n OperandSize::Dword => 32,\n\n OperandSize::Far16 => 32,\n\n OperandSize::Fword => 48,\n\n OperandSize::Far32 => 48,\n\n OperandSize::Qword => 64,\n\n OperandSize::Tbyte => 80,\n\n OperandSize::Far64 => 80,\n\n OperandSize::Xmmword => 128,\n\n OperandSize::Ymmword => 256,\n\n OperandSize::Zmmword => 512,\n\n }\n\n }\n\n\n\n pub fn from_bits(bits: u32) -> Option<OperandSize> {\n\n Some(match bits {\n", "file_path": "src/operand.rs", "rank": 43, "score": 148967.3260113847 }, { "content": " }\n\n\n\n pub fn is_memory(&self) -> bool {\n\n match *self {\n\n Operand::Indirect(..)\n\n | Operand::IndirectDisplaced(..)\n\n | Operand::IndirectScaledIndexed(..)\n\n | 
Operand::IndirectScaledIndexedDisplaced(..)\n\n | Operand::IndirectScaledDisplaced(..)\n\n | Operand::Memory(..)\n\n | Operand::Offset(..) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_fixed_memory(&self) -> bool {\n\n match *self {\n\n Operand::Memory(..) | Operand::Offset(..) | Operand::MemoryAndSegment16(..) => true,\n\n Operand::MemoryAndSegment32(..) => true,\n\n _ => false,\n", "file_path": "src/operand.rs", "rank": 44, "score": 148967.10099164132 }, { "content": " 8 => OperandSize::Byte,\n\n 16 => OperandSize::Word,\n\n 32 => OperandSize::Dword,\n\n 48 => OperandSize::Fword,\n\n 64 => OperandSize::Qword,\n\n 80 => OperandSize::Tbyte,\n\n 128 => OperandSize::Xmmword,\n\n 256 => OperandSize::Ymmword,\n\n 512 => OperandSize::Zmmword,\n\n 0 => OperandSize::Unsized,\n\n _ => return None,\n\n })\n\n }\n\n\n\n pub fn is_valid_literal(&self, v: u64) -> bool {\n\n let signed = v as i64; // TODO?\n\n match *self {\n\n OperandSize::Byte => {\n\n signed >= i8::min_value() as i64 && signed <= i8::max_value() as i64\n\n }\n", "file_path": "src/operand.rs", "rank": 45, "score": 148966.88905043798 }, { "content": " }\n\n }\n\n\n\n pub fn is_far(&self) -> bool {\n\n match *self {\n\n Operand::MemoryAndSegment16(..) => true,\n\n Operand::MemoryAndSegment32(..) => true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_literal(&self) -> bool {\n\n match *self {\n\n Operand::Literal8(_)\n\n | Operand::Literal16(_)\n\n | Operand::Literal32(_)\n\n | Operand::Literal64(_) => true,\n\n _ => false,\n\n }\n\n }\n", "file_path": "src/operand.rs", "rank": 46, "score": 148966.66128331324 }, { "content": " | Operand::IndirectScaledIndexedDisplaced(_, _, _, _, _, seg)\n\n | Operand::IndirectScaledDisplaced(_, _, _, _, seg)\n\n | Operand::Memory(_, _, seg)\n\n | Operand::Offset(_, _, seg) => seg,\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn is_direct(&self) -> bool {\n\n match *self {\n\n Operand::Direct(..) 
=> true,\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_offset(&self) -> bool {\n\n match *self {\n\n Operand::Offset(..) => true,\n\n _ => false,\n\n }\n", "file_path": "src/operand.rs", "rank": 47, "score": 148966.24534807052 }, { "content": "\n\n pub fn get_literal(&self) -> Option<u64> {\n\n match *self {\n\n Operand::Literal8(val) => Some(val as u64),\n\n Operand::Literal16(val) => Some(val as u64),\n\n Operand::Literal32(val) => Some(val as u64),\n\n Operand::Literal64(val) => Some(val),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn is_general(&self) -> bool {\n\n match *self {\n\n Operand::Direct(r) => r.is_general(),\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn is_fpu(&self) -> bool {\n\n match *self {\n", "file_path": "src/operand.rs", "rank": 48, "score": 148965.59864181123 }, { "content": " OperandSize::Word => {\n\n signed >= i16::min_value() as i64 && signed <= i16::max_value() as i64\n\n }\n\n OperandSize::Dword => {\n\n signed >= i32::min_value() as i64 && signed <= i32::max_value() as i64\n\n }\n\n OperandSize::Qword => {\n\n signed >= i64::min_value() as i64 && signed <= i64::max_value() as i64\n\n }\n\n _ => false,\n\n }\n\n }\n\n}\n", "file_path": "src/operand.rs", "rank": 49, "score": 148963.74247508022 }, { "content": "\n\n#[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)]\n\npub enum OperandSize {\n\n // Order here is important because of derive(Ord)\n\n Unsized,\n\n Byte, // 8-bit\n\n Word, // 16-bit\n\n Dword, // 32-bit\n\n Far16, // 16:16\n\n Fword, // 48-bit\n\n Far32, // 16:32\n\n Qword, // 64-bit\n\n Tbyte, // 80-bit\n\n Far64, // 16:64\n\n Xmmword, // 128-bit\n\n Ymmword, // 256-bit\n\n Zmmword, // 512-bit\n\n}\n\n\n\nimpl OperandSize {\n", "file_path": "src/operand.rs", "rank": 50, "score": 148961.8857754559 }, { "content": "fn write_test<W: Write>(instr: &Instruction, encoded: &[u8], addr_size: OperandSize,\n\n writer: &mut W, test_count: &mut HashMap<String, u32>) -> io::Result<()> {\n\n let test_num = 
test_count.entry(instr.mnemonic.clone()).or_insert(0);\n\n *test_num += 1;\n\n\n\n write!(writer, \"#[test]\\nfn {}_{}() {{\\n run_test(&{:?}, &{:?}, {:?})\\n}}\\n\\n\",\n\n instr.mnemonic.to_lowercase(), test_num, instr, encoded, addr_size)\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 51, "score": 146749.65030305646 }, { "content": "use ::instruction::{Reg, RegScale, SegmentReg};\n\n\n\n#[derive(PartialEq, Eq, Clone, Debug, Copy)]\n\npub enum Operand {\n\n Direct(Reg),\n\n Indirect(Reg, Option<OperandSize>, Option<SegmentReg>),\n\n IndirectDisplaced(Reg, u64, Option<OperandSize>, Option<SegmentReg>),\n\n IndirectScaledIndexed(Reg, Reg, RegScale, Option<OperandSize>, Option<SegmentReg>),\n\n IndirectScaledIndexedDisplaced(Reg, Reg, RegScale, u64, Option<OperandSize>, Option<SegmentReg>),\n\n IndirectScaledDisplaced(Reg, RegScale, u64, Option<OperandSize>, Option<SegmentReg>),\n\n Memory(u64, Option<OperandSize>, Option<SegmentReg>),\n\n Offset(u64, Option<OperandSize>, Option<SegmentReg>),\n\n Literal8(u8),\n\n Literal16(u16),\n\n Literal32(u32),\n\n Literal64(u64),\n\n MemoryAndSegment16(u16, u16),\n\n MemoryAndSegment32(u16, u32),\n\n}\n\n\n", "file_path": "gen_defs/src/operand.rs", "rank": 52, "score": 142723.24425068015 }, { "content": " }\n\n \n\n pub fn is_mmx(&self) -> bool {\n\n match *self {\n\n Operand::Direct(reg) => reg.is_mmx(),\n\n _ => false\n\n }\n\n }\n\n \n\n pub fn is_sse(&self) -> bool {\n\n match *self {\n\n Operand::Direct(reg) => reg.is_sse(),\n\n _ => false\n\n }\n\n }\n\n \n\n pub fn is_avx(&self) -> bool {\n\n match *self {\n\n Operand::Direct(reg) => reg.is_avx(),\n\n _ => false\n", "file_path": "gen_defs/src/operand.rs", "rank": 53, "score": 142717.8336514773 }, { "content": "\n\n pub fn is_general(&self) -> bool {\n\n match *self {\n\n Operand::Direct(r) => r.is_general(),\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn is_fpu(&self) -> bool {\n\n match *self {\n\n Operand::Direct(reg) => reg.is_fpu(),\n\n _ => false\n\n 
}\n\n }\n\n\n\n pub fn is_flags(&self) -> bool {\n\n match *self {\n\n Operand::Direct(reg) => reg.is_flags(),\n\n _ => false\n\n }\n", "file_path": "gen_defs/src/operand.rs", "rank": 54, "score": 142717.52715914403 }, { "content": "impl Operand {\n\n pub fn size(&self) -> Option<OperandSize> {\n\n match *self {\n\n Operand::Direct(reg) => Some(reg.size()),\n\n Operand::Indirect(_, size, _) |\n\n Operand::IndirectDisplaced(_, _, size, _) |\n\n Operand::IndirectScaledIndexed(_, _, _, size, _) |\n\n Operand::IndirectScaledIndexedDisplaced(_, _, _, _, size, _) |\n\n Operand::IndirectScaledDisplaced(_, _, _, size, _) |\n\n Operand::Memory(_, size, _) |\n\n Operand::Offset(_, size, _)\n\n => size,\n\n Operand::Literal8(_) => Some(OperandSize::Byte),\n\n Operand::Literal16(_) => Some(OperandSize::Word),\n\n Operand::Literal32(_) => Some(OperandSize::Dword),\n\n Operand::Literal64(_) => Some(OperandSize::Qword),\n\n Operand::MemoryAndSegment16(..) |\n\n Operand::MemoryAndSegment32(..)\n\n => None // TODO?\n\n }\n", "file_path": "gen_defs/src/operand.rs", "rank": 55, "score": 142717.49898862652 }, { "content": " }\n\n\n\n pub fn is_scaled_indexed(&self) -> bool {\n\n match *self {\n\n Operand::IndirectScaledIndexed(..) |\n\n Operand::IndirectScaledIndexedDisplaced(..) 
=> true,\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn segment_reg(&self) -> Option<SegmentReg> {\n\n match *self {\n\n Operand::Indirect(_, _, seg) |\n\n Operand::IndirectDisplaced(_, _, _, seg) |\n\n Operand::IndirectScaledIndexed(_, _, _, _, seg) |\n\n Operand::IndirectScaledIndexedDisplaced(_, _, _, _, _, seg) |\n\n Operand::IndirectScaledDisplaced(_, _, _, _, seg) |\n\n Operand::Memory(_, _, seg) |\n\n Operand::Offset(_, _, seg)\n\n => seg,\n", "file_path": "gen_defs/src/operand.rs", "rank": 56, "score": 142717.0212356353 }, { "content": " OperandSize::Qword => 64,\n\n OperandSize::Tbyte => 80,\n\n OperandSize::Far64 => 80,\n\n OperandSize::Xmmword => 128,\n\n OperandSize::Ymmword => 256,\n\n OperandSize::Zmmword => 512,\n\n }\n\n }\n\n\n\n pub fn from_bits(bits: u32) -> Option<OperandSize> {\n\n Some(match bits {\n\n 8 => OperandSize::Byte,\n\n 16 => OperandSize::Word,\n\n 32 => OperandSize::Dword,\n\n 48 => OperandSize::Fword,\n\n 64 => OperandSize::Qword,\n\n 80 => OperandSize::Tbyte,\n\n 128 => OperandSize::Xmmword,\n\n 256 => OperandSize::Ymmword,\n\n 512 => OperandSize::Zmmword,\n\n 0 => OperandSize::Unsized,\n\n _ => return None\n\n })\n\n }\n\n}\n", "file_path": "gen_defs/src/operand.rs", "rank": 57, "score": 142712.53206804473 }, { "content": " Operand::Indirect(..) |\n\n Operand::IndirectDisplaced(..) |\n\n Operand::IndirectScaledIndexed(..) |\n\n Operand::IndirectScaledIndexedDisplaced(..) |\n\n Operand::IndirectScaledDisplaced(..) |\n\n Operand::Memory(..) |\n\n Operand::Offset(..) => true,\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn is_fixed_memory(&self) -> bool {\n\n match *self {\n\n Operand::Memory(..) |\n\n Operand::Offset(..) |\n\n Operand::MemoryAndSegment16(..) => true,\n\n Operand::MemoryAndSegment32(..) 
=> true,\n\n _ => false\n\n }\n\n }\n", "file_path": "gen_defs/src/operand.rs", "rank": 58, "score": 142712.24022779043 }, { "content": "\n\n pub fn is_literal(&self) -> bool {\n\n match *self {\n\n Operand::Literal8(_) |\n\n Operand::Literal16(_) |\n\n Operand::Literal32(_) |\n\n Operand::Literal64(_) => true,\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn get_literal(&self) -> Option<u64> {\n\n match *self {\n\n Operand::Literal8(val) => Some(val as u64),\n\n Operand::Literal16(val) => Some(val as u64),\n\n Operand::Literal32(val) => Some(val as u64),\n\n Operand::Literal64(val) => Some(val),\n\n _ => None\n\n }\n\n }\n", "file_path": "gen_defs/src/operand.rs", "rank": 59, "score": 142711.6965745186 }, { "content": " Fword, // 48-bit\n\n Far32, // 16:32\n\n Qword, // 64-bit\n\n Tbyte, // 80-bit\n\n Far64, // 16:64\n\n Xmmword, // 128-bit\n\n Ymmword, // 256-bit\n\n Zmmword, // 512-bit\n\n}\n\n\n\nimpl OperandSize {\n\n pub fn bits(&self) -> u32 {\n\n match *self {\n\n OperandSize::Unsized => 0, // TODO?\n\n OperandSize::Byte => 8,\n\n OperandSize::Word => 16,\n\n OperandSize::Dword => 32,\n\n OperandSize::Far16 => 32,\n\n OperandSize::Fword => 48,\n\n OperandSize::Far32 => 48,\n", "file_path": "gen_defs/src/operand.rs", "rank": 60, "score": 142711.57715564728 }, { "content": " }\n\n }\n\n\n\n pub fn is_far_pointer(&self) -> bool {\n\n match *self {\n\n Operand::MemoryAndSegment16(..) |\n\n Operand::MemoryAndSegment32(..) => true,\n\n _ => false\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd)]\n\npub enum OperandSize {\n\n // Order here is important because of derive(Ord)\n\n Unsized,\n\n Byte, // 8-bit\n\n Word, // 16-bit\n\n Dword, // 32-bit\n\n Far16, // 16:16\n", "file_path": "gen_defs/src/operand.rs", "rank": 61, "score": 142708.97863029726 }, { "content": " _ => None\n\n }\n\n }\n\n\n\n pub fn is_direct(&self) -> bool {\n\n match *self {\n\n Operand::Direct(..) 
=> true,\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn is_offset(&self) -> bool {\n\n match *self {\n\n Operand::Offset(..) => true,\n\n _ => false\n\n }\n\n }\n\n\n\n pub fn is_memory(&self) -> bool {\n\n match *self {\n", "file_path": "gen_defs/src/operand.rs", "rank": 62, "score": 142708.88711108148 }, { "content": "pub fn emit_tests_helper(instr: &InstructionDefinition, addr_size: OperandSize, output_dir: &str,\n\n test_count: &mut HashMap<String, u32>) -> io::Result<()> {\n\n if should_skip_instr(instr) { return Ok(()); }\n\n\n\n let test_instrs = build_test_instructions(instr, addr_size);\n\n\n\n let enc_result: io::Result<Vec<Vec<u8>>> = \n\n test_instrs.iter().map(|i| encode(i, instr, addr_size)).collect();\n\n let bytes = enc_result?;\n\n let mut writer = OpenOptions::new()\n\n .append(true)\n\n .create(true)\n\n .open(format!(\"{}/{}\", output_dir, instr.mnemonic.to_lowercase()))\n\n .unwrap();\n\n\n\n test_instrs.iter().zip(bytes.iter()).fold(Ok(()),\n\n |res, (i, b)| res.and(write_test(i, b, addr_size, &mut writer, test_count)))\n\n}\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 63, "score": 142063.11425631508 }, { "content": "fn random_reg_16() -> Reg\n\n { random_of(&[Reg::BX, Reg::CX, Reg::DX, Reg::SI, Reg::DI, Reg::SP, Reg::BP]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 64, "score": 140822.9760655049 }, { "content": "fn random_reg_64() -> Reg\n\n { random_of(&[Reg::RBX, Reg::RCX, Reg::RDX, Reg::RSI, Reg::RDI, Reg::RSP, Reg::RBP]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 65, "score": 140822.9760655049 }, { "content": "fn random_reg_32() -> Reg\n\n { random_of(&[Reg::EBX, Reg::ECX, Reg::EDX, Reg::ESI, Reg::EDI, Reg::ESP, Reg::EBP]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 66, "score": 140822.9760655049 }, { "content": "fn random_fpu_reg() -> Reg\n\n { random_of(&[Reg::ST1, Reg::ST2, Reg::ST3, Reg::ST4, Reg::ST5, Reg::ST6, Reg::ST7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 67, 
"score": 139862.96414637 }, { "content": "fn random_mask_reg() -> Reg\n\n { random_of(&[Reg::K1, Reg::K2, Reg::K3, Reg::K4, Reg::K5, Reg::K6, Reg::K7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 68, "score": 139862.96414637 }, { "content": "fn random_reg_16_no_stack() -> Reg\n\n { random_of(&[Reg::AX, Reg::BX, Reg::CX, Reg::DX, Reg::SI, Reg::DI]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 69, "score": 139862.96414637 }, { "content": "fn random_mmx_reg() -> Reg\n\n { random_of(&[Reg::MM0, Reg::MM1, Reg::MM2, Reg::MM3, Reg::MM4, Reg::MM5, Reg::MM6, Reg::MM7]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 70, "score": 139862.96414637 }, { "content": "fn random_reg_64_no_stack() -> Reg\n\n { random_of(&[Reg::RAX, Reg::RBX, Reg::RCX, Reg::RDX, Reg::RSI, Reg::RDI]) }\n\n\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 71, "score": 139862.96414637 }, { "content": "fn random_reg_32_no_stack() -> Reg\n\n { random_of(&[Reg::EAX, Reg::EBX, Reg::ECX, Reg::EDX, Reg::ESI, Reg::EDI]) }\n", "file_path": "gen_defs/src/gen_tests.rs", "rank": 72, "score": 139862.96414637 }, { "content": "#[test]\n\nfn operand_type_b() {\n\n decode_helper(\n\n &vec![0xC4, 0xE3, 0x79, 0x32, 0xCA, 0x05],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::KSHIFTLB,\n\n Operand::Direct(Reg::K1),\n\n Operand::Direct(Reg::K2),\n\n Operand::Literal8(5),\n\n ),\n\n ); // KSHIFTLB K1, K2, 5\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 73, "score": 139760.6125409626 }, { "content": "#[test]\n\nfn operand_type_w() {\n\n decode_helper(\n\n &vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Real,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n decode_helper(\n\n &vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n decode_helper(\n\n 
&vec![0xC8, 0x05, 0x00, 0x06],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ENTER,\n\n Operand::Literal16(0x5),\n\n Operand::Literal8(0x06),\n\n ),\n\n ); // ENTER 5, 6\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 74, "score": 139760.6125409626 }, { "content": "#[test]\n\nfn operand_type_p() {\n\n decode_helper(\n\n &vec![0x9A, 0xAB, 0x89, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::MemoryAndSegment32(0x0123, 0x456789AB),\n\n ),\n\n ); // CALL 0x0123:0x456789AB\n\n decode_helper(\n\n &vec![0x66, 0x9A, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::CALL, Operand::MemoryAndSegment16(0x0123, 0x4567)),\n\n ); // CALL 0x0123:0x4567\n\n decode_helper(\n\n &vec![0x9A, 0x67, 0x45, 0x23, 0x01],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::CALL, Operand::MemoryAndSegment16(0x0123, 0x4567)),\n\n ); // CALL 0x0123:0x4567\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 75, "score": 139760.6125409626 }, { "content": "#[test]\n\nfn operand_type_a() {\n\n decode_helper(\n\n &vec![0x66, 0x62, 0x00],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BOUND,\n\n Operand::Direct(Reg::AX),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Unsized), None),\n\n ),\n\n ); // BOUND AX, [EAX]\n\n decode_helper(\n\n &vec![0x62, 0x00],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::BOUND,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Unsized), None),\n\n ),\n\n ); // BOUND EAX, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 76, "score": 139760.6125409626 }, { "content": "pub fn encode_operand(\n\n buffer: &mut InstructionBuffer,\n\n def: &OperandDefinition,\n\n op: &Option<Operand>,\n\n mode: Mode,\n\n addr_size: OperandSize,\n\n) -> Result<(), InstructionEncodingError> {\n\n if let OperandType::Fixed(_) = def.op_type {\n\n return Ok(());\n\n }\n\n\n\n match def.encoding {\n\n OperandEncoding::ModRmReg 
=> {\n\n if let Some(Operand::Direct(reg)) = *op {\n\n buffer.mod_rm_reg = Some(reg.get_reg_code());\n\n\n\n if reg.needs_rex() && buffer.composite_prefix.is_none() {\n\n buffer.composite_prefix = Some(::instruction_buffer::CompositePrefix::Rex);\n\n }\n\n } else {\n", "file_path": "src/encoding.rs", "rank": 77, "score": 139760.6125409626 }, { "content": "#[test]\n\nfn operand_type_q() {\n\n decode_helper(\n\n &vec![0xFF, 0x20],\n\n Mode::Long,\n\n &Instruction::new1(\n\n Mnemonic::JMP,\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // JMP QWORD PTR [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 78, "score": 139760.6125409626 }, { "content": "#[test]\n\nfn operand_type_d() {\n\n decode_helper(\n\n &vec![0x0F, 0x6E, 0xD0],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::MOVD,\n\n Operand::Direct(Reg::MM2),\n\n Operand::Direct(Reg::EAX),\n\n ),\n\n ); // MOVD MM2, EAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 79, "score": 139760.6125409626 }, { "content": "#[test]\n\nfn operand_type_v() {\n\n decode_helper(\n\n &vec![0x40],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::AX)),\n\n ); // INC AX\n\n decode_helper(\n\n &vec![0x66, 0x40],\n\n Mode::Real,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::EAX)),\n\n ); // INC EAX\n\n decode_helper(\n\n &vec![0x66, 0x40],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::AX)),\n\n ); // INC AX\n\n decode_helper(\n\n &vec![0x40],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::INC, Operand::Direct(Reg::EAX)),\n\n ); // INC EAX\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 80, "score": 139760.6125409626 }, { "content": "// Test decoding of the operand size prefix.\n\nfn operand_size_prefix() {\n\n decode_helper(\n\n &vec![0xC5, 0xE9, 0x58, 0x08],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::VADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n 
Operand::Direct(Reg::XMM2),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // VADDPD XMM1, XMM2, [EAX]\n\n}\n\n\n\n#[test]\n", "file_path": "src/test/decode.rs", "rank": 81, "score": 136957.80468055952 }, { "content": "#[test]\n\nfn operand_type_er() {\n\n decode_helper(\n\n &vec![0xDB, 0x28],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FLD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // FLD TBYTE PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 82, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_ps() {\n\n decode_helper(\n\n &vec![0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDPS XMM1, XMM2\n\n decode_helper(\n\n &vec![0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // ADDPS XMM1, [EAX]1\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 83, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_sd() {\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDSD XMM1, XMM2\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // ADDSD XMM1, [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 84, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_pi() {\n\n decode_helper(\n\n &vec![0x0F, 0x2A, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTPI2PS,\n\n Operand::Direct(Reg::XMM1),\n\n 
Operand::Direct(Reg::MM2),\n\n ),\n\n ); // CVTPI2PS XMM1, MM2\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 85, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_dr() {\n\n decode_helper(\n\n &vec![0xDC, 0x00],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FADD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // FADD QWORD PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 86, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_dq() {\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x38, 0x00, 0x08],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::PSHUFB,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // PSHUFB XMM1, [EAX]\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x38, 0x00, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::PSHUFB,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // PSHUFB XMM1, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 87, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_bss() {\n\n decode_helper(\n\n &vec![0x6A, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::PUSH, Operand::Literal8(0x12)),\n\n ); // PUSH 0x12\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 88, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_pd() {\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDPD XMM1, XMM2\n\n decode_helper(\n\n &vec![0x66, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDPD,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Xmmword), None),\n\n ),\n\n ); // ADDPD XMM1, [EAX]1\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 89, "score": 
136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_psq() {\n\n decode_helper(\n\n &vec![0x0F, 0x2C, 0xCA],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTTPS2PI,\n\n Operand::Direct(Reg::MM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // CVTTPS2PI MM1, XMM2\n\n decode_helper(\n\n &vec![0x0F, 0x2C, 0x08],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CVTTPS2PI,\n\n Operand::Direct(Reg::MM1),\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Qword), None),\n\n ),\n\n ); // CVTTPS2PI MM1, [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 90, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_ds() {\n\n decode_helper(\n\n &vec![0xE8, 0x78, 0x56, 0x34, 0x12],\n\n Mode::Protected,\n\n &Instruction::new1(Mnemonic::CALL, Operand::Offset(0x12345678, None, None)),\n\n ); // CALL 0x12345678\n\n}\n\n\n\n// I've temporarily disabled this test as the decoding logic will need to lookahead in order to\n\n// distiguish between a standalone FWAIT instruction and an instruction prefixed with 0x9B.\n\n// #[test]\n\n// fn operand_type_e() {\n\n// decode_helper(&vec![0x9B, 0xD9, 0x30], Mode::Protected, &Instruction::new1(Mnemonic::FSTENV, Operand::Indirect(Reg::EAX, None, None))); // FSTENV [EAX]\n\n// }\n\n\n", "file_path": "src/test/decode.rs", "rank": 91, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_ptp() {\n\n decode_helper(\n\n &vec![0xFF, 0x10],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // CALL DWORD PTR [EAX]\n\n decode_helper(\n\n &vec![0xFF, 0x18],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Fword), None),\n\n ),\n\n ); // CALL FWORD PTR [EAX]\n\n\n\n // TODO I'm not 100% sure this is correct. 
It seems to be from the Intel docs, but GCC won't\n\n // seem to accept this form?\n\n decode_helper(\n\n &vec![0x48, 0xFF, 0x18],\n\n Mode::Long,\n\n &Instruction::new1(\n\n Mnemonic::CALL,\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // CALL TBYTE PTR [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 92, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_di() {\n\n decode_helper(\n\n &vec![0xDA, 0x00],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FIADD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // FIADD DWORD PTR [EAX}\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 93, "score": 136944.33809244708 }, { "content": "fn get_operand_addr_size(\n\n operand: &Option<Operand>,\n\n) -> Result<Option<OperandSize>, InstructionEncodingError> {\n\n operand\n\n .map(|op| match op {\n\n // TODO - Should offset/memory be here?\n\n Operand::Indirect(reg, ..)\n\n | Operand::IndirectScaledDisplaced(reg, ..)\n\n | Operand::IndirectDisplaced(reg, ..) => Ok(Some(reg.size())),\n\n Operand::IndirectScaledIndexed(base, index, ..)\n\n | Operand::IndirectScaledIndexedDisplaced(base, index, ..) 
=> {\n\n let base_size = base.size();\n\n let index_size = index.size();\n\n if base_size == index_size {\n\n Ok(Some(base_size))\n\n } else {\n\n Err(InstructionEncodingError::InvalidAddressing)\n\n }\n\n }\n\n _ => Ok(None),\n\n })\n\n .unwrap_or(Ok(None))\n\n}\n\n\n", "file_path": "src/encoding.rs", "rank": 94, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_bs() {\n\n decode_helper(\n\n &vec![0x6B, 0xC3, 0x12],\n\n Mode::Protected,\n\n &Instruction::new3(\n\n Mnemonic::IMUL,\n\n Operand::Direct(Reg::EAX),\n\n Operand::Direct(Reg::EBX),\n\n Operand::Literal8(0x12),\n\n ),\n\n ); // IMUL EAX, EBX, 0x12\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 95, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_ss() {\n\n decode_helper(\n\n &vec![0xF3, 0x0F, 0x58, 0xCA],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Direct(Reg::XMM2),\n\n ),\n\n ); // ADDSS XMM1, XMM2\n\n decode_helper(\n\n &vec![0xF3, 0x0F, 0x58, 0x08],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::ADDSS,\n\n Operand::Direct(Reg::XMM1),\n\n Operand::Indirect(Reg::RAX, Some(OperandSize::Dword), None),\n\n ),\n\n ); // ADDSS XMM1, [RAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 96, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_qp() {\n\n decode_helper(\n\n &vec![0x48, 0xCF],\n\n Mode::Long,\n\n &Instruction::new0(Mnemonic::IRETQ),\n\n ); // IRETQ\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 97, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_dqp() {\n\n decode_helper(\n\n &vec![0xF2, 0x48, 0x0F, 0x38, 0xF0, 0xC0],\n\n Mode::Long,\n\n &Instruction::new2(\n\n Mnemonic::CRC32,\n\n Operand::Direct(Reg::RAX),\n\n Operand::Direct(Reg::AL),\n\n ),\n\n ); // CRC32 RAX, AL\n\n decode_helper(\n\n &vec![0xF2, 0x0F, 0x38, 0xF0, 0xC0],\n\n Mode::Protected,\n\n &Instruction::new2(\n\n Mnemonic::CRC32,\n\n 
Operand::Direct(Reg::EAX),\n\n Operand::Direct(Reg::AL),\n\n ),\n\n ); // CRC32 EAX, AL\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 98, "score": 136944.33809244708 }, { "content": "#[test]\n\nfn operand_type_bcd() {\n\n decode_helper(\n\n &vec![0xDF, 0x20],\n\n Mode::Protected,\n\n &Instruction::new1(\n\n Mnemonic::FBLD,\n\n Operand::Indirect(Reg::EAX, Some(OperandSize::Tbyte), None),\n\n ),\n\n ); // FBLD [EAX]\n\n}\n\n\n", "file_path": "src/test/decode.rs", "rank": 99, "score": 136944.33809244708 } ]
Rust
cargo/vendor/memmap-0.6.1/src/windows.rs
mfarrugi/cargo-raze-example-stdx
e4283e299d298cea9a534d0623cb4f3b54ef36f5
extern crate kernel32; extern crate winapi; use std::{io, mem, ptr}; use std::fs::File; use std::os::raw::c_void; use std::os::windows::io::{AsRawHandle, RawHandle}; pub struct MmapInner { file: Option<File>, ptr: *mut c_void, len: usize, copy: bool, } impl MmapInner { pub fn new( file: &File, protect: winapi::DWORD, access: winapi::DWORD, offset: usize, len: usize, copy: bool, ) -> io::Result<MmapInner> { let alignment = offset % allocation_granularity(); let aligned_offset = offset - alignment; let aligned_len = len + alignment; unsafe { let handle = kernel32::CreateFileMappingW( file.as_raw_handle(), ptr::null_mut(), protect, 0, 0, ptr::null(), ); if handle == ptr::null_mut() { return Err(io::Error::last_os_error()); } let ptr = kernel32::MapViewOfFile( handle, access, (aligned_offset >> 16 >> 16) as winapi::DWORD, (aligned_offset & 0xffffffff) as winapi::DWORD, aligned_len as winapi::SIZE_T, ); kernel32::CloseHandle(handle); if ptr == ptr::null_mut() { Err(io::Error::last_os_error()) } else { Ok(MmapInner { file: Some(file.try_clone()?), ptr: ptr.offset(alignment as isize), len: len as usize, copy: copy, }) } } } pub fn map(len: usize, file: &File, offset: usize) -> io::Result<MmapInner> { let write = protection_supported(file.as_raw_handle(), winapi::PAGE_READWRITE); let exec = protection_supported(file.as_raw_handle(), winapi::PAGE_EXECUTE_READ); let mut access = winapi::FILE_MAP_READ; let protection = match (write, exec) { (true, true) => { access |= winapi::FILE_MAP_WRITE | winapi::FILE_MAP_EXECUTE; winapi::PAGE_EXECUTE_READWRITE } (true, false) => { access |= winapi::FILE_MAP_WRITE; winapi::PAGE_READWRITE } (false, true) => { access |= winapi::FILE_MAP_EXECUTE; winapi::PAGE_EXECUTE_READ } (false, false) => winapi::PAGE_READONLY, }; let mut inner = MmapInner::new(file, protection, access, offset, len, false)?; if write || exec { inner.make_read_only()?; } Ok(inner) } pub fn map_exec(len: usize, file: &File, offset: usize) -> io::Result<MmapInner> { let 
write = protection_supported(file.as_raw_handle(), winapi::PAGE_READWRITE); let mut access = winapi::FILE_MAP_READ | winapi::FILE_MAP_EXECUTE; let protection = if write { access |= winapi::FILE_MAP_WRITE; winapi::PAGE_EXECUTE_READWRITE } else { winapi::PAGE_EXECUTE_READ }; let mut inner = MmapInner::new(file, protection, access, offset, len, false)?; if write { inner.make_exec()?; } Ok(inner) } pub fn map_mut(len: usize, file: &File, offset: usize) -> io::Result<MmapInner> { let exec = protection_supported(file.as_raw_handle(), winapi::PAGE_EXECUTE_READ); let mut access = winapi::FILE_MAP_READ | winapi::FILE_MAP_WRITE; let protection = if exec { access |= winapi::FILE_MAP_EXECUTE; winapi::PAGE_EXECUTE_READWRITE } else { winapi::PAGE_READWRITE }; let mut inner = MmapInner::new(file, protection, access, offset, len, false)?; if exec { inner.make_mut()?; } Ok(inner) } pub fn map_copy(len: usize, file: &File, offset: usize) -> io::Result<MmapInner> { let exec = protection_supported(file.as_raw_handle(), winapi::PAGE_EXECUTE_READWRITE); let mut access = winapi::FILE_MAP_COPY; let protection = if exec { access |= winapi::FILE_MAP_EXECUTE; winapi::PAGE_EXECUTE_WRITECOPY } else { winapi::PAGE_WRITECOPY }; let mut inner = MmapInner::new(file, protection, access, offset, len, true)?; if exec { inner.make_mut()?; } Ok(inner) } pub fn map_anon(len: usize, _stack: bool) -> io::Result<MmapInner> { unsafe { let handle = kernel32::CreateFileMappingW( winapi::INVALID_HANDLE_VALUE, ptr::null_mut(), winapi::PAGE_EXECUTE_READWRITE, (len >> 16 >> 16) as winapi::DWORD, (len & 0xffffffff) as winapi::DWORD, ptr::null(), ); if handle == ptr::null_mut() { return Err(io::Error::last_os_error()); } let access = winapi::FILE_MAP_ALL_ACCESS | winapi::FILE_MAP_EXECUTE; let ptr = kernel32::MapViewOfFile(handle, access, 0, 0, len as winapi::SIZE_T); kernel32::CloseHandle(handle); if ptr == ptr::null_mut() { return Err(io::Error::last_os_error()); } let mut old = 0; let result = 
kernel32::VirtualProtect( ptr, len as winapi::SIZE_T, winapi::PAGE_READWRITE, &mut old, ); if result != 0 { Ok(MmapInner { file: None, ptr: ptr, len: len as usize, copy: false, }) } else { Err(io::Error::last_os_error()) } } } pub fn flush(&self, offset: usize, len: usize) -> io::Result<()> { self.flush_async(offset, len)?; if let Some(ref file) = self.file { file.sync_data()?; } Ok(()) } pub fn flush_async(&self, offset: usize, len: usize) -> io::Result<()> { let result = unsafe { kernel32::FlushViewOfFile(self.ptr.offset(offset as isize), len as winapi::SIZE_T) }; if result != 0 { Ok(()) } else { Err(io::Error::last_os_error()) } } fn virtual_protect(&mut self, protect: winapi::DWORD) -> io::Result<()> { unsafe { let alignment = self.ptr as usize % allocation_granularity(); let ptr = self.ptr.offset(-(alignment as isize)); let aligned_len = self.len as winapi::SIZE_T + alignment as winapi::SIZE_T; let mut old = 0; let result = kernel32::VirtualProtect(ptr, aligned_len, protect, &mut old); if result != 0 { Ok(()) } else { Err(io::Error::last_os_error()) } } } pub fn make_read_only(&mut self) -> io::Result<()> { self.virtual_protect(winapi::PAGE_READONLY) } pub fn make_exec(&mut self) -> io::Result<()> { if self.copy { self.virtual_protect(winapi::PAGE_EXECUTE_WRITECOPY) } else { self.virtual_protect(winapi::PAGE_EXECUTE_READ) } } pub fn make_mut(&mut self) -> io::Result<()> { if self.copy { self.virtual_protect(winapi::PAGE_WRITECOPY) } else { self.virtual_protect(winapi::PAGE_READWRITE) } } #[inline] pub fn ptr(&self) -> *const u8 { self.ptr as *const u8 } #[inline] pub fn mut_ptr(&mut self) -> *mut u8 { self.ptr as *mut u8 } #[inline] pub fn len(&self) -> usize { self.len } } impl Drop for MmapInner { fn drop(&mut self) { let alignment = self.ptr as usize % allocation_granularity(); unsafe { let ptr = self.ptr.offset(-(alignment as isize)); assert!( kernel32::UnmapViewOfFile(ptr) != 0, "unable to unmap mmap: {}", io::Error::last_os_error() ); } } } unsafe impl 
Sync for MmapInner {} unsafe impl Send for MmapInner {} fn protection_supported(handle: RawHandle, protection: winapi::DWORD) -> bool { unsafe { let handle = kernel32::CreateFileMappingW(handle, ptr::null_mut(), protection, 0, 0, ptr::null()); if handle == ptr::null_mut() { return false; } kernel32::CloseHandle(handle); true } } fn allocation_granularity() -> usize { unsafe { let mut info = mem::zeroed(); kernel32::GetSystemInfo(&mut info); return info.dwAllocationGranularity as usize; } }
extern crate kernel32; extern crate winapi; use std::{io, mem, ptr}; use std::fs::File; use std::os::raw::c_void; use std::os::windows::io::{AsRawHandle, RawHandle}; pub struct MmapInner { file: Option<File>, ptr: *mut c_void, len: usize, copy: bool, } impl MmapInner { pub fn new( file: &File, protect: winapi::DWORD, access: winapi::DWORD, offset: usize, len: usize, copy: bool, ) -> io::Result<MmapInner> { let alignment = offset % allocation_granularity(); let aligned_offset = offset - alignment; let aligned_len = len + alignment; unsafe { let handle = kernel32::CreateFileMappingW( file.as_raw_handle(), ptr::null_mut(), protect, 0, 0, ptr::null(), ); if handle == ptr::null_mut() { return Err(io::Error::last_os_error()); } let ptr = kernel32::MapViewOfFile( handle, access, (aligned_offset >> 16 >> 16) as winapi::DWORD, (aligned_offset & 0xffffffff) as winapi::DWORD, aligned_len as winapi::SIZE_T, ); kernel32::CloseHandle(handle); if ptr == ptr::null_mut() { Err(io::Error::last_os_error()) } else { Ok(MmapInner { file: Some(file.try_clone()?), ptr: ptr.offset(alignment as isize), len: len as usize, copy: copy, }) } } } pub fn map(len: usize, file: &File, offset: usize) -> io::Result<MmapInner> { let write = protection_supported(file.as_raw_handle(), winapi::PAGE_READWRITE); let exec = protection_supported(file.as_raw_handle(), winapi::PAGE_EXECUTE_READ); let mut access = winapi::FILE_MAP_READ; let protection = match (write, exec) { (true, true) => { access |= winapi::FILE_MAP_WRITE | winapi::FILE_MAP_EXECUTE; winapi::PAGE_EXECUTE_READWRITE } (true, false) => { access |= winapi::FILE_MAP_WRITE; winapi::PAGE_READWRITE } (false, true) => { access |= winapi::FILE_MAP_EXECUTE; winapi::PAGE_EXECUTE_READ } (false, false) => winapi::PAGE_READONLY, }; let mut inner = MmapInner::new(file, protection, access, offset, len, false)?; if write || exec { inner.make_read_only()?; } Ok(inner) }
pub fn map_mut(len: usize, file: &File, offset: usize) -> io::Result<MmapInner> { let exec = protection_supported(file.as_raw_handle(), winapi::PAGE_EXECUTE_READ); let mut access = winapi::FILE_MAP_READ | winapi::FILE_MAP_WRITE; let protection = if exec { access |= winapi::FILE_MAP_EXECUTE; winapi::PAGE_EXECUTE_READWRITE } else { winapi::PAGE_READWRITE }; let mut inner = MmapInner::new(file, protection, access, offset, len, false)?; if exec { inner.make_mut()?; } Ok(inner) } pub fn map_copy(len: usize, file: &File, offset: usize) -> io::Result<MmapInner> { let exec = protection_supported(file.as_raw_handle(), winapi::PAGE_EXECUTE_READWRITE); let mut access = winapi::FILE_MAP_COPY; let protection = if exec { access |= winapi::FILE_MAP_EXECUTE; winapi::PAGE_EXECUTE_WRITECOPY } else { winapi::PAGE_WRITECOPY }; let mut inner = MmapInner::new(file, protection, access, offset, len, true)?; if exec { inner.make_mut()?; } Ok(inner) } pub fn map_anon(len: usize, _stack: bool) -> io::Result<MmapInner> { unsafe { let handle = kernel32::CreateFileMappingW( winapi::INVALID_HANDLE_VALUE, ptr::null_mut(), winapi::PAGE_EXECUTE_READWRITE, (len >> 16 >> 16) as winapi::DWORD, (len & 0xffffffff) as winapi::DWORD, ptr::null(), ); if handle == ptr::null_mut() { return Err(io::Error::last_os_error()); } let access = winapi::FILE_MAP_ALL_ACCESS | winapi::FILE_MAP_EXECUTE; let ptr = kernel32::MapViewOfFile(handle, access, 0, 0, len as winapi::SIZE_T); kernel32::CloseHandle(handle); if ptr == ptr::null_mut() { return Err(io::Error::last_os_error()); } let mut old = 0; let result = kernel32::VirtualProtect( ptr, len as winapi::SIZE_T, winapi::PAGE_READWRITE, &mut old, ); if result != 0 { Ok(MmapInner { file: None, ptr: ptr, len: len as usize, copy: false, }) } else { Err(io::Error::last_os_error()) } } } pub fn flush(&self, offset: usize, len: usize) -> io::Result<()> { self.flush_async(offset, len)?; if let Some(ref file) = self.file { file.sync_data()?; } Ok(()) } pub fn flush_async(&self, 
offset: usize, len: usize) -> io::Result<()> { let result = unsafe { kernel32::FlushViewOfFile(self.ptr.offset(offset as isize), len as winapi::SIZE_T) }; if result != 0 { Ok(()) } else { Err(io::Error::last_os_error()) } } fn virtual_protect(&mut self, protect: winapi::DWORD) -> io::Result<()> { unsafe { let alignment = self.ptr as usize % allocation_granularity(); let ptr = self.ptr.offset(-(alignment as isize)); let aligned_len = self.len as winapi::SIZE_T + alignment as winapi::SIZE_T; let mut old = 0; let result = kernel32::VirtualProtect(ptr, aligned_len, protect, &mut old); if result != 0 { Ok(()) } else { Err(io::Error::last_os_error()) } } } pub fn make_read_only(&mut self) -> io::Result<()> { self.virtual_protect(winapi::PAGE_READONLY) } pub fn make_exec(&mut self) -> io::Result<()> { if self.copy { self.virtual_protect(winapi::PAGE_EXECUTE_WRITECOPY) } else { self.virtual_protect(winapi::PAGE_EXECUTE_READ) } } pub fn make_mut(&mut self) -> io::Result<()> { if self.copy { self.virtual_protect(winapi::PAGE_WRITECOPY) } else { self.virtual_protect(winapi::PAGE_READWRITE) } } #[inline] pub fn ptr(&self) -> *const u8 { self.ptr as *const u8 } #[inline] pub fn mut_ptr(&mut self) -> *mut u8 { self.ptr as *mut u8 } #[inline] pub fn len(&self) -> usize { self.len } } impl Drop for MmapInner { fn drop(&mut self) { let alignment = self.ptr as usize % allocation_granularity(); unsafe { let ptr = self.ptr.offset(-(alignment as isize)); assert!( kernel32::UnmapViewOfFile(ptr) != 0, "unable to unmap mmap: {}", io::Error::last_os_error() ); } } } unsafe impl Sync for MmapInner {} unsafe impl Send for MmapInner {} fn protection_supported(handle: RawHandle, protection: winapi::DWORD) -> bool { unsafe { let handle = kernel32::CreateFileMappingW(handle, ptr::null_mut(), protection, 0, 0, ptr::null()); if handle == ptr::null_mut() { return false; } kernel32::CloseHandle(handle); true } } fn allocation_granularity() -> usize { unsafe { let mut info = mem::zeroed(); 
kernel32::GetSystemInfo(&mut info); return info.dwAllocationGranularity as usize; } }
pub fn map_exec(len: usize, file: &File, offset: usize) -> io::Result<MmapInner> { let write = protection_supported(file.as_raw_handle(), winapi::PAGE_READWRITE); let mut access = winapi::FILE_MAP_READ | winapi::FILE_MAP_EXECUTE; let protection = if write { access |= winapi::FILE_MAP_WRITE; winapi::PAGE_EXECUTE_READWRITE } else { winapi::PAGE_EXECUTE_READ }; let mut inner = MmapInner::new(file, protection, access, offset, len, false)?; if write { inner.make_exec()?; } Ok(inner) }
function_block-full_function
[ { "content": "fn main() {\n\n env_logger::init().unwrap();\n\n\n\n info!(\"All is well.\"); \n\n}\n", "file_path": "example/example.rs", "rank": 0, "score": 15112.634085037585 }, { "content": "fn main() {\n\n use std::io::Read;\n\n\n\n let mut resp = reqwest::get(\"https://www.rust-lang.org\").unwrap();\n\n assert!(resp.status().is_success());\n\n\n\n let mut content = String::new();\n\n resp.read_to_string(&mut content);\n\n println!(\"{}\", content);\n\n}\n", "file_path": "example/http/http.rs", "rank": 1, "score": 14540.384839861434 }, { "content": "\n\n#[macro_use] extern crate failure;\n\n#[macro_use] extern crate log;\n\nextern crate env_logger;\n\n\n\n#[derive(Debug, Fail)]\n", "file_path": "example/example.rs", "rank": 2, "score": 2.7741067689468926 }, { "content": "extern crate reqwest;\n\n\n", "file_path": "example/http/http.rs", "rank": 3, "score": 1.8675757520888447 } ]
Rust
src/services/mod.rs
eduardocanellas/lagoinha-rs
1b6fbc47bcc9801e1b9e0b55808a0e0619742fa0
pub mod cepla; pub mod correios; pub mod viacep; extern crate serde; use serde::{Deserialize, Serialize}; #[derive(Deserialize, Serialize, Debug)] pub struct Address { pub cep: String, pub address: String, pub details: String, pub neighborhood: String, pub state: String, pub city: String, } pub trait Addressable { fn to_address(&self) -> Address; } impl Addressable for viacep::Address { fn to_address(&self) -> Address { let addr = Address { cep: self.cep.clone(), address: self.address.clone(), details: self.details.clone(), neighborhood: self.neighborhood.clone(), state: self.state.clone(), city: self.city.clone(), }; addr } } impl Addressable for correios::Address { fn to_address(&self) -> Address { let addr = Address { cep: self.cep.clone(), address: self.address.clone(), details: "".to_string(), neighborhood: self.neighborhood.clone(), state: self.state.clone(), city: self.city.clone(), }; addr } } impl Addressable for cepla::Address { fn to_address(&self) -> Address { let addr = Address { cep: self.cep.clone(), address: self.address.clone(), details: self.details.clone(), neighborhood: self.neighborhood.clone(), state: self.state.clone(), city: self.city.clone(), }; addr } } #[cfg(test)] mod tests { use super::cepla; use super::correios; use super::viacep; use super::Addressable; #[test] fn viacep_conversion() { let viac_addr = viacep::Address { cep: "70150-903".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial do Presidente da República)" .to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), city: "Brasília".to_string(), state: "DF".to_string(), unidade: "".to_string(), ibge: "5300108".to_string(), gia: "".to_string(), }; let viac_addr = viac_addr.to_address(); let addr = super::Address { cep: "70150-903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial 
do Presidente da República)" .to_string(), }; assert_eq!(addr.address, viac_addr.address); assert_eq!(addr.state, viac_addr.state); assert_eq!(addr.neighborhood, viac_addr.neighborhood); assert_eq!(addr.city, viac_addr.city); assert_eq!(addr.cep, viac_addr.cep); assert_eq!(addr.details, viac_addr.details); } #[test] fn correios_conversion() { let corr_addr = correios::Address { cep: "70150903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), }; let corr_addr = corr_addr.to_address(); let addr = super::Address { cep: "70150903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial do Presidente da República)" .to_string(), }; assert_eq!(addr.address, corr_addr.address); assert_eq!(addr.state, corr_addr.state); assert_eq!(addr.neighborhood, corr_addr.neighborhood); assert_eq!(addr.city, corr_addr.city); assert_eq!(addr.cep, corr_addr.cep); } #[test] fn cepla_conversion() { let cepl_addr = cepla::Address { cep: "70150903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial do Presidente da República)" .to_string(), }; let cepl_addr = cepl_addr.to_address(); let addr = super::Address { cep: "70150903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial do Presidente da República)" .to_string(), }; assert_eq!(addr.address, cepl_addr.address); assert_eq!(addr.state, cepl_addr.state); assert_eq!(addr.neighborhood, cepl_addr.neighborhood); assert_eq!(addr.city, cepl_addr.city); assert_eq!(addr.cep, cepl_addr.cep); 
assert_eq!(addr.details, cepl_addr.details); } }
pub mod cepla; pub mod correios; pub mod viacep; extern crate serde; use serde::{Deserialize, Serialize}; #[derive(Deserialize, Serialize, Debug)] pub struct Address { pub cep: String, pub address: String, pub details: String, pub neighborhood: String, pub state: String, pub city: String, } pub trait Addressable { fn to_address(&self) -> Address; } impl Addressable for viacep::Address { fn to_address(&self) -> Address { let addr = Address { cep: self.cep.clone(), address: self.address.clone(), details: self.details.clone(), neighborhood: self.neighborhood.clone(), state: self.state.clone(), city: self.city.clone(), }; addr } } impl Addressable for correios::Address { fn to_address(&self) -> Address { let addr = Address { cep: self.cep.clone(), address: self.address.clone(), details: "".to_string(), neighborhood: self.neighborhood.clone(), state: self.state.clone(), city: self.city.clone(), }; addr } } impl Addressable for cepla::Address { fn to_address(&self) -> Address { let addr = Address { cep: self.cep.clone(), address: self.address.clone(), details: self.details.clone(), neighborhood: self.neighborhood.clone(), state: self.state.clone(), city: self.city.clone(), }; addr } } #[cfg(test)] mod tests { use super::cepla; use super::correios; use super::viacep; use super::Addressable; #[test] fn viacep_conversion() { let viac_addr = viacep::Address { cep: "70150-903".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial do Presidente da República)" .to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), city: "Brasília".to_string(), state: "DF".to_string(), unidade: "".to_string(), ibge: "5300108".to_string(), gia: "".to_string(), }; let viac_addr = viac_addr.to_address(); let addr = super::Address { cep: "70150-903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial 
do Presidente da República)" .to_string(), }; assert_eq!(addr.address, viac_addr.address); assert_eq!(addr.state, viac_addr.state); assert_eq!(addr.neighborhood, viac_addr.neighborhood); assert_eq!(addr.city, viac_addr.city); assert_eq!(addr.cep, viac_addr.cep); assert_eq!(addr.details, viac_addr.details); } #[test] fn correios_conversion() { let corr_addr = correios::Address { cep: "70150903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), }; let corr_addr = corr_addr.to_address(); let addr = super::Address { cep: "70150903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial do Presidente da República)" .to_string(), }; assert_eq!(addr.address, corr_addr.address); assert_eq!(addr.state, corr_addr.state); assert_eq!(addr.neighborhood, corr_addr.neighborhood); assert_eq!(addr.city, corr_addr.city); assert_eq!(addr.cep, corr_addr.cep); } #[test
ails); } }
] fn cepla_conversion() { let cepl_addr = cepla::Address { cep: "70150903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial do Presidente da República)" .to_string(), }; let cepl_addr = cepl_addr.to_address(); let addr = super::Address { cep: "70150903".to_string(), state: "DF".to_string(), city: "Brasília".to_string(), neighborhood: "Zona Cívico-Administrativa".to_string(), address: "SPP".to_string(), details: "Palácio da Alvorada (Residência Oficial do Presidente da República)" .to_string(), }; assert_eq!(addr.address, cepl_addr.address); assert_eq!(addr.state, cepl_addr.state); assert_eq!(addr.neighborhood, cepl_addr.neighborhood); assert_eq!(addr.city, cepl_addr.city); assert_eq!(addr.cep, cepl_addr.cep); assert_eq!(addr.details, cepl_addr.det
random
[ { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut cep: &str = \"20940040\".as_ref();\n\n if args.len() >= 2 {\n\n cep = &args[1][..];\n\n }\n\n let addr = async_std::task::block_on(lagoinha::get_address(cep, None));\n\n println!(\"{:#?}\", addr);\n\n}\n", "file_path": "examples/get_address.rs", "rank": 1, "score": 52829.636756760985 }, { "content": "#[derive(Deserialize, Serialize, Debug)]\n\nstruct ReturnTag {\n\n #[serde(rename = \"return\")]\n\n pub return_tag: Address,\n\n}\n\n\n\n/// Address struct used to deserialize the results from the correios API\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct Address {\n\n #[serde(rename = \"cep\", default = \"String::new\")]\n\n pub cep: String,\n\n #[serde(rename = \"uf\", default = \"String::new\")]\n\n pub state: String,\n\n #[serde(rename = \"cidade\", default = \"String::new\")]\n\n pub city: String,\n\n #[serde(rename = \"bairro\", default = \"String::new\")]\n\n pub neighborhood: String,\n\n #[serde(rename = \"end\", default = \"String::new\")]\n\n pub address: String,\n\n}\n\n\n", "file_path": "src/services/correios.rs", "rank": 2, "score": 48388.42251867929 }, { "content": "#[derive(Deserialize, Serialize, Debug)]\n\nstruct BodyTag {\n\n #[serde(rename = \"Body\")]\n\n pub body_tag: ConsultTag,\n\n}\n\n\n", "file_path": "src/services/correios.rs", "rank": 3, "score": 48388.42251867929 }, { "content": "#[derive(Deserialize, Serialize, Debug)]\n\nstruct ConsultTag {\n\n #[serde(rename = \"consultaCEPResponse\")]\n\n pub consult_tag: ReturnTag,\n\n}\n\n\n", "file_path": "src/services/correios.rs", "rank": 4, "score": 48388.42251867929 }, { "content": "// examples/get_address.rs\n\n//!Run `run --example get_address yourcep` to run this example\n\nuse lagoinha;\n\n\n\nuse std::env;\n\n\n", "file_path": "examples/get_address.rs", "rank": 5, "score": 23188.52397965868 }, { "content": " #[test]\n\n fn valid_cepla() {\n\n let resaddr = 
async_std::task::block_on(super::request(\"70150903\")).unwrap();\n\n\n\n let addr = super::Address {\n\n cep: \"70150903\".to_string(),\n\n state: \"DF\".to_string(),\n\n city: \"Brasília\".to_string(),\n\n neighborhood: \"Zona Cívico-Administrativa\".to_string(),\n\n address: \"SPP\".to_string(),\n\n details: \"Palácio da Alvorada (Residência Oficial do Presidente da República)\"\n\n .to_string(),\n\n };\n\n\n\n assert_eq!(addr.address, resaddr.address);\n\n assert_eq!(addr.state, resaddr.state);\n\n assert_eq!(addr.neighborhood, resaddr.neighborhood);\n\n assert_eq!(addr.city, resaddr.city);\n\n assert_eq!(addr.cep, resaddr.cep);\n\n assert_eq!(addr.details, resaddr.details);\n", "file_path": "src/services/cepla.rs", "rank": 6, "score": 22813.82710921463 }, { "content": " }\n\n\n\n #[test]\n\n fn valid_cepla_with_dash() {\n\n let resaddr = async_std::task::block_on(super::request(\"70150-903\")).unwrap();\n\n\n\n let addr = super::Address {\n\n cep: \"70150903\".to_string(),\n\n state: \"DF\".to_string(),\n\n city: \"Brasília\".to_string(),\n\n neighborhood: \"Zona Cívico-Administrativa\".to_string(),\n\n address: \"SPP\".to_string(),\n\n details: \"Palácio da Alvorada (Residência Oficial do Presidente da República)\"\n\n .to_string(),\n\n };\n\n\n\n assert_eq!(addr.address, resaddr.address);\n\n assert_eq!(addr.state, resaddr.state);\n\n assert_eq!(addr.neighborhood, resaddr.neighborhood);\n\n assert_eq!(addr.city, resaddr.city);\n", "file_path": "src/services/cepla.rs", "rank": 7, "score": 22813.568758971014 }, { "content": "\n\n/// Address struct used to deserialize the results from the cepla API\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Address {\n\n #[serde(rename = \"cep\", default = \"String::new\")]\n\n pub cep: String,\n\n #[serde(rename = \"uf\", default = \"String::new\")]\n\n pub state: String,\n\n #[serde(rename = \"cidade\", default = \"String::new\")]\n\n pub city: String,\n\n #[serde(rename = \"bairro\", default = 
\"String::new\")]\n\n pub neighborhood: String,\n\n #[serde(rename = \"logradouro\", default = \"String::new\")]\n\n pub address: String,\n\n #[serde(rename = \"aux\", default = \"String::new\")]\n\n pub details: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/services/cepla.rs", "rank": 8, "score": 22809.04935491074 }, { "content": " assert_eq!(addr.cep, resaddr.cep);\n\n assert_eq!(addr.details, resaddr.details);\n\n }\n\n\n\n use crate::error::Kind;\n\n use crate::error::Source;\n\n #[test]\n\n fn invalid_input_viacep() {\n\n let resaddr = async_std::task::block_on(super::request(\"123\"));\n\n assert!(resaddr.is_err());\n\n resaddr\n\n .map_err(|err| {\n\n assert_eq!(err.source, Source::Cepla);\n\n assert_eq!(\n\n std::mem::discriminant(&err.kind),\n\n std::mem::discriminant(&Kind::BodyParsingError {\n\n error: \"\".to_owned(),\n\n body: \"\".to_owned(),\n\n })\n\n );\n\n })\n\n .ok();\n\n }\n\n}\n", "file_path": "src/services/cepla.rs", "rank": 9, "score": 22795.20385747771 }, { "content": "//! CepLá service: http://cep.la/\n\n//!\n\n//! This service has an out os [spec](https://tools.ietf.org/html/rfc2616#section-4.2) header implementation,\n\n//! and does not comply with the [RFC2616](https://tools.ietf.org/html/rfc2616#section-4.2).\n\n//! This causes an issue when using it with libraries, like Hyper, because they parse all headers to lower case.\n\n//! 
To solve this issue, the title_case_headers(true) option was used.\n\n\n\nuse crate::error::Error;\n\nuse crate::error::Kind;\n\nuse crate::error::Source::Cepla;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse isahc::{config::Configurable, ReadResponseExt, Request, RequestExt};\n\n\n\n/// request function runs the API call to cepla service\n\npub async fn request(cep: &str) -> Result<Address, Error> {\n\n let uri = format!(\"http://cep.la/{}\", cep);\n\n let req = Request::get(uri)\n\n .title_case_headers(true)\n", "file_path": "src/services/cepla.rs", "rank": 10, "score": 22793.95150880859 }, { "content": "\n\n let address = serde_json::from_reader(body);\n\n match address {\n\n Ok(address) => return Ok(address),\n\n Err(e) => {\n\n let str_body = response.text();\n\n let str_body = match str_body {\n\n Ok(str_body) => str_body,\n\n Err(e) => \"Failed to produce string body \".to_owned() + e.to_string().as_str(),\n\n };\n\n return Err(Error {\n\n kind: Kind::BodyParsingError {\n\n error: e.to_string(),\n\n body: str_body.to_string(),\n\n },\n\n source: Cepla,\n\n });\n\n }\n\n };\n\n}\n", "file_path": "src/services/cepla.rs", "rank": 11, "score": 22788.52520267897 }, { "content": " .header(\"Accept\", \"application/json\")\n\n .body(())\n\n .or(Err(Error {\n\n kind: Kind::UnexpectedLibraryError,\n\n source: Cepla,\n\n }))?;\n\n\n\n let mut response = req.send().or(Err(Error {\n\n kind: Kind::MissingBodyError,\n\n source: Cepla,\n\n }))?;\n\n\n\n match response.status().as_u16() {\n\n 200..=299 => (),\n\n 400..=499 => {\n\n return Err(Error {\n\n kind: Kind::ClientError {\n\n code: response.status().as_u16(),\n\n },\n\n source: Cepla,\n", "file_path": "src/services/cepla.rs", "rank": 12, "score": 22782.515508510103 }, { "content": " });\n\n }\n\n 500..=599 => {\n\n return Err(Error {\n\n kind: Kind::ServerError {\n\n code: response.status().as_u16(),\n\n },\n\n source: Cepla,\n\n });\n\n }\n\n _ => {\n\n return Err(Error {\n\n kind: Kind::UnknownServerError 
{\n\n code: response.status().as_u16(),\n\n },\n\n source: Cepla,\n\n });\n\n }\n\n }\n\n let body = response.body_mut();\n", "file_path": "src/services/cepla.rs", "rank": 13, "score": 22782.409223671842 }, { "content": " assert_eq!(addr.state, resaddr.state);\n\n assert_eq!(addr.unidade, resaddr.unidade);\n\n assert_eq!(addr.ibge, resaddr.ibge);\n\n assert_eq!(addr.gia, resaddr.gia);\n\n }\n\n\n\n #[test]\n\n fn valid_viacep_with_dash() {\n\n let resaddr = async_std::task::block_on(super::request(\"70150-903\")).unwrap();\n\n\n\n let addr = super::Address {\n\n cep: \"70150-903\".to_string(),\n\n address: \"SPP\".to_string(),\n\n details: \"\".to_string(),\n\n neighborhood: \"Zona Cívico-Administrativa\".to_string(),\n\n city: \"Brasília\".to_string(),\n\n state: \"DF\".to_string(),\n\n unidade: \"\".to_string(),\n\n ibge: \"5300108\".to_string(),\n\n gia: \"\".to_string(),\n", "file_path": "src/services/viacep.rs", "rank": 18, "score": 22704.3046868987 }, { "content": " };\n\n\n\n assert_eq!(addr.cep, resaddr.cep);\n\n assert_eq!(addr.address, resaddr.address);\n\n assert_eq!(addr.details, resaddr.details);\n\n assert_eq!(addr.neighborhood, resaddr.neighborhood);\n\n assert_eq!(addr.city, resaddr.city);\n\n assert_eq!(addr.state, resaddr.state);\n\n assert_eq!(addr.unidade, resaddr.unidade);\n\n assert_eq!(addr.ibge, resaddr.ibge);\n\n assert_eq!(addr.gia, resaddr.gia);\n\n }\n\n\n\n use crate::error::Error;\n\n use crate::error::Kind;\n\n use crate::error::Source;\n\n #[test]\n\n fn invalid_input_viacep() {\n\n let resaddr = async_std::task::block_on(super::request(\"123\"));\n\n assert!(resaddr.is_err());\n", "file_path": "src/services/viacep.rs", "rank": 20, "score": 22703.741674861336 }, { "content": " pub address: String,\n\n #[serde(rename = \"complemento\", default = \"String::new\")]\n\n pub details: String,\n\n #[serde(rename = \"bairro\", default = \"String::new\")]\n\n pub neighborhood: String,\n\n #[serde(rename = \"uf\", default = 
\"String::new\")]\n\n pub state: String,\n\n #[serde(rename = \"localidade\", default = \"String::new\")]\n\n pub city: String,\n\n #[serde(rename = \"unidade\", default = \"String::new\")]\n\n pub unidade: String,\n\n #[serde(rename = \"ibge\", default = \"String::new\")]\n\n pub ibge: String,\n\n #[serde(rename = \"gia\", default = \"String::new\")]\n\n pub gia: String,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n", "file_path": "src/services/viacep.rs", "rank": 21, "score": 22702.781728859067 }, { "content": " fn valid_viacep() {\n\n let resaddr = async_std::task::block_on(super::request(\"70150903\")).unwrap();\n\n\n\n let addr = super::Address {\n\n cep: \"70150-903\".to_string(),\n\n address: \"SPP\".to_string(),\n\n details: \"\".to_string(),\n\n neighborhood: \"Zona Cívico-Administrativa\".to_string(),\n\n city: \"Brasília\".to_string(),\n\n state: \"DF\".to_string(),\n\n unidade: \"\".to_string(),\n\n ibge: \"5300108\".to_string(),\n\n gia: \"\".to_string(),\n\n };\n\n\n\n assert_eq!(addr.cep, resaddr.cep);\n\n assert_eq!(addr.address, resaddr.address);\n\n assert_eq!(addr.details, resaddr.details);\n\n assert_eq!(addr.neighborhood, resaddr.neighborhood);\n\n assert_eq!(addr.city, resaddr.city);\n", "file_path": "src/services/viacep.rs", "rank": 22, "score": 22702.45188245661 }, { "content": " Ok(str_body) => str_body,\n\n Err(_) => \"Failed to produce string body \".to_owned() + e.to_string().as_str(),\n\n };\n\n return Err(Error {\n\n kind: Kind::BodyParsingError {\n\n error: e.to_string(),\n\n body: str_body.to_string(),\n\n },\n\n source: Viacep,\n\n });\n\n }\n\n };\n\n}\n\n\n\n/// Address struct used to deserialize the results from the viacep API\n\n#[derive(Deserialize, Serialize, Debug)]\n\npub struct Address {\n\n #[serde(rename = \"cep\", default = \"String::new\")]\n\n pub cep: String,\n\n #[serde(rename = \"logradouro\", default = \"String::new\")]\n", "file_path": "src/services/viacep.rs", "rank": 25, "score": 22694.201826855846 }, { 
"content": "//! Viacep service: https://viacep.com.br/\n\n\n\nuse crate::error::Error;\n\nuse crate::error::Kind;\n\nuse crate::error::Source::Viacep;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse isahc::{ReadResponseExt, Request, RequestExt};\n\n\n\n/// request function runs the API call to Viacep service\n\npub async fn request(cep: &str) -> Result<Address, Error> {\n\n let uri = format!(\"https://viacep.com.br/ws/{}/json/\", cep);\n\n let req = Request::get(uri)\n\n .header(\"Accept\", \"application/json\")\n\n .body(())\n\n .or(Err(Error {\n\n kind: Kind::UnexpectedLibraryError,\n\n source: Viacep,\n\n }))?;\n", "file_path": "src/services/viacep.rs", "rank": 27, "score": 22690.963473871772 }, { "content": " },\n\n source: Viacep,\n\n });\n\n }\n\n _ => {\n\n return Err(Error {\n\n kind: Kind::UnknownServerError {\n\n code: response.status().as_u16(),\n\n },\n\n source: Viacep,\n\n });\n\n }\n\n }\n\n let body = response.body_mut();\n\n let address = serde_json::from_reader(body);\n\n match address {\n\n Ok(address) => return Ok(address),\n\n Err(e) => {\n\n let str_body = response.text();\n\n let str_body = match str_body {\n", "file_path": "src/services/viacep.rs", "rank": 28, "score": 22681.882373271645 }, { "content": "\n\n let mut response = req.send().or(Err(Error {\n\n kind: Kind::MissingBodyError,\n\n source: Viacep,\n\n }))?;\n\n\n\n match response.status().as_u16() {\n\n 200..=299 => (),\n\n 400..=499 => {\n\n return Err(Error {\n\n kind: Kind::ClientError {\n\n code: response.status().as_u16(),\n\n },\n\n source: Viacep,\n\n });\n\n }\n\n 500..=599 => {\n\n return Err(Error {\n\n kind: Kind::ServerError {\n\n code: response.status().as_u16(),\n", "file_path": "src/services/viacep.rs", "rank": 29, "score": 22677.436435099622 }, { "content": " resaddr\n\n .map_err(|err| {\n\n assert_eq!(\n\n err,\n\n Error {\n\n source: Source::Viacep,\n\n kind: Kind::ClientError { code: 400 }\n\n }\n\n )\n\n })\n\n .ok();\n\n }\n\n}\n", "file_path": 
"src/services/viacep.rs", "rank": 30, "score": 22677.26480460059 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n fn valid_correios() {\n\n let resaddr = async_std::task::block_on(super::request(\"70150903\")).unwrap();\n\n\n\n let addr = super::Address {\n\n cep: \"70150903\".to_string(),\n\n state: \"DF\".to_string(),\n\n city: \"Brasília\".to_string(),\n\n neighborhood: \"Zona Cívico-Administrativa\".to_string(),\n\n address: \"SPP\".to_string(),\n\n };\n\n\n\n assert_eq!(addr.cep, resaddr.cep);\n\n assert_eq!(addr.state, resaddr.state);\n\n assert_eq!(addr.city, resaddr.city);\n\n assert_eq!(addr.neighborhood, resaddr.neighborhood);\n\n assert_eq!(addr.address, resaddr.address);\n\n }\n", "file_path": "src/services/correios.rs", "rank": 31, "score": 22484.825000701927 }, { "content": "\n\n #[test]\n\n fn valid_correios_with_dash() {\n\n let resaddr = async_std::task::block_on(super::request(\"70150-903\")).unwrap();\n\n\n\n let addr = super::Address {\n\n cep: \"70150903\".to_string(),\n\n state: \"DF\".to_string(),\n\n city: \"Brasília\".to_string(),\n\n neighborhood: \"Zona Cívico-Administrativa\".to_string(),\n\n address: \"SPP\".to_string(),\n\n };\n\n\n\n assert_eq!(addr.cep, resaddr.cep);\n\n assert_eq!(addr.state, resaddr.state);\n\n assert_eq!(addr.city, resaddr.city);\n\n assert_eq!(addr.neighborhood, resaddr.neighborhood);\n\n assert_eq!(addr.address, resaddr.address);\n\n }\n\n\n", "file_path": "src/services/correios.rs", "rank": 32, "score": 22482.45485370842 }, { "content": "//! 
Correios service: http://www.buscacep.correios.com.br/sistemas/buscacep/BuscaCepEndereco.cfm\n\n\n\nuse isahc::{ReadResponseExt, Request, RequestExt};\n\n\n\nuse crate::error::Error;\n\nuse crate::error::Kind;\n\nuse crate::error::Source::Correios;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// request function runs the API call to correios service\n\npub async fn request(cep: &str) -> Result<Address, Error> {\n\n let payload = format!(\n\n r#\"\n\n <soapenv:Envelope xmlns:soapenv=\"http://schemas.xmlsoap.org/soap/envelope/\" xmlns:cli=\"http://cliente.bean.master.sigep.bsb.correios.com.br/\">\n\n <soapenv:Header/>\n\n <soapenv:Body>\n\n <cli:consultaCEP>\n\n <cep>{}</cep>\n\n </cli:consultaCEP>\n", "file_path": "src/services/correios.rs", "rank": 33, "score": 22475.249557381285 }, { "content": " Ok(str_body) => str_body,\n\n Err(_) => \"Failed to produce string body \".to_owned() + e.to_string().as_str(),\n\n };\n\n return Err(Error {\n\n kind: Kind::BodyParsingError {\n\n error: e.to_string(),\n\n body: str_body.to_string(),\n\n },\n\n source: Correios,\n\n });\n\n }\n\n };\n\n}\n\n\n\n// these structs are used to define the entire path to the XML. 
There must be a better way to do this...\n\n// only the Address struct is useful.\n\n#[derive(Deserialize, Serialize, Debug)]\n", "file_path": "src/services/correios.rs", "rank": 34, "score": 22472.21477803514 }, { "content": " use crate::error::Error;\n\n use crate::error::Kind;\n\n use crate::error::Source;\n\n #[test]\n\n fn invalid_input_correios() {\n\n let resaddr = async_std::task::block_on(super::request(\"123\"));\n\n assert!(resaddr.is_err());\n\n resaddr\n\n .map_err(|err| {\n\n assert_eq!(\n\n err,\n\n Error {\n\n source: Source::Correios,\n\n kind: Kind::ServerError { code: 500 }\n\n }\n\n )\n\n })\n\n .ok();\n\n }\n\n}\n", "file_path": "src/services/correios.rs", "rank": 35, "score": 22469.566460070106 }, { "content": " _ => {\n\n return Err(Error {\n\n kind: Kind::UnknownServerError {\n\n code: response.status().as_u16(),\n\n },\n\n source: Correios,\n\n });\n\n }\n\n }\n\n\n\n let body = response.body_mut();\n\n\n\n let correios_data: Result<BodyTag, serde_xml_rs::Error> = serde_xml_rs::from_reader(body);\n\n match correios_data {\n\n Ok(correios_data) => {\n\n return Ok(correios_data.body_tag.consult_tag.return_tag);\n\n }\n\n Err(e) => {\n\n let str_body = response.text();\n\n let str_body = match str_body {\n", "file_path": "src/services/correios.rs", "rank": 36, "score": 22465.022084895678 }, { "content": " </soapenv:Body>\n\n </soapenv:Envelope>\n\n \"#,\n\n cep\n\n );\n\n\n\n let req = Request::post(\n\n \"https://apps.correios.com.br/SigepMasterJPA/AtendeClienteService/AtendeCliente?wsdl\",\n\n )\n\n .header(\"content-type\", \"application/soap+xml;charset=utf-8\")\n\n .header(\"cache-control\", \"no-cache\")\n\n .body(payload)\n\n .or(Err(Error {\n\n kind: Kind::UnexpectedLibraryError,\n\n source: Correios,\n\n }))?;\n\n\n\n let mut response = req.send().or(Err(Error {\n\n kind: Kind::MissingBodyError,\n\n source: Correios,\n", "file_path": "src/services/correios.rs", "rank": 37, "score": 22463.657991830383 }, { "content": " }))?;\n\n\n\n 
match response.status().as_u16() {\n\n 200..=299 => (),\n\n 400..=499 => {\n\n return Err(Error {\n\n kind: Kind::ClientError {\n\n code: response.status().as_u16(),\n\n },\n\n source: Correios,\n\n });\n\n }\n\n 500..=599 => {\n\n return Err(Error {\n\n kind: Kind::ServerError {\n\n code: response.status().as_u16(),\n\n },\n\n source: Correios,\n\n });\n\n }\n", "file_path": "src/services/correios.rs", "rank": 38, "score": 22462.294144223946 }, { "content": "// examples/get_address.rs\n\n//!Run `run --example get_address_tokio yourcep` to run this example\n\nuse lagoinha;\n\n\n\nuse std::env;\n\nuse tokio;\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut cep: &str = \"20940040\".as_ref();\n\n if args.len() >= 2 {\n\n cep = &args[1][..];\n\n }\n\n let addr = lagoinha::get_address(cep, None).await;\n\n println!(\"{:#?}\", addr);\n\n}\n", "file_path": "examples/get_address_tokio.rs", "rank": 39, "score": 22043.595409209865 }, { "content": " e3: format!(\"{}\", error_list[2]),\n\n },\n\n })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::error;\n\n\n\n #[tokio::test]\n\n async fn test_channels_tokio() {\n\n let addr = super::services::Address {\n\n cep: \"70150903\".to_string(),\n\n address: \"SPP\".to_string(),\n\n details: \"\".to_string(),\n\n neighborhood: \"Zona Cívico-Administrativa\".to_string(),\n\n city: \"Brasília\".to_string(),\n\n state: \"DF\".to_string(),\n\n };\n\n\n", "file_path": "src/lib.rs", "rank": 40, "score": 27.10616399385183 }, { "content": " let recv_addr = super::get_address(\"70150903\", None).await.unwrap();\n\n assert_eq!(addr.city, recv_addr.city);\n\n assert_eq!(addr.state, recv_addr.state);\n\n assert_eq!(addr.neighborhood, recv_addr.neighborhood);\n\n // the other fields, like cep can come with different formating\n\n }\n\n\n\n #[test]\n\n fn test_channels_async_std() {\n\n let addr = super::services::Address {\n\n cep: \"70150903\".to_string(),\n\n address: 
\"SPP\".to_string(),\n\n details: \"\".to_string(),\n\n neighborhood: \"Zona Cívico-Administrativa\".to_string(),\n\n city: \"Brasília\".to_string(),\n\n state: \"DF\".to_string(),\n\n };\n\n\n\n let recv_addr = async_std::task::block_on(super::get_address(\"70150903\", None)).unwrap();\n\n assert_eq!(addr.city, recv_addr.city);\n", "file_path": "src/lib.rs", "rank": 41, "score": 22.236233686284415 }, { "content": "#![crate_name = \"lagoinha\"]\n\n//! Lagoinha is a library that retrieves Addresses from the Brazilian Postal Code (CEP) using multiple APIs asynchronously, and returns the result from the first one to respond.\n\n//! It uses async/.await and the Futures library for its asyncronous features, and can be used with most runtimes.\n\n//!\n\n//! # Services\n\n//!\n\n//! Currently the services used are : correios, viacep and cepla\n\n//! It is expected to support adding a custom service to the pool in the future, and the ability to disable the default ones.\n\n//!\n\n//! While the default http library is Hyper, the CepLá service has an issue with its header implementation, and so the curl library was used. More information in the docs for this service.\n\n//!\n\n//! # Example\n\n//! ```\n\n//!extern crate lagoinha;\n\n//!extern crate tokio;\n\n//!\n\n//!#[tokio::main]\n\n//!async fn main() { \n\n//! let addr = lagoinha::get_address(\"70150903\", None).await;\n\n//! 
println!(\"{:#?}\", addr);\n", "file_path": "src/lib.rs", "rank": 42, "score": 16.959834755582975 }, { "content": "use std::error::Error as StdError;\n\nuse std::fmt;\n\n#[derive(PartialEq, Debug)]\n\n/// Source represents from what component the error came (core lib, or the respective services)\n\npub enum Source {\n\n Viacep,\n\n Correios,\n\n Cepla,\n\n LagoinhaLib,\n\n}\n\n\n\nimpl fmt::Display for Source {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Source::Viacep => write!(f, \"Viacep\"),\n\n Source::Correios => write!(f, \"Correios\"),\n\n Source::Cepla => write!(f, \"Cepla\"),\n\n Source::LagoinhaLib => write!(f, \"Lagoinha\"),\n\n }\n\n }\n", "file_path": "src/error.rs", "rank": 43, "score": 15.394955219143341 }, { "content": "// examples/standalone_services.rs\n\n//!Run `run --example standalone_services yourcep` to run this example\n\nuse lagoinha;\n\n// optional trait for standard type conversion\n\nuse lagoinha::services::Addressable;\n\n\n\nuse std::env;\n\nuse tokio;\n\n#[tokio::main]\n\nasync fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n let mut cep: &str = \"20940040\".as_ref();\n\n print!(\"{}\", args.len());\n\n if args.len() >= 2 {\n\n cep = &args[1][..];\n\n }\n\n let addr = lagoinha::services::viacep::request(cep).await;\n\n println!(\"\\nviacep\");\n\n println!(\"{:#?}\", addr);\n\n // optinal to_address from Addressable trait converts specific address to general address\n", "file_path": "examples/standalone_services.rs", "rank": 44, "score": 14.668530433460692 }, { "content": "<p align=\"center\">\n\n <!-- <img src=\"assets/logo.png\" width=\"100px\" /> -->\n\n <h3 align=\"center\">\n\n Lagoinha-rs\n\n </h3>\n\n <p align=\"center\">\n\n Rust library that returns addresses from the Brazilian Postal Code (CEP) <br/>\n\n using the following APIs: Correios, ViaCEP, Cepla\n\n </p>\n\n <p align=\"center\">\n\n Readme in <a href=\"README-pt.md\">Português</a>\n\n </p>\n\n</p>\n\n\n\n> 
Rust project inspired by https://github.com/IgorHalfeld/lagoinha used to retrieve Addresses from the Brazilian Postal Code (CEP)\n\n\n\n✔️ - Contributions and reviews are appreciated !\n\n\n\n---\n\n\n\n![CI](https://github.com/auyer/lagoinha-rs/workflows/CI/badge.svg)\n\n[![crates.io](https://meritbadge.herokuapp.com/lagoinha)](https://crates.io/crates/lagoinha)\n\n[![API docs](https://docs.rs/lagoinha/badge.svg)](https://docs.rs/lagoinha)\n\n[![MIT licensed](https://img.shields.io/badge/license-MIT-blue.svg)](./LICENSE)\n\n\n\nLagoinha is a package that uses public APIs to fetch addresses using the Brazilian Postal Code (CEP). This package concurrently calls all the supported APIs and returns the first result to arrive.\n\n\n\n### Diagram:\n\n\n\n![lagoinha call fluxogram](.github/assets/lagoinha-calls.png)\n\n\n\n### Why this name ?\n\n\n\nIt means \"little pond\". It is a Brazillian meme ! Check the [vídeo](https://www.youtube.com/watch?v=C1Sd_RWF5ks)!\n\n\n\n### Instalation\n\n\n\n```toml\n\nlagoinha-rs = \"0.2\"\n\n```\n\n\n\n### How to use it\n\n```rust\n\nuse lagoinha;\n\nuse async_std;\n\n\n\nfn main() {\n\n let addr = async_std::task::block_on(lagoinha::get_address(\"CEP_GOES_HERE\"));\n\n println!(\"{:#?}\", addr);\n\n}\n\n```\n\nThe first method should be similar for any async runtime.\n\n\n\nOr in an Async function (example using Tokio):\n\n\n\n```rust\n\nuse lagoinha;\n\nuse tokio;\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let addr = lagoinha::get_address(\"CEP_GOES_HERE\", None).await;\n\n println!(\"{:#?}\", addr);\n\n}\n\n```\n\n\n\n### Run Examples\n\n\n\nCheck the [examples folder](examples/) !\n\nTo run them, use the commands below.\n\n\n\n```bash\n", "file_path": "README.md", "rank": 45, "score": 13.889754969273843 }, { "content": " println!(\"\\n--converted:\");\n\n println!(\"{:#?}\", addr.unwrap().to_address());\n\n\n\n let addr = lagoinha::services::correios::request(cep).await;\n\n println!(\"\\ncorreios\");\n\n println!(\"{:#?}\", 
addr);\n\n // optinal to_address from Addressable trait converts specific address to general address\n\n println!(\"\\n--converted:\");\n\n println!(\"{:#?}\", addr.unwrap().to_address());\n\n\n\n let addr = lagoinha::services::cepla::request(cep).await;\n\n println!(\"\\ncepla\");\n\n println!(\"{:#?}\", addr);\n\n // optinal to_address from Addressable trait converts specific address to general address\n\n println!(\"\\n--converted:\");\n\n println!(\"{:#?}\", addr.unwrap().to_address());\n\n}\n", "file_path": "examples/standalone_services.rs", "rank": 46, "score": 13.466116085769743 }, { "content": " None => 2 as u64,\n\n };\n\n\n\n let (tx, mut rx) = mpsc::channel::<Result<services::Address, Error>>(3);\n\n\n\n select! {\n\n () = service_channel_request(services::viacep::request(cep), error_timeout, tx.clone()).fuse() => \"viacep\",\n\n () = service_channel_request(services::correios::request(cep), error_timeout, tx.clone()).fuse() => \"correios\",\n\n () = service_channel_request(services::cepla::request(cep), error_timeout, tx.clone()).fuse() => \"cepla\",\n\n };\n\n\n\n let mut error_list: Vec<Error> = Vec::new();\n\n\n\n for _ in 0..3 {\n\n let read = rx.try_next();\n\n match read {\n\n Ok(read_address) => match read_address {\n\n Some(read_address) => match read_address {\n\n Ok(addr) => return Ok(addr),\n\n Err(e) => error_list.push(e),\n", "file_path": "src/lib.rs", "rank": 47, "score": 12.374813981666108 }, { "content": "<p align=\"center\">\n\n <!-- <img src=\"assets/logo.png\" width=\"100px\" /> -->\n\n <h3 align=\"center\">\n\n Lagoinha-rs\n\n </h3>\n\n <p align=\"center\">\n\n Lib Rust que retorna um endereço a partir do CEP <br/>\n\n utilizando as APIs públicas : Correios, ViaCEP, Ceplá\n\n </p>\n\n <p align=\"center\">\n\n Readme in <a href=\"README.md\">English</a>\n\n </p>\n\n</p>\n\n\n\n> Rust project inspired by https://github.com/IgorHalfeld/lagoinha used to retrieve Addresses from the Brazilian Postal Code (CEP)\n\n\n\n✔️ - Contributions and 
reviews are appreciated !\n\n\n\n---\n\n\n\n![CI](https://github.com/auyer/lagoinha-rs/workflows/CI/badge.svg)\n\n\n\nLagoinha é basicamente um pacote que usa as APIs públicas para pegar o endereço com base em um CEP. O que o pacote faz, é disparar pra todas as APIs ao mesmo tempo e retornar com o resultado da primeira API que responder.\n\n\n\n### Diagrama de funcionamento\n\n\n\n![lagoinha call fluxogram](.github/assets/lagoinha-calls.png)\n\n\n\n### Por que esse nome\n\n\n\nÉ simples, veja o [vídeo](https://www.youtube.com/watch?v=C1Sd_RWF5ks)!\n\n(onde é que eu tô, lagoinha, CEP, endereço...)\n\n\n\n### Instalação\n\n\n\n```toml\n\nlagoinha-rs = \"0.2\"\n\n```\n\n\n\n### How to use it\n\n```rust\n\nuse lagoinha;\n\nuse async_std;\n\n\n\nfn main() {\n\n let addr = async_std::task::block_on(lagoinha::get_address(\"CEP_GOES_HERE\", None));\n\n println!(\"{:#?}\", addr);\n\n}\n\n```\n\nO primeiro método deve ser similar para qualquer *async runtime*.\n\n\n\nOuem uma função Async (examplo usando Tokio):\n\n\n\n```rust\n\nuse lagoinha;\n\nuse tokio;\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let addr = lagoinha::get_address(\"CEP_GOES_HERE\", None).await;\n\n println!(\"{:#?}\", addr);\n\n}\n\n```\n\n\n\n### Run Examples\n\n\n\nCheck the [examples folder](examples/) !\n\nTo run them, use the commands below.\n\n\n\n```bash\n\n# these examples can be run with a specific CEP (or leave blank for default value)\n\ncargo run --example get_address 20940040\n\ncargo run --example get_address_tokio 20940040\n\ncargo run --example standalone_services 20940040\n\n\n\n```\n", "file_path": "README-pt.md", "rank": 48, "score": 11.674688082893386 }, { "content": "//!}\n\n//!```\n\n//!\n\n\n\npub mod error;\n\npub mod services;\n\nuse error::Error;\n\nuse error::Source::LagoinhaLib;\n\nuse services::Address;\n\nuse services::Addressable;\n\n\n\nuse async_std::task;\n\nuse futures::channel::mpsc;\n\nuse futures::{future::FutureExt, select, sink::SinkExt, Future};\n\nuse 
std::time::Duration;\n\n\n\nconst SEND_ERROR: &str =\n\n \"Failed awaiting channel send. This should not happen. Please contact the developer\";\n\n\n\nasync fn service_channel_request<Fut, Addr>(\n", "file_path": "src/lib.rs", "rank": 49, "score": 11.33519374078013 }, { "content": " assert_eq!(addr.state, recv_addr.state);\n\n assert_eq!(addr.neighborhood, recv_addr.neighborhood);\n\n // the other fields, like cep can come with different formating\n\n }\n\n\n\n // variant_eq is a test helper that checks if a and b are the same Enum variants, disregarding its values\n\n fn variant_eq<T>(a: &T, b: &T) -> bool {\n\n std::mem::discriminant(a) == std::mem::discriminant(b)\n\n }\n\n\n\n #[test]\n\n fn all_services_error() {\n\n let err = error::Error {\n\n source: error::Source::LagoinhaLib,\n\n kind: error::Kind::AllServicesReturnedErrors {\n\n e1: \"\".to_owned(),\n\n e2: \"\".to_owned(),\n\n e3: \"\".to_owned(),\n\n },\n\n };\n", "file_path": "src/lib.rs", "rank": 50, "score": 10.495420074772687 }, { "content": "# these examples can be run with a specific CEP (or leave blank for default value)\n\ncargo run --example get_address 20940040\n\ncargo run --example get_address_tokio 20940040\n\ncargo run --example standalone_services 20940040\n\n\n\n```\n\n\n\n### Note on the HTTP Client\n\n\n\nThis library uses [isahc](https://github.com/sagebind/isahc) as its http client because:\n\n1) It works in any async backend, and \n\n2) It offers a configuration option for Title-Case headers (necessary for CepLá)\n\n\n\n\n\n---\n\n\n\n## Todo\n\n\n\n- [x] Get Started\n\n- [x] Viacep service\n\n- [x] Correios service\n\n- [x] CepLá service\n\n- [x] Separate Two languages in README.md\n\n- [ ] Documentation\n\n- [x] Invest in better error handling\n\n- [x] Unhappy path testing\n\n- [ ] Validate input\n\n- [ ] Different compilation features\n\n- [ ] Abstractions: this will allow for mocking, and testing all paths without calls to the APIs\n\n- [ ] Allow user to implement custom 
services, and opt out of any of the defaults\n\n\n\n<!-- logo by [@nelsonsecco](https://twitter.com/nelsonsecco) -->\n", "file_path": "README.md", "rank": 51, "score": 9.062878645960495 }, { "content": " f: Fut,\n\n error_timeout: u64,\n\n mut tx: mpsc::Sender<Result<services::Address, Error>>,\n\n) where\n\n Fut: Future<Output = Result<Addr, Error>>,\n\n Addr: Addressable,\n\n{\n\n let addr = f.await;\n\n match addr {\n\n Ok(addr) => {\n\n tx.send(Ok(addr.to_address()))\n\n .await\n\n .map_err(|e| println!(\"{} with error: {}\", SEND_ERROR, e.to_string()))\n\n .ok();\n\n }\n\n Err(err) => {\n\n tx.send(Err(err))\n\n .await\n\n .map_err(|e| println!(\"{} with error: {}\", SEND_ERROR, e.to_string()))\n\n .ok();\n", "file_path": "src/lib.rs", "rank": 52, "score": 7.334537822392406 }, { "content": "}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Error {\n\n /// Source represents from what component the error came (core lib, or the respective services)\n\n pub source: Source,\n\n /// Kind indicates the error type\n\n pub kind: Kind,\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum Kind {\n\n /// UnknownServerError represents unmapped server errors, with the received code\n\n UnknownServerError { code: u16 },\n\n /// ServerError represents status codes in the 5xx range\n\n ServerError { code: u16 },\n\n /// ServerError represents status codes in the 4xx range\n\n ClientError { code: u16 },\n\n /// BodyParsingError represents an error where the received body does not match with the expected schema\n\n BodyParsingError { error: String, body: String },\n", "file_path": "src/error.rs", "rank": 53, "score": 6.77081499343548 }, { "content": "### Nota sobre o HTTP Client\n\n\n\nEssa lib usa [isahc](https://github.com/sagebind/isahc) com seu cliente http porquê:\n\n1) Ela funciona em qualquer runtime async,\n\n2) Ela oferece opção de configuração para Title-Case headers (necessário para CepLá)\n\n\n\n\n\n---\n\n\n\n## Todo\n\n\n\n- [x] Get Started\n\n- [x] Viacep 
service\n\n- [x] Correios service\n\n- [x] CepLá service\n\n- [x] Separate Two languages in README.md\n\n- [ ] Documentation\n\n- [x] Invest in better error handling\n\n- [ ] Unhappy path testing\n\n- [ ] Validate input\n\n- [ ] Different compilation features\n\n- [ ] Abstractions: this will allow for mocking, and testing all paths without calls to the APIs\n\n- [ ] Allow user to implement custom services, and opt out of any of the defaults\n\n\n\n<!-- logo by [@nelsonsecco](https://twitter.com/nelsonsecco) -->\n", "file_path": "README-pt.md", "rank": 54, "score": 6.37643713741771 }, { "content": " task::sleep(Duration::from_secs(error_timeout)).await;\n\n }\n\n }\n\n}\n\n\n\n/// get_address runs concurrent calls to available services requesting the address related to the provided `cep`,\n\n/// and with a error_timeout in seconds in case some services fail.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `cep` - A str pointer slice that holds the Brazilian postal code.\n\n/// * `error_timeout` - Option<u64> timeout in seconds in case some services come to fail. 
It defaults to 2 if None is provided, and has a minimum value of 1.\n\n/// This prevents early failures from cancelling possible success results from other services.\n\n///\n\npub async fn get_address(cep: &str, error_timeout: Option<u64>) -> Result<Address, Error> {\n\n let error_timeout = match error_timeout {\n\n Some(cd) => match cd {\n\n cd if cd < 2 => 1,\n\n cd => cd,\n\n },\n", "file_path": "src/lib.rs", "rank": 55, "score": 5.690392934360398 }, { "content": " /// AllServicesReturnedErrors indicates that each one of the called services returned an error\n\n AllServicesReturnedErrors { e1: String, e2: String, e3: String },\n\n /// MissingBodyError indicates that the respose had a missing body\n\n MissingBodyError,\n\n /// InputError is unused at the momment, but is intended to represent an error with the input\n\n InputError,\n\n /// UnexpectedLibraryError represents an unkown error in the library code\n\n UnexpectedLibraryError,\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self.kind {\n\n Kind::UnknownServerError { code } => {\n\n write!(\n\n f,\n\n \"Received an unknown error from server with code {} from service {}.\",\n\n code, self.source\n\n )\n\n }\n", "file_path": "src/error.rs", "rank": 56, "score": 3.926550301623141 }, { "content": " \"Received a result without a body from service {}.\",\n\n self.source\n\n )\n\n }\n\n Kind::InputError => {\n\n write!(f, \"The CEP is malformatted. It should follow this templates: 12345-678 or 12345678\")\n\n }\n\n Kind::UnexpectedLibraryError => {\n\n write!(f,\"Received an unexpected error from the library from service {}. Please send an issue in GitHub.\", self.source)\n\n }\n\n Kind::AllServicesReturnedErrors { e1, e2, e3 } => {\n\n write!(\n\n f,\n\n \"All services returned an error. 
\\n: {}, \\n: {}, \\n: {}\",\n\n e1, e2, e3\n\n )\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl StdError for Error {}\n", "file_path": "src/error.rs", "rank": 57, "score": 3.44005149135182 }, { "content": "\n\n let recv_err = async_std::task::block_on(super::get_address(\"123\", None)).unwrap_err();\n\n assert!(variant_eq(&recv_err.kind, &err.kind));\n\n assert!(variant_eq(&recv_err.source, &err.source));\n\n }\n\n\n\n #[tokio::test]\n\n async fn all_services_error_tokio() {\n\n let err = error::Error {\n\n source: error::Source::LagoinhaLib,\n\n kind: error::Kind::AllServicesReturnedErrors {\n\n e1: \"\".to_owned(),\n\n e2: \"\".to_owned(),\n\n e3: \"\".to_owned(),\n\n },\n\n };\n\n\n\n let recv_err = super::get_address(\"123\", None).await.unwrap_err();\n\n assert!(variant_eq(&recv_err.kind, &err.kind));\n\n assert!(variant_eq(&recv_err.source, &err.source));\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 58, "score": 3.100016995614282 } ]
Rust
src/operation.rs
Tak-Iwamoto/rusty-gql
8244d844f827a524cd91ba815fe5d113f3499927
use std::{ collections::HashMap, ops::Deref, sync::{Arc, Mutex}, }; use graphql_parser::{ query::{Definition, Document, FragmentDefinition, SelectionSet, VariableDefinition}, schema::Directive, }; use crate::{error::GqlError, Variables}; #[derive(Debug)] pub struct OperationInner<'a> { pub operation_type: OperationType, pub directives: Vec<Directive<'a, String>>, pub variable_definitions: Vec<VariableDefinition<'a, String>>, pub selection_set: SelectionSet<'a, String>, pub fragment_definitions: HashMap<String, FragmentDefinition<'a, String>>, pub errors: Mutex<Vec<GqlError>>, pub variables: Variables, } #[derive(Debug)] pub struct Operation<'a>(Arc<OperationInner<'a>>); impl<'a> Operation<'a> { pub fn new(operation: OperationInner<'a>) -> Operation<'a> { Operation(Arc::new(operation)) } } impl<'a> Deref for Operation<'a> { type Target = OperationInner<'a>; fn deref(&self) -> &Self::Target { &self.0 } } #[derive(Clone, Debug)] struct OperationDefinition<'a> { operation_type: OperationType, directives: Vec<Directive<'a, String>>, variable_definitions: Vec<VariableDefinition<'a, String>>, selection_set: SelectionSet<'a, String>, } #[derive(Clone, Debug)] pub enum OperationType { Query, Mutation, Subscription, } impl ToString for OperationType { fn to_string(&self) -> String { match self { OperationType::Query => String::from("Query"), OperationType::Mutation => String::from("Mutation"), OperationType::Subscription => String::from("Subscription"), } } } pub fn get_operation_definitions<'a>( doc: &'a Document<'a, String>, ) -> Vec<&'a graphql_parser::query::Definition<'a, String>> { doc.definitions .iter() .filter(|def| matches!(def, Definition::Operation(_))) .collect::<Vec<_>>() } pub fn build_operation<'a>( doc: &'a Document<'a, String>, operation_name: Option<String>, variables: Variables, ) -> Result<Operation<'a>, GqlError> { let mut fragment_definitions = HashMap::new(); for def in &doc.definitions { if let Definition::Fragment(fragment) = def { let name = 
fragment.name.to_string(); fragment_definitions.insert(name, fragment.to_owned()); } } if operation_name.is_none() && get_operation_definitions(doc).len() > 1 { return Err(GqlError::new( "Must provide operation name if multiple operation exist", None, )); }; let mut operation_definitions: HashMap<String, OperationDefinition> = HashMap::new(); let no_name_key = "no_operation_name"; for definition in doc.clone().definitions { if let Definition::Operation(operation) = definition { match operation { graphql_parser::query::OperationDefinition::SelectionSet(selection_set) => { operation_definitions.insert( no_name_key.to_string(), OperationDefinition { operation_type: OperationType::Query, selection_set, directives: vec![], variable_definitions: vec![], }, ); } graphql_parser::query::OperationDefinition::Query(query) => { let query_name = query.name.unwrap_or_else(|| no_name_key.to_string()); operation_definitions.insert( query_name, OperationDefinition { operation_type: OperationType::Query, selection_set: query.selection_set, directives: query.directives, variable_definitions: query.variable_definitions, }, ); } graphql_parser::query::OperationDefinition::Mutation(mutation) => { let mutation_name = mutation.name.unwrap_or_else(|| no_name_key.to_string()); operation_definitions.insert( mutation_name, OperationDefinition { operation_type: OperationType::Mutation, selection_set: mutation.selection_set, directives: mutation.directives, variable_definitions: mutation.variable_definitions, }, ); } graphql_parser::query::OperationDefinition::Subscription(subscription) => { let subscription_name = subscription.name.unwrap_or_else(|| no_name_key.to_string()); operation_definitions.insert( subscription_name, OperationDefinition { operation_type: OperationType::Subscription, selection_set: subscription.selection_set, directives: subscription.directives, variable_definitions: subscription.variable_definitions, }, ); } } } } match operation_name { Some(name) => { let target_def = 
operation_definitions.get(name.as_str()); match target_def { Some(definition) => { let definition = definition.clone(); Ok(Operation(Arc::new(OperationInner { operation_type: definition.operation_type, fragment_definitions, directives: definition.directives, variable_definitions: definition.variable_definitions, selection_set: definition.selection_set, errors: Default::default(), variables, }))) } None => Err(GqlError::new( format!("operationName: {} is not contained in query", name), None, )), } } None => match operation_definitions.get(&no_name_key.to_string()) { Some(definition) => { let definition = definition.clone(); Ok(Operation(Arc::new(OperationInner { operation_type: definition.operation_type, fragment_definitions, directives: definition.directives, variable_definitions: definition.variable_definitions, selection_set: definition.selection_set, errors: Default::default(), variables, }))) } None => match operation_definitions.values().next() { Some(definition) => { let definition = definition.clone(); Ok(Operation(Arc::new(OperationInner { operation_type: definition.operation_type, fragment_definitions, directives: definition.directives, variable_definitions: definition.variable_definitions, selection_set: definition.selection_set, errors: Default::default(), variables, }))) } None => Err(GqlError::new("operation does not exist", None)), }, }, } } #[cfg(test)] mod tests { use crate::operation::build_operation; #[test] fn build_single_operation() { let parsed_query = graphql_parser::parse_query::<String>(r#"query GetPerson { persons { name age } }"#) .unwrap(); let operation = build_operation(&parsed_query, None, Default::default()); assert!(operation.is_ok()); assert_eq!(operation.unwrap().operation_type.to_string(), "Query"); } #[test] fn build_multiple_operation() { let parsed_query = graphql_parser::parse_query::<String>( r#"query GetPerson { persons { name age } } query GetPet { pets { name kind } }"#, ) .unwrap(); let operation = build_operation( 
&parsed_query, Some("GetPerson".to_string()), Default::default(), ); assert!(operation.is_ok()); assert_eq!(operation.unwrap().operation_type.to_string(), "Query"); } #[test] fn fails_build_multiple_operation_without_operation_name() { let parsed_query = graphql_parser::parse_query::<String>( r#"query GetPerson { persons { name age } } query GetPet { pets { name kind } }"#, ) .unwrap(); let operation = build_operation(&parsed_query, None, Default::default()); assert!(operation.is_err()); } }
use std::{ collections::HashMap, ops::Deref, sync::{Arc, Mutex}, }; use graphql_parser::{ query::{Definition, Document, FragmentDefinition, SelectionSet, VariableDefinition}, schema::Directive, }; use crate::{error::GqlError, Variables}; #[derive(Debug)] pub struct OperationInner<'a> { pub operation_type: OperationType, pub directives: Vec<Directive<'a, String>>, pub variable_definitions: Vec<VariableDefinition<'a, String>>, pub selection_set: SelectionSet<'a, String>, pub fragment_definitions: HashMap<String, FragmentDefinition<'a, String>>, pub errors: Mutex<Vec<GqlError>>, pub variables: Variables, } #[derive(Debug)] pub struct Operation<'a>(Arc<OperationInner<'a>>); impl<'a> Operation<'a> { pub fn new(operation: OperationInner<'a>) -> Operation<'a> { Operation(Arc::new(operation)) } } impl<'a> Deref for Operation<'a> { type Target = OperationInner<'a>; fn deref(&self) -> &Self::Target { &self.0 } } #[derive(Clone, Debug)] struct OperationDefinition<'a> { operation_type: OperationType, directives: Vec<Directive<'a, String>>, variable_definitions: Vec<VariableDefinition<'a, String>>, selection_set: SelectionSet<'a, String>, } #[derive(Clone, Debug)] pub enum OperationType { Query, Mutation, Subscription, } impl ToString for OperationType { fn to_string(&self) -> String {
} } pub fn get_operation_definitions<'a>( doc: &'a Document<'a, String>, ) -> Vec<&'a graphql_parser::query::Definition<'a, String>> { doc.definitions .iter() .filter(|def| matches!(def, Definition::Operation(_))) .collect::<Vec<_>>() } pub fn build_operation<'a>( doc: &'a Document<'a, String>, operation_name: Option<String>, variables: Variables, ) -> Result<Operation<'a>, GqlError> { let mut fragment_definitions = HashMap::new(); for def in &doc.definitions { if let Definition::Fragment(fragment) = def { let name = fragment.name.to_string(); fragment_definitions.insert(name, fragment.to_owned()); } } if operation_name.is_none() && get_operation_definitions(doc).len() > 1 { return Err(GqlError::new( "Must provide operation name if multiple operation exist", None, )); }; let mut operation_definitions: HashMap<String, OperationDefinition> = HashMap::new(); let no_name_key = "no_operation_name"; for definition in doc.clone().definitions { if let Definition::Operation(operation) = definition { match operation { graphql_parser::query::OperationDefinition::SelectionSet(selection_set) => { operation_definitions.insert( no_name_key.to_string(), OperationDefinition { operation_type: OperationType::Query, selection_set, directives: vec![], variable_definitions: vec![], }, ); } graphql_parser::query::OperationDefinition::Query(query) => { let query_name = query.name.unwrap_or_else(|| no_name_key.to_string()); operation_definitions.insert( query_name, OperationDefinition { operation_type: OperationType::Query, selection_set: query.selection_set, directives: query.directives, variable_definitions: query.variable_definitions, }, ); } graphql_parser::query::OperationDefinition::Mutation(mutation) => { let mutation_name = mutation.name.unwrap_or_else(|| no_name_key.to_string()); operation_definitions.insert( mutation_name, OperationDefinition { operation_type: OperationType::Mutation, selection_set: mutation.selection_set, directives: mutation.directives, variable_definitions: 
mutation.variable_definitions, }, ); } graphql_parser::query::OperationDefinition::Subscription(subscription) => { let subscription_name = subscription.name.unwrap_or_else(|| no_name_key.to_string()); operation_definitions.insert( subscription_name, OperationDefinition { operation_type: OperationType::Subscription, selection_set: subscription.selection_set, directives: subscription.directives, variable_definitions: subscription.variable_definitions, }, ); } } } } match operation_name { Some(name) => { let target_def = operation_definitions.get(name.as_str()); match target_def { Some(definition) => { let definition = definition.clone(); Ok(Operation(Arc::new(OperationInner { operation_type: definition.operation_type, fragment_definitions, directives: definition.directives, variable_definitions: definition.variable_definitions, selection_set: definition.selection_set, errors: Default::default(), variables, }))) } None => Err(GqlError::new( format!("operationName: {} is not contained in query", name), None, )), } } None => match operation_definitions.get(&no_name_key.to_string()) { Some(definition) => { let definition = definition.clone(); Ok(Operation(Arc::new(OperationInner { operation_type: definition.operation_type, fragment_definitions, directives: definition.directives, variable_definitions: definition.variable_definitions, selection_set: definition.selection_set, errors: Default::default(), variables, }))) } None => match operation_definitions.values().next() { Some(definition) => { let definition = definition.clone(); Ok(Operation(Arc::new(OperationInner { operation_type: definition.operation_type, fragment_definitions, directives: definition.directives, variable_definitions: definition.variable_definitions, selection_set: definition.selection_set, errors: Default::default(), variables, }))) } None => Err(GqlError::new("operation does not exist", None)), }, }, } } #[cfg(test)] mod tests { use crate::operation::build_operation; #[test] fn 
build_single_operation() { let parsed_query = graphql_parser::parse_query::<String>(r#"query GetPerson { persons { name age } }"#) .unwrap(); let operation = build_operation(&parsed_query, None, Default::default()); assert!(operation.is_ok()); assert_eq!(operation.unwrap().operation_type.to_string(), "Query"); } #[test] fn build_multiple_operation() { let parsed_query = graphql_parser::parse_query::<String>( r#"query GetPerson { persons { name age } } query GetPet { pets { name kind } }"#, ) .unwrap(); let operation = build_operation( &parsed_query, Some("GetPerson".to_string()), Default::default(), ); assert!(operation.is_ok()); assert_eq!(operation.unwrap().operation_type.to_string(), "Query"); } #[test] fn fails_build_multiple_operation_without_operation_name() { let parsed_query = graphql_parser::parse_query::<String>( r#"query GetPerson { persons { name age } } query GetPet { pets { name kind } }"#, ) .unwrap(); let operation = build_operation(&parsed_query, None, Default::default()); assert!(operation.is_err()); } }
match self { OperationType::Query => String::from("Query"), OperationType::Mutation => String::from("Mutation"), OperationType::Subscription => String::from("Subscription"), }
if_condition
[ { "content": "pub fn get_type_name(ty: &Type<'_, String>) -> String {\n\n match ty {\n\n Type::NamedType(named_type) => named_type.to_string(),\n\n Type::ListType(list) => get_type_name(list),\n\n Type::NonNullType(non_null) => get_type_name(non_null),\n\n }\n\n}\n\n\n", "file_path": "src/validation/utils.rs", "rank": 0, "score": 244683.5712404701 }, { "content": "pub fn generate_enum(derive_input: &DeriveInput) -> Result<TokenStream, syn::Error> {\n\n let self_ty = &derive_input.ident;\n\n let crate_name = quote! { rusty_gql };\n\n\n\n let type_name = self_ty.unraw().to_string();\n\n\n\n let (impl_generics, _, where_clause) = &derive_input.generics.split_for_impl();\n\n\n\n let enum_data = match &derive_input.data {\n\n syn::Data::Enum(v) => v,\n\n _ => {\n\n return Err(syn::Error::new_spanned(\n\n &derive_input.ident,\n\n \"Enum type must be enum rust type\",\n\n ));\n\n }\n\n };\n\n\n\n let mut resolve_fields = Vec::new();\n\n let mut resolve_selection_sets = Vec::new();\n", "file_path": "macro/src/enum_type.rs", "rank": 1, "score": 221670.567296295 }, { "content": "pub fn read_schemas(dir: &Path) -> std::io::Result<Vec<String>> {\n\n let mut schemas = Vec::new();\n\n if dir.is_dir() {\n\n for entry in std::fs::read_dir(dir)? 
{\n\n let entry = entry?;\n\n let path = entry.path();\n\n if path.is_dir() {\n\n read_schemas(&path)?;\n\n } else {\n\n let content = std::fs::read_to_string(path)?;\n\n schemas.push(content);\n\n }\n\n }\n\n }\n\n Ok(schemas)\n\n}\n", "file_path": "src/lib.rs", "rank": 2, "score": 208292.18367103388 }, { "content": "fn new_file_content(enum_def: &EnumType) -> String {\n\n let mut scope = Scope::new();\n\n let enum_scope = scope.new_enum(&enum_def.name).vis(\"pub\");\n\n enum_scope.derive(\"GqlEnum\");\n\n enum_scope.derive(\"Copy\");\n\n enum_scope.derive(\"Clone\");\n\n enum_scope.derive(\"Eq\");\n\n enum_scope.derive(\"PartialEq\");\n\n\n\n for value in &enum_def.values {\n\n enum_scope.new_variant(&value.name);\n\n }\n\n\n\n format!(\"{}\\n\\n{}\", use_gql_definitions(), scope.to_string())\n\n}\n\n\n", "file_path": "cli/src/code_generate/type_definition/enum_file.rs", "rank": 3, "score": 194444.15888096252 }, { "content": "pub fn build_enum_value_introspection(value: &EnumTypeValue) -> __EnumValue {\n\n __EnumValue {\n\n detail: value.clone(),\n\n }\n\n}\n\n\n\n#[allow(non_snake_case)]\n\n#[GqlType(internal)]\n\nimpl __EnumValue {\n\n async fn name(&self) -> &str {\n\n self.detail.name.as_str()\n\n }\n\n\n\n async fn description(&self) -> Option<&str> {\n\n self.detail.description.as_deref()\n\n }\n\n\n\n async fn isDeprecated(&self) -> bool {\n\n self.detail.is_deprecated()\n\n }\n\n}\n", "file_path": "src/types/introspection/enum_value.rs", "rank": 4, "score": 194161.27681892287 }, { "content": "fn dir_location_str(location: &DirectiveLocation) -> String {\n\n match location {\n\n DirectiveLocation::Query => \"QUERY\".to_string(),\n\n DirectiveLocation::Mutation => \"MUTATION\".to_string(),\n\n DirectiveLocation::Subscription => \"SUBSCRIPTION\".to_string(),\n\n DirectiveLocation::Field => \"FIELD\".to_string(),\n\n DirectiveLocation::FragmentDefinition => \"FRAGMENT_DEFINITION\".to_string(),\n\n DirectiveLocation::FragmentSpread => 
\"FRAGMENT_SPREAD\".to_string(),\n\n DirectiveLocation::InlineFragment => \"INLINE_FRAGMENT\".to_string(),\n\n DirectiveLocation::Schema => \"SCHEMA\".to_string(),\n\n DirectiveLocation::Scalar => \"SCALAR\".to_string(),\n\n DirectiveLocation::Object => \"OBJECT\".to_string(),\n\n DirectiveLocation::FieldDefinition => \"FIELD_DEFINITION\".to_string(),\n\n DirectiveLocation::ArgumentDefinition => \"ARGUMENT_DEFINITION\".to_string(),\n\n DirectiveLocation::Interface => \"INTERFACE\".to_string(),\n\n DirectiveLocation::Union => \"UNION\".to_string(),\n\n DirectiveLocation::Enum => \"ENUM\".to_string(),\n\n DirectiveLocation::EnumValue => \"ENUM_VALUE\".to_string(),\n\n DirectiveLocation::InputObject => \"INPUT_OBJECT\".to_string(),\n\n DirectiveLocation::InputFieldDefinition => \"INPUT_FIELD_DEFINITION\".to_string(),\n", "file_path": "src/types/introspection/directive.rs", "rank": 5, "score": 192197.89509898392 }, { "content": "pub fn playground_html(endpoint: &str, subscription_endpoint: Option<&str>) -> String {\n\n r#\"\n\n <html>\n\n <head>\n\n <title>rusty gql</title>\n\n <link href=\"https://unpkg.com/graphiql/graphiql.min.css\" rel=\"stylesheet\" />\n\n </head>\n\n <body style=\"margin: 0;\">\n\n <div id=\"graphiql\" style=\"height: 100vh;\"></div>\n\n\n\n <script src=\"//unpkg.com/[email protected]/browser/client.js\"></script>\n\n <script src=\"//unpkg.com/[email protected]/browser/client.js\"></script>\n\n <script\n\n crossorigin\n\n src=\"https://unpkg.com/react/umd/react.production.min.js\"\n\n ></script>\n\n <script\n\n crossorigin\n\n src=\"https://unpkg.com/react-dom/umd/react-dom.production.min.js\"\n\n ></script>\n", "file_path": "src/playground_html.rs", "rank": 6, "score": 187608.85603938095 }, { "content": "pub fn referenced_variables<'a>(value: &'a Value<'a, String>) -> Vec<&'a str> {\n\n let mut vars = Vec::new();\n\n referenced_variables_to_vec(value, &mut vars);\n\n vars\n\n}\n\n\n", "file_path": "src/validation/utils.rs", "rank": 7, "score": 
186990.7804071089 }, { "content": "fn sync_file(file_src: &str, enum_def: &EnumType) -> String {\n\n let syntax = syn::parse_file(file_src).expect(\"Failed to parse a enum file\");\n\n\n\n let mut variants = Vec::new();\n\n let mut use_items = Vec::new();\n\n let mut other_items = Vec::new();\n\n let enum_name: TokenStream = enum_def.name.parse().unwrap();\n\n let mut attributes: TokenStream = Default::default();\n\n\n\n for item in &syntax.items {\n\n if let syn::Item::Enum(enum_item) = item {\n\n let attrs = &enum_item.attrs;\n\n attributes = quote! {#(#attrs)*};\n\n let ident = &enum_item.ident;\n\n let enum_ident = ident.unraw().to_string();\n\n\n\n if enum_ident.eq(&enum_def.name) {\n\n let mut visited = HashSet::new();\n\n\n\n for variant in &enum_item.variants {\n", "file_path": "cli/src/code_generate/type_definition/enum_file.rs", "rank": 8, "score": 185320.87654488874 }, { "content": "pub fn is_result_type(return_type: &Type) -> bool {\n\n if let Type::Path(ty_path) = return_type {\n\n if ty_path.path.segments.last().unwrap().ident == \"Result\" {\n\n return true;\n\n }\n\n }\n\n false\n\n}\n", "file_path": "macro/src/utils.rs", "rank": 9, "score": 183758.56156410812 }, { "content": "pub fn gql_value_ty_to_rust_ty(gql_value: &GqlValueType) -> String {\n\n value_ty_to_str(gql_value, true)\n\n}\n\n\n", "file_path": "cli/src/code_generate/util.rs", "rank": 10, "score": 183336.39112483297 }, { "content": "type ContainerType = Container<Query, Mutation, EmptySubscription>;\n\n\n\nasync fn gql_handler(container: Extension<ContainerType>, req: GqlRequest) -> GqlResponse {\n\n let result = execute(&container, req.0).await;\n\n GqlResponse::from(result)\n\n}\n\n\n\nasync fn gql_playground() -> impl IntoResponse {\n\n response::Html(playground_html(\"/\", None))\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n let schema_docs = read_schemas(Path::new(\"./examples/axum/schemas\")).unwrap();\n\n let schema_docs: Vec<&str> = schema_docs.iter().map(|s| 
&**s).collect();\n\n\n\n let container = Container::new(\n\n schema_docs.as_slice(),\n\n Query,\n\n Mutation,\n", "file_path": "examples/axum/src/main.rs", "rank": 11, "score": 175279.959873348 }, { "content": "struct SerializeStructVariant(String, BTreeMap<String, GqlValue>);\n\n\n\nimpl ser::SerializeStructVariant for SerializeStructVariant {\n\n type Ok = GqlValue;\n\n type Error = SerializerError;\n\n\n\n #[inline]\n\n fn serialize_field<T: ?Sized>(\n\n &mut self,\n\n key: &'static str,\n\n value: &T,\n\n ) -> Result<(), Self::Error>\n\n where\n\n T: ser::Serialize,\n\n {\n\n let value = value.serialize(Serializer)?;\n\n self.1.insert(key.to_string(), value);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n\n let mut map = BTreeMap::new();\n\n map.insert(self.0, GqlValue::Object(self.1));\n\n Ok(GqlValue::Object(map))\n\n }\n\n}\n\n\n", "file_path": "src/types/value/serializer.rs", "rank": 12, "score": 174166.00906146204 }, { "content": "pub fn schema_content(path: &str) -> String {\n\n std::fs::read_to_string(path).unwrap()\n\n}\n\n\n", "file_path": "src/test_utils.rs", "rank": 13, "score": 172131.665775343 }, { "content": "fn cannot_serialize_except_string() -> SerializerError {\n\n SerializerError(\"key must be a string\".to_string())\n\n}\n", "file_path": "src/types/value/serializer.rs", "rank": 14, "score": 166682.97538559465 }, { "content": "pub fn build_schema(\n\n schema_documents: &[&str],\n\n custom_directives: HashMap<&'static str, Box<dyn CustomDirective>>,\n\n) -> Result<Schema, GqlError> {\n\n let mut queries = HashMap::new();\n\n let mut mutations = HashMap::new();\n\n let mut subscriptions = HashMap::new();\n\n let mut type_definitions = HashMap::new();\n\n let mut directives = HashMap::new();\n\n let mut extensions = Vec::new();\n\n let mut schema_definition = None;\n\n let mut interfaces = HashMap::new();\n\n\n\n type_definitions.insert(\n\n \"String\".to_string(),\n\n 
TypeDefinition::Scalar(ScalarType::string_scalar()),\n\n );\n\n type_definitions.insert(\n\n \"Int\".to_string(),\n\n TypeDefinition::Scalar(ScalarType::int_scalar()),\n", "file_path": "src/types/schema.rs", "rank": 15, "score": 163637.27699561248 }, { "content": "pub fn generate_type(\n\n item_impl: &mut ItemImpl,\n\n args: &[NestedMeta],\n\n) -> Result<TokenStream, syn::Error> {\n\n let self_ty = &item_impl.self_ty;\n\n let crate_name = if is_internal(args) {\n\n quote! { crate }\n\n } else {\n\n quote! { rusty_gql }\n\n };\n\n\n\n let type_name = match self_ty.as_ref() {\n\n syn::Type::Path(path) => path.path.segments.last().unwrap().ident.unraw().to_string(),\n\n _ => {\n\n return Err(syn::Error::new_spanned(&self_ty, \"Invalid struct\"));\n\n }\n\n };\n\n\n\n let (impl_generics, _, where_clause) = &item_impl.generics.split_for_impl();\n\n\n", "file_path": "macro/src/ty.rs", "rank": 16, "score": 163637.2769956125 }, { "content": "struct SerializeStruct(BTreeMap<String, GqlValue>);\n\n\n\nimpl ser::SerializeStruct for SerializeStruct {\n\n type Ok = GqlValue;\n\n type Error = SerializerError;\n\n\n\n #[inline]\n\n fn serialize_field<T: ?Sized>(\n\n &mut self,\n\n key: &'static str,\n\n value: &T,\n\n ) -> Result<(), Self::Error>\n\n where\n\n T: ser::Serialize,\n\n {\n\n let value = value.serialize(Serializer)?;\n\n self.0.insert(key.to_string(), value);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n\n Ok(GqlValue::Object(self.0))\n\n }\n\n}\n\n\n", "file_path": "src/types/value/serializer.rs", "rank": 17, "score": 162772.40125275115 }, { "content": "pub fn is_context_type(arg: &FnArg) -> bool {\n\n let mut is_context = false;\n\n if let FnArg::Typed(pat) = arg {\n\n if let Type::Reference(TypeReference { elem, .. 
}) = &*pat.ty {\n\n if let Type::Path(path) = elem.as_ref() {\n\n is_context = path.path.segments.last().unwrap().ident == \"Context\";\n\n }\n\n }\n\n }\n\n is_context\n\n}\n\n\n", "file_path": "macro/src/utils.rs", "rank": 18, "score": 155112.1590099868 }, { "content": "struct SerializeTupleVariant(String, Vec<GqlValue>);\n\n\n\nimpl ser::SerializeTupleVariant for SerializeTupleVariant {\n\n type Ok = GqlValue;\n\n type Error = SerializerError;\n\n\n\n #[inline]\n\n fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>\n\n where\n\n T: ser::Serialize,\n\n {\n\n let value = value.serialize(Serializer)?;\n\n self.1.push(value);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n\n let mut map = BTreeMap::new();\n\n map.insert(self.0, GqlValue::List(self.1));\n\n Ok(GqlValue::Object(map))\n\n }\n\n}\n\n\n", "file_path": "src/types/value/serializer.rs", "rank": 19, "score": 154253.0096421395 }, { "content": "pub fn is_default_item_use(item_use: &ItemUse) -> bool {\n\n if let syn::UseTree::Path(use_path) = &item_use.tree {\n\n let ident = use_path.ident.unraw().to_string();\n\n if ident.eq(\"rusty_gql\") {\n\n return true;\n\n }\n\n\n\n if ident.eq(\"crate\") {\n\n if let syn::UseTree::Path(child_path) = &*use_path.tree {\n\n let ident = child_path.ident.unraw().to_string();\n\n if ident.eq(\"graphql\") {\n\n return true;\n\n }\n\n }\n\n }\n\n }\n\n false\n\n}\n", "file_path": "cli/src/code_generate/util.rs", "rank": 20, "score": 154145.2069250526 }, { "content": "pub fn get_operation_def_position(operation_definition: &OperationDefinition<'_, String>) -> Pos {\n\n match operation_definition {\n\n OperationDefinition::SelectionSet(selection_set) => selection_set.span.0,\n\n OperationDefinition::Query(query) => query.position,\n\n OperationDefinition::Mutation(mutation) => mutation.position,\n\n OperationDefinition::Subscription(subscription) => subscription.position,\n\n }\n\n}\n\n\n", "file_path": 
"src/validation/utils.rs", "rank": 21, "score": 153839.72908368232 }, { "content": "pub fn get_interface_impl_object_map(\n\n type_definitions: &HashMap<String, TypeDefinition>,\n\n) -> HashMap<String, Vec<String>> {\n\n let mut map = HashMap::new();\n\n\n\n for ty_def in type_definitions.values() {\n\n if let TypeDefinition::Object(obj) = ty_def {\n\n for interface_name in &obj.implements_interfaces {\n\n map.entry(interface_name.to_string())\n\n .or_insert_with(Vec::new)\n\n .push(obj.name.to_string());\n\n }\n\n }\n\n }\n\n map\n\n}\n\n\n", "file_path": "cli/src/code_generate/util.rs", "rank": 22, "score": 152157.7704781475 }, { "content": "fn gql_file_types() -> Vec<String> {\n\n vec![\n\n \"query\".to_string(),\n\n \"mutation\".to_string(),\n\n \"resolver\".to_string(),\n\n \"directive\".to_string(),\n\n \"scalar\".to_string(),\n\n \"input\".to_string(),\n\n ]\n\n}\n\nasync fn create_root_mod_file(path: &str) -> tokio::io::Result<()> {\n\n let filenames = gql_file_types();\n\n create_file(RootModFile { path, filenames }).await\n\n}\n\n\n\nasync fn create_root_dirs(path: &str) -> Result<Vec<()>, Error> {\n\n let mut futures = Vec::new();\n\n for name in gql_file_types() {\n\n futures.push(tokio::fs::create_dir_all(format!(\"{}/{}\", path, name)));\n\n }\n\n try_join_all(futures).await\n\n}\n\n\n\npub(crate) fn use_gql_definitions() -> &'static str {\n\n r#\"#![allow(warnings, unused)]\n\nuse crate::graphql::*;\n\nuse rusty_gql::*;\"#\n\n}\n", "file_path": "cli/src/code_generate/mod.rs", "rank": 23, "score": 151054.36574953556 }, { "content": "pub fn generate_interface(derive_input: &DeriveInput) -> Result<TokenStream, syn::Error> {\n\n let self_ty = &derive_input.ident;\n\n let crate_name = quote! 
{ rusty_gql };\n\n\n\n let (impl_generics, _, where_clause) = &derive_input.generics.split_for_impl();\n\n\n\n let union_data = match &derive_input.data {\n\n syn::Data::Enum(v) => v,\n\n _ => {\n\n return Err(syn::Error::new_spanned(\n\n &derive_input.ident,\n\n \"Union type must be enum rust type\",\n\n ));\n\n }\n\n };\n\n\n\n let mut introspection_type_names = Vec::new();\n\n let mut collect_all_fields = Vec::new();\n\n\n\n for variant in &union_data.variants {\n", "file_path": "macro/src/interface.rs", "rank": 24, "score": 150534.80017994402 }, { "content": "pub fn generate_union(derive_input: &DeriveInput) -> Result<TokenStream, syn::Error> {\n\n let self_ty = &derive_input.ident;\n\n let crate_name = quote! { rusty_gql };\n\n\n\n let type_name = self_ty.unraw().to_string();\n\n\n\n let (impl_generics, _, where_clause) = &derive_input.generics.split_for_impl();\n\n\n\n let union_data = match &derive_input.data {\n\n syn::Data::Enum(v) => v,\n\n _ => {\n\n return Err(syn::Error::new_spanned(\n\n &derive_input.ident,\n\n \"Union type must be enum rust type\",\n\n ));\n\n }\n\n };\n\n\n\n let mut introspection_type_names = Vec::new();\n\n let mut collect_all_fields = Vec::new();\n", "file_path": "macro/src/union.rs", "rank": 25, "score": 150534.80017994402 }, { "content": "pub fn generate_scalar(derive_input: &DeriveInput) -> Result<TokenStream, syn::Error> {\n\n let self_ty = &derive_input.ident;\n\n let crate_name = quote! { rusty_gql };\n\n\n\n let type_name = self_ty.unraw().to_string();\n\n\n\n let (impl_generics, _, where_clause) = &derive_input.generics.split_for_impl();\n\n\n\n let expanded = quote! 
{\n\n #[#crate_name::async_trait::async_trait]\n\n impl #impl_generics #crate_name::FieldResolver for #self_ty #where_clause {\n\n async fn resolve_field(&self, ctx: &#crate_name::Context<'_>) -> #crate_name::ResolverResult<::std::option::Option<#crate_name::GqlValue>> {\n\n Ok(Some(self.to_gql_value()))\n\n }\n\n fn type_name() -> String {\n\n #type_name.to_string()\n\n }\n\n }\n\n\n\n impl #impl_generics #crate_name::CollectFields for #self_ty #where_clause {}\n", "file_path": "macro/src/scalar.rs", "rank": 26, "score": 150534.80017994402 }, { "content": "pub fn serialize_into_gql_value<T: ser::Serialize>(value: T) -> Result<GqlValue, SerializerError> {\n\n value.serialize(Serializer)\n\n}\n\n\n\nimpl ser::Serializer for Serializer {\n\n type Ok = GqlValue;\n\n\n\n type Error = SerializerError;\n\n\n\n type SerializeSeq = SerializeSeq;\n\n\n\n type SerializeTuple = SerializeTuple;\n\n\n\n type SerializeTupleStruct = SerializeTupleStruct;\n\n\n\n type SerializeTupleVariant = SerializeTupleVariant;\n\n\n\n type SerializeMap = SerializeMap;\n\n\n\n type SerializeStruct = SerializeStruct;\n", "file_path": "src/types/value/serializer.rs", "rank": 27, "score": 149864.2607392788 }, { "content": "pub fn build_input_value_introspection<'a>(\n\n schema: &'a Schema,\n\n value: &'a InputValueType,\n\n) -> __InputValue<'a> {\n\n __InputValue {\n\n schema,\n\n detail: value.clone(),\n\n }\n\n}\n\n\n\nimpl<'a> __InputValue<'a> {\n\n async fn name(&self) -> &str {\n\n self.detail.name.as_str()\n\n }\n\n\n\n async fn description(&self) -> Option<&str> {\n\n self.detail.description.as_deref()\n\n }\n\n\n\n async fn ty(&'a self) -> __Type<'a> {\n", "file_path": "src/types/introspection/input_value.rs", "rank": 28, "score": 149471.79485342896 }, { "content": "pub fn is_introspection_type_names(type_name: &str) -> bool {\n\n vec![\n\n \"__Directive\",\n\n \"__DirectiveLocation\",\n\n \"__EnumValue\",\n\n \"__Field\",\n\n \"__InputValue\",\n\n \"__Schema\",\n\n \"__Type\",\n\n 
\"__TypeKind\",\n\n ]\n\n .contains(&type_name)\n\n}\n\n\n", "file_path": "cli/src/code_generate/util.rs", "rank": 29, "score": 147852.71135480434 }, { "content": "pub fn introspection_sdl() -> &'static str {\n\n r#\"\n\n type __Schema {\n\n types: [__Type!]!\n\n queryType: __Type!\n\n mutationType: __Type\n\n subscriptionType: __Type\n\n directives: [__Directive!]!\n\n }\n\n\n\n type __Type {\n\n kind: __TypeKind!\n\n name: String\n\n description: String\n\n\n\n # OBJECT and INTERFACE only\n\n fields(includeDeprecated: Boolean = false): [__Field!]\n\n\n\n # OBJECT only\n\n interfaces: [__Type!]\n", "file_path": "src/types/introspection/introspection_sdl.rs", "rank": 30, "score": 146910.45157758452 }, { "content": "pub fn generate_input_object(derive_input: &DeriveInput) -> Result<TokenStream, syn::Error> {\n\n let self_ty = &derive_input.ident;\n\n let crate_name = quote! { rusty_gql };\n\n\n\n let type_name = self_ty.unraw().to_string();\n\n\n\n let (impl_generics, _, where_clause) = &derive_input.generics.split_for_impl();\n\n\n\n let struct_data = match &derive_input.data {\n\n syn::Data::Struct(v) => v,\n\n _ => {\n\n return Err(syn::Error::new_spanned(\n\n &derive_input.ident,\n\n \"Input Object type must be struct type\",\n\n ));\n\n }\n\n };\n\n\n\n let mut fields = Vec::new();\n\n let mut get_fields = Vec::new();\n", "file_path": "macro/src/input_object.rs", "rank": 31, "score": 146557.66635121842 }, { "content": "fn new_file_content(union_def: &UnionType) -> String {\n\n let mut scope = Scope::new();\n\n let union_scope = scope.new_enum(&union_def.name).vis(\"pub\");\n\n union_scope.derive(\"GqlUnion\");\n\n\n\n for value in &union_def.types {\n\n union_scope.new_variant(value).tuple(value);\n\n }\n\n\n\n format!(\"{}\\n\\n{}\", use_gql_definitions(), scope.to_string())\n\n}\n\n\n", "file_path": "cli/src/code_generate/type_definition/union_file.rs", "rank": 32, "score": 145588.16491813838 }, { "content": "fn new_file_content(scalar_def: &ScalarType) -> 
String {\n\n let mut scope = Scope::new();\n\n let struct_name = &scalar_def.name;\n\n let scalar_scope = scope.new_struct(struct_name).vis(\"pub\");\n\n scalar_scope.derive(\"GqlScalar\");\n\n\n\n let scalar_impl = scope.new_impl(struct_name);\n\n scalar_impl.impl_trait(\"GqlInputType\");\n\n let from_gql_value_fn = scalar_impl.new_fn(\"from_gql_value\");\n\n from_gql_value_fn.arg(\"value\", \"Option<GqlValue>\");\n\n from_gql_value_fn.ret(\"Result<Self, String>\");\n\n from_gql_value_fn.line(\"todo!()\");\n\n\n\n let to_gql_value_fn = scalar_impl.new_fn(\"to_gql_value\");\n\n to_gql_value_fn.arg_ref_self();\n\n to_gql_value_fn.ret(\"GqlValue\");\n\n to_gql_value_fn.line(\"todo!()\");\n\n\n\n format!(\"{}\\n\\n{}\", use_gql_definitions(), scope.to_string())\n\n}\n", "file_path": "cli/src/code_generate/type_definition/scalar_file.rs", "rank": 33, "score": 145588.16491813838 }, { "content": "fn new_file_content(object_def: &ObjectType) -> String {\n\n let mut struct_scope_base = Scope::new();\n\n let struct_name = &object_def.name;\n\n let struct_scope = struct_scope_base\n\n .new_struct(&struct_name.to_string())\n\n .vis(\"pub\");\n\n struct_scope.derive(\"Clone\");\n\n\n\n let mut impl_scope = Scope::new();\n\n let struct_imp = impl_scope.new_impl(&struct_name.to_string());\n\n struct_imp.r#macro(\"#[GqlType]\");\n\n\n\n for field in &object_def.fields {\n\n let field_name = &field.name;\n\n let return_ty = gql_value_ty_to_rust_ty(&field.meta_type);\n\n if is_return_primitive_ty(field) {\n\n struct_scope.field(format!(\"pub {}\", &field_name).as_str(), &return_ty);\n\n }\n\n\n\n let fn_scope = struct_imp.new_fn(field_name);\n", "file_path": "cli/src/code_generate/type_definition/object_file.rs", "rank": 34, "score": 145588.16491813838 }, { "content": "#[proc_macro_derive(GqlEnum)]\n\npub fn enum_derive(input: TokenStream) -> TokenStream {\n\n let input = &parse_macro_input!(input as DeriveInput);\n\n match generate_enum(input) {\n\n Ok(generated) => generated,\n\n 
Err(err) => err.to_compile_error().into(),\n\n }\n\n}\n\n\n", "file_path": "macro/src/lib.rs", "rank": 35, "score": 144260.587164993 }, { "content": "fn gql_to_rust_type_str(gql_type: &str, optional: bool) -> String {\n\n let name = match gql_type {\n\n \"Int\" => \"i32\".to_string(),\n\n \"Float\" => \"f32\".to_string(),\n\n \"String\" => \"String\".to_string(),\n\n \"Boolean\" => \"bool\".to_string(),\n\n _ => gql_type.to_string(),\n\n };\n\n if optional {\n\n format!(\"Option<{}>\", name)\n\n } else {\n\n name\n\n }\n\n}\n\n\n", "file_path": "cli/src/code_generate/util.rs", "rank": 36, "score": 143067.37209983976 }, { "content": "fn new_file_content(input_object_def: &InputObjectType) -> String {\n\n let mut scope = Scope::new();\n\n let struct_scope = scope.new_struct(&input_object_def.name).vis(\"pub\");\n\n struct_scope.derive(\"GqlInputObject\");\n\n\n\n for field in &input_object_def.fields {\n\n struct_scope.field(\n\n format!(\"pub {}\", &field.name).as_str(),\n\n gql_value_ty_to_rust_ty(&field.meta_type),\n\n );\n\n }\n\n\n\n format!(\"{}\\n\\n{}\", use_gql_definitions(), scope.to_string())\n\n}\n\n\n", "file_path": "cli/src/code_generate/type_definition/input_file.rs", "rank": 37, "score": 141936.56068318785 }, { "content": "pub fn build_schema_introspection(schema: &Schema) -> __Schema<'_> {\n\n __Schema { detail: schema }\n\n}\n\n\n\n#[allow(non_snake_case)]\n\n#[GqlType(internal)]\n\nimpl<'a> __Schema<'a> {\n\n async fn types(&self) -> Vec<__Type<'a>> {\n\n let mut result = Vec::new();\n\n for def in self.detail.type_definitions.values() {\n\n let ty = __Type::from_type_definition(self.detail, def);\n\n result.push(ty);\n\n }\n\n\n\n result\n\n }\n\n\n\n async fn queryType(&self) -> __Type<'a> {\n\n match self\n\n .detail\n", "file_path": "src/types/introspection/schema.rs", "rank": 38, "score": 140121.1422099144 }, { "content": "fn build_block_str(field: &FieldType, name: &str) -> String {\n\n let block_str = if is_return_primitive_ty(field) {\n\n 
if is_copy_gql_ty(field) {\n\n format!(\"self.{}\", &name)\n\n } else {\n\n format!(\"self.{}.clone()\", &name)\n\n }\n\n } else {\n\n \"todo!()\".to_string()\n\n };\n\n block_str\n\n}\n\n\n", "file_path": "cli/src/code_generate/type_definition/object_file.rs", "rank": 39, "score": 139276.89806311196 }, { "content": "fn build_gql_object(target_obj: &mut BTreeMap<String, GqlValue>, gql_value: (String, GqlValue)) {\n\n let (field_name, value) = gql_value;\n\n if let Some(prev_value) = target_obj.get_mut(&field_name) {\n\n match prev_value {\n\n GqlValue::List(target_list) => {\n\n if let GqlValue::List(list) = value {\n\n for (index, v) in list.into_iter().enumerate() {\n\n if let Some(GqlValue::Object(prev_obj)) = target_list.get_mut(index) {\n\n if let GqlValue::Object(new_obj) = v {\n\n for (key, value) in new_obj.into_iter() {\n\n build_gql_object(prev_obj, (key, value))\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n GqlValue::Object(prev_obj) => {\n\n if let GqlValue::Object(obj) = value {\n\n for map in obj.into_iter() {\n", "file_path": "src/resolver/mod.rs", "rank": 40, "score": 139131.57268373854 }, { "content": "pub fn is_gql_primitive_ty(type_name: &str) -> bool {\n\n vec![\"String\", \"Int\", \"Float\", \"Boolean\", \"ID\"].contains(&type_name)\n\n}\n\n\n", "file_path": "cli/src/code_generate/util.rs", "rank": 41, "score": 137512.2204422739 }, { "content": "fn sync_file(file_src: &str, object_def: &ObjectType) -> String {\n\n let syntax = syn::parse_file(file_src).expect(\"Failed to parse a input file\");\n\n let mut fields = Vec::new();\n\n let mut use_items = Vec::new();\n\n let mut other_items = Vec::new();\n\n let mut current_impl_items = Vec::new();\n\n let mut new_impl_items = Vec::new();\n\n let mut struct_name: TokenStream = Default::default();\n\n let mut struct_attributes: TokenStream = Default::default();\n\n let mut impl_attributes: TokenStream = Default::default();\n\n for item in &syntax.items {\n\n if let syn::Item::Struct(item_struct) = item 
{\n\n let ident = &item_struct.ident;\n\n let struct_ident = ident.unraw().to_string();\n\n let attrs = &item_struct.attrs;\n\n struct_name = quote! {#ident};\n\n struct_attributes = quote! {#(#attrs)*};\n\n if struct_ident.eq(&object_def.name) {\n\n let mut visited_fields = HashSet::new();\n\n for field in &item_struct.fields {\n", "file_path": "cli/src/code_generate/type_definition/object_file.rs", "rank": 42, "score": 137484.8157357453 }, { "content": "fn sync_file(file_src: &str, union_def: &UnionType) -> String {\n\n let syntax = syn::parse_file(file_src).expect(\"Failed to parse a enum file\");\n\n\n\n let mut variants = Vec::new();\n\n let mut use_items = Vec::new();\n\n let mut other_items = Vec::new();\n\n let enum_name: TokenStream = union_def.name.parse().unwrap();\n\n let mut attributes: TokenStream = Default::default();\n\n\n\n for item in &syntax.items {\n\n if let syn::Item::Enum(enum_item) = item {\n\n let attrs = &enum_item.attrs;\n\n attributes = quote! {#(#attrs)*};\n\n let ident = &enum_item.ident;\n\n let enum_ident = ident.unraw().to_string();\n\n\n\n if enum_ident.eq(&union_def.name) {\n\n let mut visited = HashSet::new();\n\n\n\n for variant in &enum_item.variants {\n", "file_path": "cli/src/code_generate/type_definition/union_file.rs", "rank": 43, "score": 137484.8157357453 }, { "content": "fn new_file_content(interface_file: &InterfaceFile) -> String {\n\n let mut scope = Scope::new();\n\n let interface_name = &interface_file.def.name;\n\n let interface_scope = scope.new_enum(interface_name).vis(\"pub\");\n\n interface_scope.derive(\"GqlInterface\");\n\n interface_scope.derive(\"Clone\");\n\n\n\n if let Some(impl_objects) = interface_file.interface_obj_map.get(interface_name) {\n\n for obj_name in impl_objects {\n\n interface_scope.new_variant(format!(\"{}({})\", obj_name, obj_name).as_str());\n\n }\n\n }\n\n\n\n let mut impl_scope = Scope::new();\n\n let interface_impl = impl_scope.new_impl(interface_name);\n\n 
interface_impl.r#macro(\"#[GqlType(interface)]\");\n\n\n\n for field in &interface_file.def.fields {\n\n let fn_scope = interface_impl.new_fn(&field.name);\n\n fn_scope.arg_ref_self();\n", "file_path": "cli/src/code_generate/type_definition/interface_file.rs", "rank": 44, "score": 135034.33204074562 }, { "content": "fn sync_file(file_src: &str, input_object_def: &InputObjectType) -> String {\n\n let syntax = syn::parse_file(file_src).expect(\"Failed to parse a input file\");\n\n let mut fields = Vec::new();\n\n let mut use_items = Vec::new();\n\n let mut other_items = Vec::new();\n\n let mut struct_name: TokenStream = Default::default();\n\n let mut attributes: TokenStream = Default::default();\n\n for item in &syntax.items {\n\n if let syn::Item::Struct(struct_item) = item {\n\n let ident = &struct_item.ident;\n\n let struct_ident = ident.unraw().to_string();\n\n let attrs = &struct_item.attrs;\n\n struct_name = quote! {#ident};\n\n attributes = quote! {#(#attrs)*};\n\n if struct_ident.eq(&input_object_def.name) {\n\n let mut visited = HashSet::new();\n\n for field in &struct_item.fields {\n\n let current_field_ident = field.ident.clone().unwrap().to_string();\n\n if input_object_def\n\n .fields\n", "file_path": "cli/src/code_generate/type_definition/input_file.rs", "rank": 45, "score": 134088.4673110824 }, { "content": "#[proc_macro_attribute]\n\n#[allow(non_snake_case)]\n\npub fn GqlType(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let mut item_impl = parse_macro_input!(input as ItemImpl);\n\n let args = parse_macro_input!(args as AttributeArgs);\n\n\n\n match generate_type(&mut item_impl, &args[..]) {\n\n Ok(generated) => generated,\n\n Err(err) => err.to_compile_error().into(),\n\n }\n\n}\n\n\n", "file_path": "macro/src/lib.rs", "rank": 46, "score": 131488.7940111935 }, { "content": "fn value_ty_to_str(gql_value: &GqlValueType, optional: bool) -> String {\n\n match gql_value {\n\n GqlValueType::NamedType(name) => gql_to_rust_type_str(name, 
optional),\n\n GqlValueType::ListType(list_type) => {\n\n if optional {\n\n format!(\"Option<Vec<{}>>\", value_ty_to_str(list_type, true))\n\n } else {\n\n format!(\"Vec<{}>\", value_ty_to_str(list_type, true))\n\n }\n\n }\n\n GqlValueType::NonNullType(non_null_type) => value_ty_to_str(non_null_type, false),\n\n }\n\n}\n\n\n", "file_path": "cli/src/code_generate/util.rs", "rank": 47, "score": 130582.58709330307 }, { "content": "fn referenced_variables_to_vec<'a>(value: &'a Value<'a, String>, vars: &mut Vec<&'a str>) {\n\n match value {\n\n Value::Variable(name) => {\n\n vars.push(name);\n\n }\n\n Value::List(values) => values\n\n .iter()\n\n .for_each(|value| referenced_variables_to_vec(value, vars)),\n\n Value::Object(obj) => obj\n\n .values()\n\n .for_each(|value| referenced_variables_to_vec(value, vars)),\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "src/validation/utils.rs", "rank": 48, "score": 129588.78858955036 }, { "content": "fn sync_file(file_src: &str, interface_file: &InterfaceFile) -> String {\n\n let syntax = syn::parse_file(file_src).expect(\"Failed to parse a input file\");\n\n let mut variants = Vec::new();\n\n let mut use_items = Vec::new();\n\n let mut other_items = Vec::new();\n\n let mut current_impl_items = Vec::new();\n\n let mut new_impl_items = Vec::new();\n\n let enum_name: TokenStream = interface_file.def.name.parse().unwrap();\n\n let mut enum_attrs: TokenStream = Default::default();\n\n let mut impl_attrs: TokenStream = Default::default();\n\n let interface_name = &interface_file.def.name;\n\n let implement_objects = interface_file\n\n .interface_obj_map\n\n .get(interface_name)\n\n .unwrap();\n\n\n\n for item in &syntax.items {\n\n if let syn::Item::Enum(enum_item) = item {\n\n let attrs = &enum_item.attrs;\n\n enum_attrs = quote! 
{#(#attrs)*};\n", "file_path": "cli/src/code_generate/type_definition/interface_file.rs", "rank": 49, "score": 126840.86399127328 }, { "content": "pub fn build_test_request(\n\n query: &str,\n\n operation_name: Option<String>,\n\n variables: Variables,\n\n) -> Request {\n\n Request {\n\n query: query.to_string(),\n\n operation_name,\n\n variables,\n\n extensions: Default::default(),\n\n }\n\n}\n\n\n\npub async fn check_gql_response<\n\n Query: SelectionSetResolver + 'static,\n\n Mutation: SelectionSetResolver + 'static,\n\n Subscription: SelectionSetResolver + 'static,\n\n>(\n\n request: Request,\n\n expected_response: &str,\n\n container: &Container<Query, Mutation, Subscription>,\n\n) {\n\n let res = execute(container, request).await;\n\n assert_eq!(serde_json::to_string(&res).unwrap(), expected_response);\n\n}\n", "file_path": "src/test_utils.rs", "rank": 51, "score": 123617.71644656024 }, { "content": "pub fn apply_validation<'a>(\n\n schema: &'a Schema,\n\n query_doc: &'a Document<'a, String>,\n\n variables: Option<&'a Variables>,\n\n operation: &'a Operation<'a>,\n\n operation_name: Option<&'a str>,\n\n) -> Result<(), Vec<GqlError>> {\n\n let mut ctx = ValidationContext::new(schema, variables, operation);\n\n let mut visitor = NewVisitor\n\n .with(rules::DefaultValueOfCorrectType::default())\n\n .with(rules::FieldsOnCorrectType::default())\n\n .with(rules::FragmentsOnCompositeTypes::default())\n\n .with(rules::KnownArgumentNames::default())\n\n .with(rules::KnownDirectives::default())\n\n .with(rules::KnownFragmentName::default())\n\n .with(rules::KnownTypeNames::default())\n\n .with(rules::NoFragmentCycles::default())\n\n .with(rules::NoUndefinedVariables::default())\n\n // .with(rules::NoUnusedFragment::default())\n\n .with(rules::NoUnusedVariables::default())\n", "file_path": "src/validation/mod.rs", "rank": 53, "score": 122763.80510844875 }, { "content": "pub fn get_fragment_definition_on_str(\n\n type_condition: Option<&TypeCondition<'_, String>>,\n\n) 
-> Option<String> {\n\n if let Some(TypeCondition::On(ty)) = type_condition {\n\n Some(ty.clone())\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "src/validation/utils.rs", "rank": 54, "score": 121666.22993785834 }, { "content": "pub fn check_valid_input_value(\n\n schema: &Schema,\n\n ty: &Type<'_, String>,\n\n value: &Value<'_, String>,\n\n) -> Option<String> {\n\n match ty {\n\n Type::NamedType(type_name) => {\n\n if let Value::Null = value {\n\n return None;\n\n }\n\n let type_def = schema.type_definitions.get(type_name);\n\n match type_def {\n\n Some(def) => match def {\n\n TypeDefinition::Scalar(scalar) => {\n\n if scalar.is_valid_value(value) {\n\n None\n\n } else {\n\n Some(\"Invalid type\".to_string())\n\n }\n\n }\n", "file_path": "src/validation/utils.rs", "rank": 55, "score": 121666.22993785834 }, { "content": "pub fn vader() -> Human {\n\n Human {\n\n id: ID(\"2\".to_string()),\n\n name: \"Anakin Skywalker\".to_string(),\n\n homePlanet: Some(\"Tatooine\".to_string()),\n\n height: Some(190.0),\n\n mass: Some(80.0),\n\n }\n\n}\n\n\n", "file_path": "examples/axum/src/starwars.rs", "rank": 56, "score": 120709.2331451801 }, { "content": "pub fn leia() -> Human {\n\n Human {\n\n id: ID(\"4\".to_string()),\n\n name: \"Leia Organa\".to_string(),\n\n homePlanet: None,\n\n height: None,\n\n mass: None,\n\n }\n\n}\n\n\n", "file_path": "examples/axum/src/starwars.rs", "rank": 57, "score": 120709.2331451801 }, { "content": "pub fn r2d2() -> Droid {\n\n Droid {\n\n id: ID(\"5\".to_string()),\n\n name: \"R2D2\".to_string(),\n\n primaryFunction: Some(\"support jedi\".to_string()),\n\n }\n\n}\n\n\n", "file_path": "examples/axum/src/starwars.rs", "rank": 58, "score": 120709.2331451801 }, { "content": "pub fn han() -> Human {\n\n Human {\n\n id: ID(\"3\".to_string()),\n\n name: \"Han Solo\".to_string(),\n\n homePlanet: None,\n\n height: Some(175.0),\n\n mass: Some(70.0),\n\n }\n\n}\n\n\n", "file_path": "examples/axum/src/starwars.rs", "rank": 59, "score": 
120709.2331451801 }, { "content": "pub fn luke() -> Human {\n\n Human {\n\n id: ID(\"1\".to_string()),\n\n name: \"Luke Skywalker\".to_string(),\n\n homePlanet: Some(\"Tatooine\".to_string()),\n\n height: Some(180.0),\n\n mass: Some(70.0),\n\n }\n\n}\n\n\n", "file_path": "examples/axum/src/starwars.rs", "rank": 60, "score": 120709.2331451801 }, { "content": "pub fn c3po() -> Droid {\n\n Droid {\n\n id: ID(\"6\".to_string()),\n\n name: \"C3PO\".to_string(),\n\n primaryFunction: Some(\"communication\".to_string()),\n\n }\n\n}\n\n\n", "file_path": "examples/axum/src/starwars.rs", "rank": 61, "score": 120709.2331451801 }, { "content": "pub fn get_method_args_without_context(\n\n method: &ImplItemMethod,\n\n) -> Result<Vec<(PatIdent, Type)>, syn::Error> {\n\n let mut args = Vec::new();\n\n if method.sig.inputs.is_empty() {\n\n return Err(syn::Error::new_spanned(\n\n &method.sig,\n\n \"self must be the first argument.\",\n\n ));\n\n }\n\n\n\n for (index, arg) in method.sig.inputs.iter().enumerate() {\n\n if is_context_type(arg) {\n\n continue;\n\n }\n\n\n\n match arg {\n\n FnArg::Receiver(receiver) => {\n\n if index != 0 {\n\n return Err(syn::Error::new_spanned(\n", "file_path": "macro/src/utils.rs", "rank": 62, "score": 119810.26048467714 }, { "content": "enum TypeDetail<'a> {\n\n Named(&'a TypeDefinition),\n\n NonNull(&'a str),\n\n List(&'a str),\n\n}\n\n\n\npub struct __Type<'a> {\n\n schema: &'a Schema,\n\n detail: TypeDetail<'a>,\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\npub(crate) enum __TypeKind {\n\n Scalar,\n\n Object,\n\n Interface,\n\n Union,\n\n Enum,\n\n InputObject,\n\n List,\n", "file_path": "src/types/introspection/introspection_type.rs", "rank": 63, "score": 119560.40692198649 }, { "content": "fn axum_import_str() -> String {\n\n let statements = vec![\n\n \"mod graphql;\",\n\n \"use rusty_gql::*;\",\n\n \"use rusty_gql_axum::*;\",\n\n \"use std::{net::SocketAddr, path::Path};\",\n\n \"use axum::{routing::get, AddExtensionLayer, Router};\",\n\n \"use 
graphql::Query;\",\n\n \"type ContainerType = Container<Query, EmptyMutation, EmptySubscription>;\",\n\n ];\n\n statements.join(\"\\n\")\n\n}\n\n\n", "file_path": "cli/src/code_generate/project/axum/main_file.rs", "rank": 64, "score": 116428.3079479121 }, { "content": "fn axum_gql_playground() -> String {\n\n let mut scope = Scope::new();\n\n let f = scope.new_fn(\"gql_playground\");\n\n f.set_async(true);\n\n f.ret(\"impl axum::response::IntoResponse\");\n\n f.line(\"axum::response::Html(playground_html(\\\"/\\\", None))\");\n\n\n\n scope.to_string()\n\n}\n\n\n", "file_path": "cli/src/code_generate/project/axum/main_file.rs", "rank": 65, "score": 116428.3079479121 }, { "content": "fn axum_gql_handler() -> String {\n\n let mut scope = Scope::new();\n\n let f = scope.new_fn(\"gql_handler\");\n\n f.set_async(true);\n\n f.arg(\"container\", \"axum::extract::Extension<ContainerType>\");\n\n f.arg(\"req\", \"GqlRequest\");\n\n f.ret(\"GqlResponse\");\n\n f.line(\"let result = execute(&container, req.0).await;\");\n\n f.line(\"GqlResponse::from(result)\");\n\n\n\n scope.to_string()\n\n}\n\n\n", "file_path": "cli/src/code_generate/project/axum/main_file.rs", "rank": 66, "score": 116428.3079479121 }, { "content": "fn main_file_content() -> String {\n\n let contents = vec![\n\n axum_import_str(),\n\n axum_gql_handler(),\n\n axum_gql_playground(),\n\n axum_main_function(),\n\n ];\n\n contents.join(\"\\n\\n\")\n\n}\n\n\n", "file_path": "cli/src/code_generate/project/axum/main_file.rs", "rank": 67, "score": 116428.3079479121 }, { "content": "fn axum_main_function() -> String {\n\n let mut scope = Scope::new();\n\n let f = scope.new_fn(\"main\");\n\n f.set_async(true);\n\n f.line(\"let schema_docs = read_schemas(Path::new(\\\"./schema\\\")).unwrap();\");\n\n f.line(\"let schema_docs: Vec<&str> = schema_docs.iter().map(|s| &**s).collect();\");\n\n f.line(\"let container = Container::new(schema_docs.as_slice(), Query, EmptyMutation, EmptySubscription, 
Default::default(),).unwrap();\");\n\n f.line(\"let app = Router::new().route(\\\"/graphiql\\\", get(gql_playground)).route(\\\"/\\\", get(gql_handler).post(gql_handler)).layer(AddExtensionLayer::new(container));\");\n\n f.line(\"let addr = SocketAddr::from(([127, 0, 0, 1], 3000));\");\n\n f.line(\"axum::Server::bind(&addr).serve(app.into_make_service()).await.unwrap();\");\n\n\n\n format!(\"#[tokio::main]\\n{}\", scope.to_string())\n\n}\n", "file_path": "cli/src/code_generate/project/axum/main_file.rs", "rank": 68, "score": 116428.3079479121 }, { "content": "pub fn build_app() -> App<'static> {\n\n App::new(\"rusty-gql\")\n\n .version(crate_version!())\n\n .setting(AppSettings::DeriveDisplayOrder)\n\n .subcommand(App::new(\"new\").arg(Arg::new(\"name\").required(true).index(1)))\n\n .subcommand(App::new(\"generate\").alias(\"g\"))\n\n}\n", "file_path": "cli/src/app.rs", "rank": 69, "score": 116196.40336063376 }, { "content": "pub fn all_reviews() -> Vec<Review> {\n\n vec![\n\n Review {\n\n stars: 3,\n\n commentary: None,\n\n episode: Some(Episode::EMPIRE),\n\n },\n\n Review {\n\n stars: 5,\n\n commentary: Some(\"Great!\".to_string()),\n\n episode: Some(Episode::NEWHOPE),\n\n },\n\n Review {\n\n stars: 4,\n\n commentary: None,\n\n episode: Some(Episode::JEDI),\n\n },\n\n ]\n\n}\n", "file_path": "examples/axum/src/starwars.rs", "rank": 70, "score": 116196.40336063376 }, { "content": "pub fn visit<'a, T: Visitor<'a>>(\n\n visitor: &mut T,\n\n ctx: &mut ValidationContext<'a>,\n\n doc: &'a Document<'a, String>,\n\n operation_name: Option<&'a str>,\n\n) {\n\n visitor.enter_document(ctx, doc);\n\n visit_definitions(visitor, ctx, &doc.definitions, operation_name);\n\n visitor.exit_document(ctx, doc);\n\n}\n\n\n", "file_path": "src/validation/visitor.rs", "rank": 71, "score": 113844.53468454468 }, { "content": "pub fn is_interface(args: &[NestedMeta]) -> bool {\n\n for arg in args {\n\n if let NestedMeta::Meta(Meta::Path(path)) = arg {\n\n let ident = 
&path.segments.last().unwrap().ident;\n\n if ident == \"interface\" {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "macro/src/utils.rs", "rank": 72, "score": 112067.58167298647 }, { "content": "pub fn is_internal(args: &[NestedMeta]) -> bool {\n\n for arg in args {\n\n if let NestedMeta::Meta(Meta::Path(path)) = arg {\n\n let ident = &path.segments.last().unwrap().ident;\n\n if ident == \"internal\" {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "macro/src/utils.rs", "rank": 73, "score": 112067.58167298647 }, { "content": "struct Serializer;\n\n\n", "file_path": "src/types/value/serializer.rs", "rank": 74, "score": 111359.68871700543 }, { "content": "fn gql_files_path(app_name: Option<&str>) -> String {\n\n match app_name {\n\n Some(path) => format!(\"{}/src/graphql\", path),\n\n None => \"src/graphql\".to_string(),\n\n }\n\n}\n\n\n\nasync fn create_graphql_files(app_name: Option<&str>) -> Result<(), std::io::Error> {\n\n let path = app_name\n\n .map(|name| format!(\"{}/schema\", name))\n\n .unwrap_or_else(|| \"schema\".to_string());\n\n let schema_contents = visit_dirs(Path::new(&path)).await?;\n\n\n\n let schema_contents: Vec<&str> = schema_contents.iter().map(|s| &**s).collect();\n\n\n\n let gql_files_path = gql_files_path(app_name);\n\n create_gql_files(&schema_contents, &gql_files_path).await\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 75, "score": 110028.93491372738 }, { "content": "#[async_trait::async_trait]\n\npub trait CustomDirective: Send + Sync {\n\n async fn resolve_field(\n\n &self,\n\n ctx: &Context<'_>,\n\n directive_args: &BTreeMap<String, GqlValue>,\n\n resolve_fut: ResolveFut<'_>,\n\n ) -> ResolverResult<Option<GqlValue>>;\n\n}\n", "file_path": "src/custom_directive.rs", "rank": 76, "score": 109351.63251707355 }, { "content": "struct SerializeMap {\n\n map: BTreeMap<String, GqlValue>,\n\n key: Option<String>,\n\n}\n\n\n\nimpl ser::SerializeMap for SerializeMap {\n\n type Ok = 
GqlValue;\n\n type Error = SerializerError;\n\n\n\n #[inline]\n\n fn serialize_key<T: ?Sized>(&mut self, key: &T) -> Result<(), Self::Error>\n\n where\n\n T: ser::Serialize,\n\n {\n\n let key = key.serialize(KeySerializer)?;\n\n self.key = Some(key);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n", "file_path": "src/types/value/serializer.rs", "rank": 77, "score": 109233.07702216545 }, { "content": "struct KeySerializer;\n\n\n\nimpl serde::Serializer for KeySerializer {\n\n type Ok = String;\n\n\n\n type Error = SerializerError;\n\n\n\n type SerializeSeq = Impossible<String, SerializerError>;\n\n\n\n type SerializeTuple = Impossible<String, SerializerError>;\n\n\n\n type SerializeTupleStruct = Impossible<String, SerializerError>;\n\n\n\n type SerializeTupleVariant = Impossible<String, SerializerError>;\n\n\n\n type SerializeMap = Impossible<String, SerializerError>;\n\n\n\n type SerializeStruct = Impossible<String, SerializerError>;\n\n\n\n type SerializeStructVariant = Impossible<String, SerializerError>;\n", "file_path": "src/types/value/serializer.rs", "rank": 78, "score": 109233.07702216545 }, { "content": "#[proc_macro_derive(GqlScalar)]\n\npub fn scalar_derive(input: TokenStream) -> TokenStream {\n\n let input = &parse_macro_input!(input as DeriveInput);\n\n match generate_scalar(input) {\n\n Ok(generated) => generated,\n\n Err(err) => err.to_compile_error().into(),\n\n }\n\n}\n\n\n", "file_path": "macro/src/lib.rs", "rank": 79, "score": 108615.45189505348 }, { "content": "#[proc_macro_derive(GqlUnion)]\n\npub fn union_derive(input: TokenStream) -> TokenStream {\n\n let input = &parse_macro_input!(input as DeriveInput);\n\n match generate_union(input) {\n\n Ok(generated) => generated,\n\n Err(err) => err.to_compile_error().into(),\n\n }\n\n}\n\n\n", "file_path": "macro/src/lib.rs", "rank": 80, "score": 108615.45189505348 }, { "content": "#[proc_macro_derive(GqlInterface)]\n\npub fn interface_derive(input: TokenStream) -> TokenStream {\n\n let input = 
&parse_macro_input!(input as DeriveInput);\n\n match generate_interface(input) {\n\n Ok(generated) => generated,\n\n Err(err) => err.to_compile_error().into(),\n\n }\n\n}\n\n\n", "file_path": "macro/src/lib.rs", "rank": 81, "score": 108615.45189505348 }, { "content": "struct SerializeTupleStruct(Vec<GqlValue>);\n\n\n\nimpl ser::SerializeTupleStruct for SerializeTupleStruct {\n\n type Ok = GqlValue;\n\n type Error = SerializerError;\n\n\n\n #[inline]\n\n fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>\n\n where\n\n T: ser::Serialize,\n\n {\n\n let value = value.serialize(Serializer)?;\n\n self.0.push(value);\n\n Ok(())\n\n }\n\n\n\n #[inline]\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n\n Ok(GqlValue::List(self.0))\n\n }\n\n}\n\n\n", "file_path": "src/types/value/serializer.rs", "rank": 82, "score": 108481.56853995362 }, { "content": "struct GqlValueVisitor;\n\n\n\nimpl<'de> Visitor<'de> for GqlValueVisitor {\n\n type Value = GqlValue;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n formatter.write_str(\"GraphQL value\")\n\n }\n\n\n\n fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n\n Ok(GqlValue::Boolean(v))\n\n }\n\n\n\n fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n", "file_path": "src/types/value/mod.rs", "rank": 83, "score": 107215.91092567776 }, { "content": "#[proc_macro_derive(GqlInputObject)]\n\npub fn input_object_derive(input: TokenStream) -> TokenStream {\n\n let input = &parse_macro_input!(input as DeriveInput);\n\n match generate_input_object(input) {\n\n Ok(generated) => generated,\n\n Err(err) => err.to_compile_error().into(),\n\n }\n\n}\n", "file_path": "macro/src/lib.rs", "rank": 84, "score": 107007.44478204832 }, { "content": "fn cargo_toml_content(app_name: &str) -> String {\n\n r#\"[package]\n\nname = \"APP_NAME\"\n\nversion = \"0.1.2\"\n\nedition = 
\"2021\"\n\n\n\n[dependencies]\n\nasync-trait = \"0.1.52\"\n\naxum = {version = \"0.4.2\", features = [\"headers\"]}\n\nhyper = \"0.14.16\"\n\nrusty-gql = \"0.1.0\"\n\nrusty-gql-axum = \"0.1.0\"\n\ntokio = { version = \"1.0\", features = [\"full\"] }\n\n\"#\n\n .replace(\"APP_NAME\", app_name)\n\n}\n", "file_path": "cli/src/code_generate/project/axum/cargo_toml_file.rs", "rank": 85, "score": 105001.59900292498 }, { "content": "struct SerializeTuple(Vec<GqlValue>);\n\n\n\nimpl ser::SerializeTuple for SerializeTuple {\n\n type Ok = GqlValue;\n\n\n\n type Error = SerializerError;\n\n\n\n fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>\n\n where\n\n T: serde::Serialize,\n\n {\n\n let value = value.serialize(Serializer)?;\n\n self.0.push(value);\n\n Ok(())\n\n }\n\n\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n\n Ok(GqlValue::List(self.0))\n\n }\n\n}\n\n\n", "file_path": "src/types/value/serializer.rs", "rank": 86, "score": 99562.91112436718 }, { "content": "struct SerializeSeq(Vec<GqlValue>);\n\n\n\nimpl ser::SerializeSeq for SerializeSeq {\n\n type Ok = GqlValue;\n\n\n\n type Error = SerializerError;\n\n\n\n fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error>\n\n where\n\n T: serde::Serialize,\n\n {\n\n let value = value.serialize(Serializer)?;\n\n self.0.push(value);\n\n Ok(())\n\n }\n\n\n\n fn end(self) -> Result<Self::Ok, Self::Error> {\n\n Ok(GqlValue::List(self.0))\n\n }\n\n}\n\n\n", "file_path": "src/types/value/serializer.rs", "rank": 87, "score": 99562.91112436718 }, { "content": "fn is_default_directive(name: &str) -> bool {\n\n vec![\"skip\", \"include\", \"deprecated\"].contains(&name)\n\n}\n", "file_path": "cli/src/code_generate/directive/mod.rs", "rank": 88, "score": 98975.3339064365 }, { "content": "fn visit_directives<'a, T: Visitor<'a>>(\n\n visitor: &mut T,\n\n ctx: &mut ValidationContext<'a>,\n\n directives: &'a [Directive<'a, String>],\n\n) {\n\n for directive in directives {\n\n 
visitor.enter_directive(ctx, directive);\n\n\n\n let schema_directive = ctx.schema.directives.get(&directive.name);\n\n\n\n for (arg_name, arg_value) in &directive.arguments {\n\n visitor.enter_argument(ctx, arg_name, arg_value);\n\n let expected_ty = schema_directive\n\n .and_then(|dir| dir.arguments.iter().find(|arg| &arg.name == arg_name))\n\n .map(|arg| arg.meta_type.clone());\n\n ctx.with_input_type(expected_ty.clone(), |ctx| {\n\n visit_input_value(\n\n visitor,\n\n ctx,\n\n directive.position,\n\n expected_ty.clone(),\n\n arg_value,\n\n )\n\n });\n\n visitor.exit_argument(ctx, arg_name, arg_value);\n\n }\n\n visitor.exit_directive(ctx, directive);\n\n }\n\n}\n\n\n", "file_path": "src/validation/visitor.rs", "rank": 89, "score": 94517.94992348838 }, { "content": "fn visit_variable_definitions<'a, T: Visitor<'a>>(\n\n visitor: &mut T,\n\n ctx: &mut ValidationContext<'a>,\n\n variable_definitions: &'a [VariableDefinition<'a, String>],\n\n) {\n\n for def in variable_definitions {\n\n visitor.enter_variable_definition(ctx, def);\n\n visitor.exit_variable_definition(ctx, def);\n\n }\n\n}\n\n\n", "file_path": "src/validation/visitor.rs", "rank": 90, "score": 92440.30085958813 }, { "content": "pub trait GqlInputType: Send + Sync + Sized {\n\n fn from_gql_value(value: Option<GqlValue>) -> Result<Self, String>;\n\n\n\n fn to_gql_value(&self) -> GqlValue;\n\n}\n\n\n\nimpl<T: GqlInputType> GqlInputType for Arc<T> {\n\n fn from_gql_value(value: Option<GqlValue>) -> Result<Self, String> {\n\n T::from_gql_value(value).map(|v| Arc::new(v))\n\n }\n\n\n\n fn to_gql_value(&self) -> GqlValue {\n\n T::to_gql_value(self)\n\n }\n\n}\n\n\n\nimpl<T: GqlInputType> GqlInputType for Box<T> {\n\n fn from_gql_value(value: Option<GqlValue>) -> Result<Self, String> {\n\n T::from_gql_value(value).map(|v| Box::new(v))\n\n }\n\n\n\n fn to_gql_value(&self) -> GqlValue {\n\n T::to_gql_value(self)\n\n }\n\n}\n", "file_path": "src/input/mod.rs", "rank": 91, "score": 92095.4506666454 }, { 
"content": "use super::directive::GqlDirective;\n\nuse graphql_parser::{\n\n schema::{EnumType as ParserEnumType, EnumValue},\n\n Pos,\n\n};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct EnumType {\n\n pub name: String,\n\n pub description: Option<String>,\n\n pub position: Pos,\n\n pub directives: Vec<GqlDirective>,\n\n pub values: Vec<EnumTypeValue>,\n\n}\n\n\n\nimpl<'a> From<ParserEnumType<'a, String>> for EnumType {\n\n fn from(gql_enum: ParserEnumType<'a, String>) -> Self {\n\n let directives = GqlDirective::from_vec_directive(gql_enum.directives);\n\n\n\n let values = gql_enum\n", "file_path": "src/types/enum_type.rs", "rank": 92, "score": 91498.05894584836 }, { "content": " .any(|x| x == *name)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct EnumTypeValue {\n\n pub name: String,\n\n pub description: Option<String>,\n\n pub position: Pos,\n\n pub directives: Vec<GqlDirective>,\n\n}\n\n\n\nimpl<'a> From<EnumValue<'a, String>> for EnumTypeValue {\n\n fn from(enum_value: EnumValue<'a, String>) -> Self {\n\n let directives = enum_value\n\n .directives\n\n .into_iter()\n\n .map(GqlDirective::from)\n\n .collect();\n\n\n", "file_path": "src/types/enum_type.rs", "rank": 93, "score": 91494.09215202717 }, { "content": " EnumTypeValue {\n\n name: enum_value.name,\n\n description: enum_value.description,\n\n position: enum_value.position,\n\n directives,\n\n }\n\n }\n\n}\n\n\n\nimpl EnumTypeValue {\n\n pub fn is_deprecated(&self) -> bool {\n\n for dir in &self.directives {\n\n if dir.name == \"deprecated\" {\n\n return true;\n\n }\n\n continue;\n\n }\n\n false\n\n }\n\n}\n", "file_path": "src/types/enum_type.rs", "rank": 94, "score": 91483.85244056344 }, { "content": " .values\n\n .into_iter()\n\n .map(EnumTypeValue::from)\n\n .collect();\n\n\n\n EnumType {\n\n name: gql_enum.name,\n\n description: gql_enum.description,\n\n position: gql_enum.position,\n\n directives,\n\n values,\n\n }\n\n }\n\n}\n\n\n\nimpl EnumType {\n\n pub fn contains(&self, name: &str) -> 
bool {\n\n self.values\n\n .iter()\n\n .map(|v| v.name.clone())\n", "file_path": "src/types/enum_type.rs", "rank": 95, "score": 91483.40588073277 }, { "content": "fn is_copy_gql_ty(field: &FieldType) -> bool {\n\n vec![\"Int\", \"Float\", \"Boolean\"].contains(&field.meta_type.name())\n\n}\n\n\n", "file_path": "cli/src/code_generate/type_definition/object_file.rs", "rank": 96, "score": 90864.76042868228 }, { "content": "fn is_return_primitive_ty(field: &FieldType) -> bool {\n\n is_gql_primitive_ty(field.meta_type.name())\n\n}\n\n\n", "file_path": "cli/src/code_generate/type_definition/object_file.rs", "rank": 97, "score": 90864.76042868228 }, { "content": "fn get_field_by_name<'a>(object: &'a ObjectType, field_name: &str) -> Option<&'a FieldType> {\n\n object.fields.iter().find(|f| f.name.eq(field_name))\n\n}\n", "file_path": "cli/src/code_generate/type_definition/object_file.rs", "rank": 98, "score": 86055.97799133063 }, { "content": "fn get_field_by_name<'a>(interface: &'a InterfaceType, field_name: &str) -> Option<&'a FieldType> {\n\n interface.fields.iter().find(|f| f.name.eq(field_name))\n\n}\n", "file_path": "cli/src/code_generate/type_definition/interface_file.rs", "rank": 99, "score": 86055.97799133063 } ]
Rust
src/encoding/unmarshal.rs
KevinKelley/hobbits-rs
1b2a4f3c8634921a379aa70992fe000973cf130b
pub use crate::encoding::envelope::Envelope; pub use crate::encoding::EwpError; pub fn unmarshal(msg: &[u8]) -> Result<Envelope,EwpError> { let index = msg.iter().position(|&r| r == '\n' as u8); let index = index.ok_or( EwpError::new("message request must contain 2 lines") )?; let hdr = &msg[0..index]; let payload = &msg[(index+1)..]; let hdr_str = String::from_utf8(hdr.to_vec())?; let hdr_parts: Vec<&str> = hdr_str.split(' ').collect(); if hdr_parts.len() != 5 { return Err(EwpError::new("not all metadata provided")) } if hdr_parts[0] != "EWP" { return Err(EwpError::new("malformed EWP envelope: must start with 'EWP'")) } let version = hdr_parts[1]; if !version.contains('.') { return Err(EwpError::new("EWP version cannot be parsed")) } if version.parse::<f32>().is_err() { return Err(EwpError::new("version should be of the form 0.0")) } let protocol = hdr_parts[2]; if protocol != "GOSSIP" && protocol != "RPC" && protocol != "PING" { return Err(EwpError::new("communication protocol unsupported")) } let msg_hdr_len: usize = hdr_parts[3].parse() .map_err(|_| EwpError::new("incorrect metadata format, cannot parse header-length"))?; let msg_bdy_len: usize = hdr_parts[4].parse() .map_err(|_| EwpError::new("incorrect metadata format, cannot parse body-length"))?; if payload.len() != msg_hdr_len + msg_bdy_len { return Err(EwpError::new(&format!("unexpected payload size: {} != {} + {}", payload.len(), msg_hdr_len, msg_bdy_len))) } let msg_hdr = &payload[0..msg_hdr_len]; let msg_bdy = &payload[msg_hdr_len..]; Ok( Envelope { version: version.to_string(), protocol: protocol.to_string(), header: msg_hdr.to_owned(), body: msg_bdy.to_owned() }) } #[cfg(test)] mod tests { use super::{Envelope, unmarshal}; #[test] fn test_unmarshal_successful() { struct Test { message: Vec<u8>, output: Envelope } let tests: Vec<Test> = vec!( Test { message: "EWP 13.05 RPC 16 14\nthis is a headerthis is a body".to_string().into_bytes(), output: Envelope { version: "13.05".to_string(), protocol: 
"RPC".to_string(), header: "this is a header".to_string().into_bytes(), body: "this is a body".to_string().into_bytes(), }, }, Test { message: "EWP 13.05 GOSSIP 7 12\ntestingtesting body".to_string().into_bytes(), output: Envelope { version: "13.05".to_string(), protocol: "GOSSIP".to_string(), header: "testing".to_string().into_bytes(), body: "testing body".to_string().into_bytes(), }, }, Test { message: "EWP 1230329483.05392489 RPC 4 4\ntesttest".to_string().into_bytes(), output: Envelope { version: "1230329483.05392489".to_string(), protocol: "RPC".to_string(), header: "test".to_string().into_bytes(), body: "test".to_string().into_bytes(), }, }, ); for t in tests.iter() { let unmarshalled = unmarshal(&t.message); if let Ok(msg) = unmarshalled { println!("{}", t.output); assert!(msg == t.output); } else { assert!(false); } } } #[test] fn test_unmarshal_unsuccessful() { use super::*; struct Test { message: Vec<u8>, err: EwpError } let tests: Vec<Test> = vec!( Test { message: "EWP 13.05 RPC blahblahblah json 16 14this is a headerthis is a body".to_string().into_bytes(), err: EwpError::new("message request must contain 2 lines"), }, Test { message: "EWP 13.05 7 12\ntestingtesting body".to_string().into_bytes(), err: EwpError::new("not all metadata provided"), }, Test { message: "EWP 123032948392489 RPC 4 4\ntesttest".to_string().into_bytes(), err: EwpError::new("EWP version cannot be parsed"), }, Test { message: "EWP 123032948.392489 notrpc 4 4\ntesttest".to_string().into_bytes(), err: EwpError::new("communication protocol unsupported"), }, Test { message: "EWP 123032948.392489 GOSSIP f 4\ntesttest".to_string().into_bytes(), err: EwpError::new("incorrect metadata format, cannot parse header-length"), }, Test { message: "EWP 123032948.392489 GOSSIP 4 f\ntesttest".to_string().into_bytes(), err: EwpError::new("incorrect metadata format, cannot parse body-length"), }, ); for t in tests.iter() { let unmarshalled = unmarshal(&t.message); match unmarshalled { Ok(msg) => { 
println!("expected: {}", &t.err); println!("received: {}", msg); assert!(msg != msg) } Err(err) => { println!("expected: '{}'", t.err.details); println!("received: '{}'", err.details); assert!(t.err.details == err.details) } } } } }
pub use crate::encoding::envelope::Envelope; pub use crate::encoding::EwpError; pub fn unmarshal(msg: &[u8]) -> Result<Envelope,EwpError> { let index = msg.iter().position(|&r| r == '\n' as u8); let index = index.ok_or( EwpError::new("message request must contain 2 lines") )?; let hdr = &msg[0..index]; let payload = &msg[(index+1)..]; let hdr_str = String::from_utf8(hdr.to_vec())?; let hdr_parts: Vec<&str> = hdr_str.split(' ').collect(); if hdr_parts.len() != 5 { return Err(EwpError::new("not all metadata provided")) } if hdr_parts[0] != "EWP" { return Err(EwpError::new("malformed EWP envelope: must start with 'EWP'")) } let version = hdr_parts[1]; if !version.contains('.') { return Err(EwpError::new("EWP version cannot be parsed")) } if version.parse::<f32>().is_err() { return Err(EwpError::new("version should be of the form 0.0")) } let protocol = hdr_parts[2]; if protocol != "GOSSIP" && protocol != "RPC" && protocol != "PING" { return Err(EwpError::new("communication protocol unsupported")) } let msg_hdr_len: usize = hdr_parts[3].parse() .map_err(|_| EwpError::new("incorrect metadata format, cannot parse header-length"))?; let msg_bdy_len: usize = hdr_parts[4].parse() .map_err(|_| EwpError::new("incorrect metadata format, cannot parse body-length"))?; if payload.len() != msg_hdr_len + msg_bdy_len { return Err(EwpError::new(&format!("unexpected payload size: {} != {} + {}", payload.len(), msg_hdr_len, msg_bdy_len))) } let msg_hdr = &payload[0..msg_hdr_len]; let msg_bdy = &payload[msg_hdr_len..]; Ok( Envelope { version: version.to_string(), protocol: protocol.to_string(), header: msg_hdr.to_owned(), body: msg_bdy.to_owned() }) } #[cfg(test)] mod tests { use super::{Envelope, unmarshal}; #[test] fn test_unmarshal_successful() { struct Test { message: Vec<u8>, output: Envelope } let tests: Vec<Test> = vec!( Test { message: "EWP 13.05 RPC 16 14\nthis is a headerthis is a body".to_string().into_bytes(), output: Envelope { version: "13.05".to_string(), protocol: 
"RPC".to_string(), header: "this is a header".to_string().into_bytes(), body: "this is a body".to_string().into_bytes(), }, }, Test { message: "EWP 13.05 GOSSIP 7 12\ntestingtesting body".to_string().into_bytes(), output: Envelope { version: "13.05".to_string(), protocol: "GOSSIP".to_string(), header: "testing".to_string().into_bytes(), body: "testing body".to_string().into_bytes(), }, }, Test { message: "EWP 1230329483.05392489 RPC 4 4\ntesttest".to_string().into_bytes(), output: Envelope { version: "1230329483.05392489".to_string(), protocol: "RPC".to_string(), header: "test".to_string().into_bytes(), body: "test".to_string().into_bytes(), }, }, ); for t in tests.iter() { let unmarshalled = unmarshal(&t.message); if let Ok(msg) = unmarshalled { println!("{}", t.output); assert!(msg == t.output); } else { assert!(false); } } } #[test] fn test_unmarshal_unsuccessful() {
}
use super::*; struct Test { message: Vec<u8>, err: EwpError } let tests: Vec<Test> = vec!( Test { message: "EWP 13.05 RPC blahblahblah json 16 14this is a headerthis is a body".to_string().into_bytes(), err: EwpError::new("message request must contain 2 lines"), }, Test { message: "EWP 13.05 7 12\ntestingtesting body".to_string().into_bytes(), err: EwpError::new("not all metadata provided"), }, Test { message: "EWP 123032948392489 RPC 4 4\ntesttest".to_string().into_bytes(), err: EwpError::new("EWP version cannot be parsed"), }, Test { message: "EWP 123032948.392489 notrpc 4 4\ntesttest".to_string().into_bytes(), err: EwpError::new("communication protocol unsupported"), }, Test { message: "EWP 123032948.392489 GOSSIP f 4\ntesttest".to_string().into_bytes(), err: EwpError::new("incorrect metadata format, cannot parse header-length"), }, Test { message: "EWP 123032948.392489 GOSSIP 4 f\ntesttest".to_string().into_bytes(), err: EwpError::new("incorrect metadata format, cannot parse body-length"), }, ); for t in tests.iter() { let unmarshalled = unmarshal(&t.message); match unmarshalled { Ok(msg) => { println!("expected: {}", &t.err); println!("received: {}", msg); assert!(msg != msg) } Err(err) => { println!("expected: '{}'", t.err.details); println!("received: '{}'", err.details); assert!(t.err.details == err.details) } } } }
function_block-function_prefix_line
[ { "content": "/// Marshal takes a parsed message and encodes it to a wire protocol message\n\npub fn marshal(msg: &Envelope) -> Result<Vec<u8>, EwpError> {\n\n\n\n if msg.version == \"\" { return Err(EwpError::new(\"missing version!\")) }\n\n if msg.protocol == \"\" { return Err(EwpError::new(\"missing protocol!\")) }\n\n\n\n let header: String = format!(\"EWP {} {} {} {}\\n\",\n\n msg.version,\n\n msg.protocol,\n\n msg.header.len(),\n\n msg.body.len());\n\n\n\n let mut outbytes: Vec<u8> = header.into_bytes();\n\n outbytes.extend(&msg.header);\n\n outbytes.extend(&msg.body);\n\n\n\n return Ok(outbytes)\n\n}\n\n\n\n\n\n\n", "file_path": "src/encoding/marshal.rs", "rank": 1, "score": 97797.83964392354 }, { "content": "fn main() {\n\n\n\n // Parse command-line options:\n\n let mut opts = getopts::Options::new();\n\n opts.optopt(\"h\", \"host\", \"server to connect\", \"HOST\");\n\n opts.optopt(\"p\", \"port\", \"port\", \"PORT\");\n\n\n\n let matches = opts.parse(std::env::args().skip(1)).unwrap();\n\n let host = matches.opt_str(\"host\").unwrap_or(\"127.0.0.1\".to_string());\n\n let port = matches.opt_str(\"port\").unwrap_or(\"12345\".to_string());\n\n\n\n // Bind the server's socket.\n\n let addr = format!(\"{}:{}\", host, port).parse().unwrap();\n\n\n\n let listener = TcpListener::bind(&addr)\n\n .expect(\"unable to bind TCP listener\");\n\n\n\n println!(\"listening on {}\", addr);\n\n // Pull out a stream of sockets for incoming connections\n\n let server = listener.incoming()\n", "file_path": "src/main.rs", "rank": 10, "score": 25658.174552880897 }, { "content": "/// Package encoding implements message encoding and decoding for Hobbits, a Lightweight,\n\n/// Multiclient Wire Protocol For ETH2.0 Communications.\n\n\n\npub mod envelope;\n\npub mod marshal;\n\npub mod unmarshal;\n\n\n\n// publish the public interface to the encoding module\n\npub use envelope::Envelope;\n\npub use marshal::marshal;\n\npub use unmarshal::unmarshal;\n\n\n\n\n\n// consolidate 
multiple error types that we handle/report, under one type 'EwpError'\n\n\n\nuse std::error::Error;\n\nuse std::fmt;\n\n\n\n#[derive(Debug)]\n\npub struct EwpError {\n", "file_path": "src/encoding/mod.rs", "rank": 11, "score": 18801.91427029743 }, { "content": "\n\n// wrap expected errors in our error type\n\nuse std::string::FromUtf8Error;\n\nimpl From<FromUtf8Error> for EwpError {\n\n fn from(err: FromUtf8Error) -> Self {\n\n EwpError::new(err.description())\n\n }\n\n}\n\n\n\nuse std::num::ParseIntError;\n\nimpl From<ParseIntError> for EwpError {\n\n fn from(err: ParseIntError) -> Self {\n\n EwpError::new(err.description())\n\n }\n\n}\n", "file_path": "src/encoding/mod.rs", "rank": 12, "score": 18789.885974823836 }, { "content": " details: String\n\n}\n\n\n\nimpl EwpError {\n\n pub fn new(msg: &str) -> EwpError {\n\n EwpError{details: msg.to_string()}\n\n }\n\n}\n\n\n\nimpl fmt::Display for EwpError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f,\"{}\",self.details)\n\n }\n\n}\n\n\n\nimpl Error for EwpError {\n\n fn description(&self) -> &str {\n\n &self.details\n\n }\n\n}\n", "file_path": "src/encoding/mod.rs", "rank": 13, "score": 18789.779951823 }, { "content": "\n\nextern crate hex;\n\nuse std::fmt;\n\n\n\n/// Envelope represents a parsed Hobbits message.\n\n/// See examples of unparsed and parsed messages here: https://github.com/deltap2p/hobbits/blob/master/specs/protocol.md\n\n#[derive(Clone, Hash, Default, PartialEq, Debug)]\n\npub struct Envelope {\n\n pub version: String,\n\n pub protocol: String,\n\n pub header: Vec<u8>,\n\n pub body: Vec<u8>,\n\n}\n\n\n\nimpl Envelope {\n\n\n\n pub fn new(proto: &str, hdr: &[u8], bdy: &[u8]) -> Envelope {\n\n return Envelope {\n\n version: \"0.2\".to_string(),\n\n protocol: proto.to_string(),\n", "file_path": "src/encoding/envelope.rs", "rank": 14, "score": 18618.509472214933 }, { "content": " header: hdr.to_vec(),\n\n body: bdy.to_vec()\n\n }\n\n }\n\n\n\n}\n\nimpl fmt::Display for Envelope 
{\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"EWP {} {} {} {}\\n0x{}\\n0x{}\",\n\n self.version, self.protocol, self.header.len(), self.body.len(), hex::encode(&self.header), hex::encode(&self.body))\n\n\t}\n\n}\n", "file_path": "src/encoding/envelope.rs", "rank": 15, "score": 18614.778078618776 }, { "content": "// Turns errors into std::io::Error\n\nfn bad_data<E>(_: E) -> std::io::Error {\n\n Error::new(ErrorKind::InvalidData, \"Unable to decode input\")\n\n}\n\n\n\n// Encoding is easy, we marshal our message onto the stream and send the bytes,\n\nimpl Encoder for EwpCodec {\n\n type Item = Envelope;\n\n type Error = std::io::Error;\n\n\n\n fn encode(&mut self, msg: Self::Item, buf: &mut BytesMut) -> Result<(), Self::Error> {\n\n\n\n // properly we should marshal directly to 'buf'; will refactor later\n\n let tmp = marshal(&msg).unwrap();\n\n buf.reserve(tmp.len());\n\n buf.put(tmp);\n\n Ok(())\n\n }\n\n}\n\n\n\n// Decoding is easy, since we assume that message arrives a full packet; no need\n", "file_path": "src/encoding/codec.rs", "rank": 16, "score": 17712.403521788918 }, { "content": " encoded: Envelope,\n\n message: String\n\n }\n\n let tests: Vec<Test> = vec!(\n\n \t\tTest{\n\n \t\t\tencoded: Envelope{\n\n \t\t\t\tversion: \"13.05\".to_string(),\n\n \t\t\t\tprotocol: \"RPC\".to_string(),\n\n \t\t\t\theader: \"this is a header\".to_string().into_bytes(),\n\n \t\t\t\tbody: \"this is a body\".to_string().into_bytes(),\n\n \t\t\t},\n\n \t\t\tmessage: \"EWP 13.05 RPC 16 14\\nthis is a headerthis is a body\".to_string(),\n\n \t\t},\n\n \t\tTest{\n\n \t\t\tencoded: Envelope{\n\n \t\t\t\tversion: \"13.05\".to_string(),\n\n \t\t\t\tprotocol: \"GOSSIP\".to_string(),\n\n \t\t\t\theader: \"testing\".to_string().into_bytes(),\n\n \t\t\t\tbody: \"testing body\".to_string().into_bytes(),\n\n \t\t\t},\n", "file_path": "src/encoding/marshal.rs", "rank": 17, "score": 19.632779578186067 }, { "content": " \t\t\tmessage: \"EWP 13.05 GOSSIP 7 
12\\ntestingtesting body\".to_string(),\n\n \t\t},\n\n \t\tTest{\n\n \t\t\tencoded: Envelope{\n\n \t\t\t\tversion: \"1230329483.05392489\".to_string(),\n\n \t\t\t\tprotocol: \"RPC\".to_string(),\n\n \t\t\t\theader: \"test\".to_string().into_bytes(),\n\n \t\t\t\tbody: \"test\".to_string().into_bytes(),\n\n \t\t\t},\n\n \t\t\tmessage: \"EWP 1230329483.05392489 RPC 4 4\\ntesttest\".to_string(),\n\n \t\t},\n\n \t);\n\n\n\n for t in tests.iter() {\n\n let marshalled = marshal(&t.encoded).unwrap();\n\n println!(\"{}\", t.message);\n\n assert!(marshalled == t.message.as_bytes());\n\n }\n\n }\n\n\n", "file_path": "src/encoding/marshal.rs", "rank": 18, "score": 16.804096478222462 }, { "content": " #[test]\n\n fn test_marshal_unsuccessful() {\n\n struct Test {\n\n encoded: Envelope,\n\n err: EwpError\n\n }\n\n let tests: Vec<Test> = vec!(\n\n \t\tTest{\n\n \t\t\tencoded: Envelope{\n\n \t\t\t\tversion: \"\".to_string(),\n\n \t\t\t\tprotocol: \"RPC\".to_string(),\n\n \t\t\t\theader: \"this is a header\".to_string().into_bytes(),\n\n \t\t\t\tbody: \"this is a body\".to_string().into_bytes(),\n\n \t\t\t},\n\n \t\t\terr: EwpError::new(\"cannot marshal message, version not found\"),\n\n \t\t},\n\n \t\tTest{\n\n \t\t\tencoded: Envelope{\n\n \t\t\t\tversion: \"1230329483.05392489\".to_string(),\n\n \t\t\t\tprotocol: \"\".to_string(),\n", "file_path": "src/encoding/marshal.rs", "rank": 19, "score": 16.35854205431442 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::{Envelope, marshal, EwpError};\n\n\n\n #[test]\n\n fn basic_sanity() {\n\n // - desc: 'no body'\n\n // marshalled: \"EWP 0.2 PING 0 0\\n\"\n\n let mut msg = Envelope::new(\"PING\", &vec!(), &vec!());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PING 0 0\\n\".as_bytes());\n\n // - desc: '10 byte body'\n\n // marshalled: \"EWP 0.2 PING 0 10\\n0123456789\"\n\n msg = Envelope::new(\"PING\", &vec!(), \"0123456789\".as_bytes());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PING 0 
10\\n0123456789\".as_bytes());\n\n // - desc: '10 byte header'\n\n // marshalled: \"EWP 0.2 PING 10 0\\n0123456789\"\n\n msg = Envelope::new(\"PING\", \"0123456789\".as_bytes(), &vec!());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PING 10 0\\n0123456789\".as_bytes());\n\n // - desc: '9 byte header, 10 byte body'\n\n // marshalled: \"EWP 0.2 PING 9 10\\n9876543210123456789\"\n", "file_path": "src/encoding/marshal.rs", "rank": 20, "score": 13.706133785731353 }, { "content": " .map_err(|e| eprintln!(\"accept failed = {:?}\", e))\n\n .for_each(|sock| {\n\n\n\n // Split up the reading and writing parts of the socket.\n\n let (reader, mut writer) = sock.split();\n\n\n\n tokio::io::read_to_end(reader, vec!())\n\n .and_then(move |(_, buf)| {\n\n println!(\"AAAA received {} bytes: '{}'\", buf.len(), String::from_utf8_lossy(&buf));\n\n let rslt = unmarshal(&buf);\n\n match rslt {\n\n Ok(msg) => {\n\n println!(\"AAAA: {}\", msg);\n\n if msg.protocol == \"PING\" {\n\n let pong_msg = Envelope {\n\n protocol: \"PONG\".to_string(),\n\n version: \"0.1\".to_string(),\n\n header: msg.header,\n\n body: msg.body\n\n };\n", "file_path": "src/main.rs", "rank": 21, "score": 12.972191677344888 }, { "content": " \t\t\t\theader: \"test\".to_string().into_bytes(),\n\n \t\t\t\tbody: \"test\".to_string().into_bytes(),\n\n \t\t\t},\n\n \t\t\terr: EwpError::new(\"cannot marshal message, protocol not found\"),\n\n \t\t},\n\n \t);\n\n\n\n for t in tests.iter() {\n\n let marshalled = marshal(&t.encoded);\n\n println!(\"{}\", t.err);\n\n //assert!(marshalled == t.message.as_bytes());\n\n }\n\n }\n\n}\n", "file_path": "src/encoding/marshal.rs", "rank": 22, "score": 10.972786305676228 }, { "content": "\n\npub use super::envelope::{Envelope};\n\nuse crate::encoding::EwpError;\n\n\n\n\n\n/// Marshal takes a parsed message and encodes it to a wire protocol message\n", "file_path": "src/encoding/marshal.rs", "rank": 23, "score": 10.767071384324932 }, { "content": " // marshalled: \"EWP 0.2 PING 0 
0\\n\"\n\n let mut msg = Envelope::new(\"PING\", &vec!(), &vec!());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PING 0 0\\n\".as_bytes());\n\n // - desc: 'FOO'\n\n // marshalled: \"EWP 0.2 FOO 0 0\\n\"\n\n msg = Envelope::new(\"FOO\", &vec!(), &vec!());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 FOO 0 0\\n\".as_bytes());\n\n // - desc: 'BAR'\n\n // marshalled: \"EWP 0.2 BAR 0 0\\n\"\n\n msg = Envelope::new(\"BAR\", &vec!(), &vec!());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 BAR 0 0\\n\".as_bytes());\n\n // - desc: 'PONG'\n\n // marshalled: \"EWP 0.2 PONG 0 0\\n\"\n\n msg = Envelope::new(\"PONG\", &vec!(), &vec!());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PONG 0 0\\n\".as_bytes());\n\n }\n\n\n\n #[test]\n\n fn test_marshal_successful() {\n\n struct Test {\n", "file_path": "src/encoding/marshal.rs", "rank": 24, "score": 9.15998310829103 }, { "content": "// to deal with reassembing partial data in multiple steps.\n\nimpl Decoder for EwpCodec {\n\n type Item = Envelope;\n\n type Error = std::io::Error;\n\n\n\n // Find the next line in buf!\n\n fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n\n\n let msg = unmarshal(buf).map_err(Error::new(ErrorKind::InvalidData, \"unparseable envelope\"))?;\n\n // success, we got a whole Envelope.\n\n let bytes_used = offset + 1 + msg.header.len() + msg.body.len();\n\n // Cut out the used bytes from the buffer so we don't return it again.\n\n let _ = buf.split_to(bytes_used);\n\n Some(msg)\n\n }\n\n}\n", "file_path": "src/encoding/codec.rs", "rank": 25, "score": 8.812554178631743 }, { "content": " msg = Envelope::new(\"PING\", \"987654321\".as_bytes(), \"0123456789\".as_bytes());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PING 9 10\\n9876543210123456789\".as_bytes());\n\n // - desc: '9 byte header, 10 byte body, extra newlines'\n\n // marshalled: \"EWP 0.2 PING 9 10\\n\\n876543210123456\\n89\"\n\n msg = Envelope::new(\"PING\", \"\\n87654321\".as_bytes(), 
\"0123456\\n89\".as_bytes());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PING 9 10\\n\\n876543210123456\\n89\".as_bytes());\n\n // - desc: '9 byte header, 10 byte body, extra extra newlines'\n\n // marshalled: \"EWP 0.2 PING 9 10\\n\\n87654321\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\"\n\n msg = Envelope::new(\"PING\", \"\\n87654321\".as_bytes(), \"\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\".as_bytes());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PING 9 10\\n\\n87654321\\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\".as_bytes());\n\n // - desc: '9 byte header, 10 byte body, control character montage'\n\n // marshalled: \"EWP 0.2 PING 9 10\\n\\n87654321\\n\\0\\a\\b\\f\\n\\r\\t\\v\\\\\"\n\n // NOTE: those aren't valid Rust control characters...\n\n msg = Envelope::new(\"PING\", \"\\n87654321\".as_bytes(), \"\\n\\0\\x0a\\x0b\\x0f\\n\\r\\t\\x01\\\\\".as_bytes());\n\n assert_eq!(marshal(&msg).unwrap(), \"EWP 0.2 PING 9 10\\n\\n87654321\\n\\0\\x0a\\x0b\\x0f\\n\\r\\t\\x01\\\\\".as_bytes());\n\n }\n\n\n\n #[test]\n\n fn different_commands() {\n\n // - desc: 'PING'\n", "file_path": "src/encoding/marshal.rs", "rank": 26, "score": 8.62328116048587 }, { "content": "extern crate bytes;\n\nextern crate tokio_codec;\n\n\n\nuse bytes::{BufMut, BytesMut};\n\nuse tokio::codec::{Decoder, Encoder};\n\nuse tokio::prelude::*;\n\nuse {Error, ErrorKind};\n\n\n\nuse crate::encoding::{Envelope, marshal, unmarshal, EwpError};\n\n\n\n// This is where we'd keep track of any extra book-keeping information\n\n// our transport needs to operate.\n\npub struct EwpCodec;\n\n\n\n// Turns errors into std::io::Error\n", "file_path": "src/encoding/codec.rs", "rank": 27, "score": 8.041521024003814 }, { "content": "# An implementation of the Hobbits protocol, in Rust.\n\n\n\n\n\n- To run unit tests: `cargo test`\n\n\n\n- To run the demo server: `cargo run` and, in another terminal, start the conformance\n\ntest like `./conformance --port 12345`\n\n\n\n- (To use a different port: `cargo run -- --port 8888` and `./conformance --port 
8888`)\n\n\n\n(note: conformance may need to run as sudo, depending on whether your system allows ping)\n\n\n\n\n\nThere is still work-in-progress, to simplify usage (and to support streaming of\n\nEnvelopes over a single TCP connection); that's what `src/encoding/codec.rs` is\n\nfor, along with some other stuff not committed yet.\n", "file_path": "README.md", "rank": 28, "score": 6.802792022513735 }, { "content": "\n\nextern crate tokio;\n\nextern crate getopts;\n\n\n\nuse tokio::prelude::{Future,Stream,AsyncRead,Write};\n\nuse tokio::net::TcpListener;\n\n\n\nextern crate hobbits;\n\n\n\nuse hobbits::encoding::{marshal, unmarshal, Envelope};\n\n\n", "file_path": "src/main.rs", "rank": 29, "score": 6.440158327834765 }, { "content": " writer.write(&marshal(&pong_msg).unwrap().to_owned()).expect(\"write (or marshal failed!)\");\n\n }\n\n }\n\n Err(e) => {\n\n println!(\"ERROR: {:?}\", e);\n\n }\n\n }\n\n Ok(())\n\n })\n\n .map(|_| ())\n\n .map_err(|e| println!(\"socket error = {:?}\", e))\n\n });\n\n\n\n // Start the Tokio runtime\n\n tokio::run(server);\n\n}\n", "file_path": "src/main.rs", "rank": 30, "score": 5.392157351802746 }, { "content": "#![allow(unused_imports)]\n\n#![allow(unused_variables)]\n\n#![allow(dead_code)]\n\n\n\npub mod encoding;\n\n\n\n// in process: create a HobbitsTransport, to manage multiple connections and transport types...\n\n//pub mod server;\n", "file_path": "src/lib.rs", "rank": 31, "score": 4.45258214457933 } ]
Rust
kiln_lib/src/kafka.rs
simplybusiness/Kiln
884e96059622c72e99254ac737bee25aee964adf
use addr::{parser::DomainName, psl::List}; use openssl_probe::ProbeResult; use rdkafka::config::ClientConfig; use rdkafka::consumer::stream_consumer::StreamConsumer; use rdkafka::error::KafkaError; use rdkafka::producer::future_producer::FutureProducer; use std::fmt::Display; #[derive(Debug, Clone)] pub struct KafkaBootstrapTlsConfig(Vec<String>); #[derive(Debug)] pub enum ValidationFailureReason { Missing, PresentButEmpty, CouldNotBeParsed, } impl Display for ValidationFailureReason { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ValidationFailureReason::Missing => f.write_str("value is missing"), ValidationFailureReason::PresentButEmpty => f.write_str("value is present but empty"), ValidationFailureReason::CouldNotBeParsed => f.write_str("value could not be parsed"), } } } #[derive(thiserror::Error, Debug)] pub enum KafkaConfigError { #[error("Required environment variable {var} failed validation because {reason}")] RequiredValueValidationFailure { var: String, reason: ValidationFailureReason, }, #[error("Optional environment variable {var} failed validation because {reason}")] OptionalValueValidationFailure { var: String, reason: ValidationFailureReason, }, #[error("Kafka client could not be created")] KafkaError(#[from] KafkaError), #[error("Could not find TLS trust store")] TlsTrustStore, } pub fn get_bootstrap_config<I>(vars: &mut I) -> Result<KafkaBootstrapTlsConfig, KafkaConfigError> where I: Iterator<Item = (String, String)>, { let local_vars: Vec<(String, String)> = vars.collect(); let disable_kafka_domain_validation = match local_vars .iter() .find(|var| var.0 == "DISABLE_KAFKA_DOMAIN_VALIDATION") { None => Ok(false), Some(var) => { if var.1.is_empty() { return Err(KafkaConfigError::OptionalValueValidationFailure { var: "DISABLE_KAFKA_DOMAIN_VALIDATION".into(), reason: ValidationFailureReason::PresentButEmpty, }); } else { match var.1.as_ref() { "true" => Ok(true), "false" => Ok(false), _ => 
Err(KafkaConfigError::OptionalValueValidationFailure { var: "DISABLE_KAFKA_DOMAIN_VALIDATION".into(), reason: ValidationFailureReason::CouldNotBeParsed, }), } } } }?; let kafka_bootstrap_tls = match local_vars.iter().find(|var| var.0 == "KAFKA_BOOTSTRAP_TLS") { None => Err(KafkaConfigError::RequiredValueValidationFailure { var: "KAFKA_BOOTSTRAP_TLS".into(), reason: ValidationFailureReason::Missing, }), Some(var) => { if var.1.is_empty() { return Err(KafkaConfigError::RequiredValueValidationFailure { var: "KAFKA_BOOTSTRAP_TLS".into(), reason: ValidationFailureReason::PresentButEmpty, }); } else { let raw_hosts: Vec<String> = var.1.split(',').map(|s| s.to_owned()).collect(); let valid = raw_hosts.iter().all(|x| { let parts: Vec<&str> = x.split(':').collect(); let domain_valid = if disable_kafka_domain_validation { true } else { List.parse_domain_name(parts[0]) .map(|name| name.has_known_suffix()) .unwrap_or(false) }; let port_valid = u16::from_str_radix(parts[1], 10).is_ok(); domain_valid && port_valid }); if valid { Ok(raw_hosts) } else { Err(KafkaConfigError::RequiredValueValidationFailure { var: "KAFKA_BOOTSTRAP_TLS".into(), reason: ValidationFailureReason::CouldNotBeParsed, }) } } } }?; Ok(KafkaBootstrapTlsConfig(kafka_bootstrap_tls)) } pub fn build_kafka_producer( config: KafkaBootstrapTlsConfig, ) -> Result<FutureProducer, KafkaConfigError> { let cert_probe_result = openssl_probe::probe(); let cert_location = match cert_probe_result { ProbeResult { cert_file, .. } if cert_file.is_some() => Ok(cert_file), ProbeResult { cert_dir, .. 
} if cert_dir.is_some() => Ok(cert_dir), _ => Err(KafkaConfigError::TlsTrustStore), }?; ClientConfig::new() .set("metadata.broker.list", &config.0.join(",")) .set("compression.type", "gzip") .set("security.protocol", "SSL") .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256") .set("ssl.ca.location", cert_location.unwrap().to_string_lossy()) .set("message.max.bytes", "10000000") .create() .map_err(|err| err.into()) } pub fn build_kafka_consumer( config: KafkaBootstrapTlsConfig, consumer_group_name: String, ) -> Result<StreamConsumer, KafkaConfigError> { let cert_probe_result = openssl_probe::probe(); let cert_location = match cert_probe_result { ProbeResult { cert_file, .. } if cert_file.is_some() => Ok(cert_file), ProbeResult { cert_dir, .. } if cert_dir.is_some() => Ok(cert_dir), _ => Err(KafkaConfigError::TlsTrustStore), }?; ClientConfig::new() .set("metadata.broker.list", &config.0.join(",")) .set("group.id", &consumer_group_name) .set("compression.type", "gzip") .set("security.protocol", "SSL") .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256") .set("ssl.ca.location", cert_location.unwrap().to_string_lossy()) .set("fetch.message.max.bytes", "10000000") .create() .map_err(|err| err.into()) } #[cfg(test)] mod tests { use super::*; #[allow(unused_must_use)] #[tokio::test] async fn creating_kafka_producer_does_not_return_a_client_config_error() { let config = KafkaBootstrapTlsConfig(vec!["host1:1234".to_string(), "host2:1234".to_string()]); build_kafka_producer(config).unwrap(); } #[allow(unused_must_use)] #[tokio::test] async fn creating_kafka_consumer_does_not_return_a_client_config_error() { let config = KafkaBootstrapTlsConfig(vec!["host1:1234".to_string(), "host2:1234".to_string()]); build_kafka_consumer(config, "TestConsumerGroup".to_string()).unwrap(); } #[test] fn 
get_bootstrap_config_returns_config_when_environment_vars_present_and_valid() { let hostname = "my.kafka.host.example.com:1234,my.second.kafka.host.example.com:1234".to_owned(); let mut fake_vars = vec![("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname)].into_iter(); let expected = vec![ "my.kafka.host.example.com:1234".to_owned(), "my.second.kafka.host.example.com:1234".to_owned(), ]; let actual = get_bootstrap_config(&mut fake_vars).expect("expected Ok(_) value"); assert_eq!(actual.0, expected); } #[test] fn get_bootstrap_config_returns_error_when_environment_vars_missing() { let mut fake_vars = std::iter::empty::<(String, String)>(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value is missing" ) } #[test] fn get_bootstrap_config_returns_error_when_environment_vars_present_but_empty() { let hostname = "".to_owned(); let mut fake_vars = vec![("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone())].into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value is present but empty" ) } #[test] fn get_bootstrap_config_returns_error_when_hostname_invalid_and_domain_validation_enabled() { let hostname = "kafka:1234".to_owned(); let mut fake_vars = vec![("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone())].into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value could not be parsed" ) } #[test] fn get_bootstrap_config_returns_configration_when_hostname_not_a_valid_domain_and_domain_validation_disabled( ) { let hostname = "kafka:1234".to_owned(); let mut fake_vars = vec![ ("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone()), 
( "DISABLE_KAFKA_DOMAIN_VALIDATION".to_owned(), "true".to_owned(), ), ] .into_iter(); let expected = vec![hostname.clone()]; let actual = get_bootstrap_config(&mut fake_vars).expect("expected Ok(_) value"); assert_eq!(actual.0, expected) } #[test] fn get_bootstrap_config_returns_error_when_port_number_invalid() { let hostname = "my.kafka.host.example.com:1234567".to_owned(); let mut fake_vars = vec![("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone())].into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value could not be parsed" ) } #[test] fn get_bootstrap_config_returns_error_when_disable_kafka_domain_validation_present_but_empty() { let mut fake_vars = vec![("DISABLE_KAFKA_DOMAIN_VALIDATION".to_owned(), "".to_owned())].into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Optional environment variable DISABLE_KAFKA_DOMAIN_VALIDATION failed validation because value is present but empty" ) } #[test] fn get_bootstrap_config_returns_error_when_disable_kafka_domain_validation_present_but_invalid() { let mut fake_vars = vec![( "DISABLE_KAFKA_DOMAIN_VALIDATION".to_owned(), "blah".to_owned(), )] .into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Optional environment variable DISABLE_KAFKA_DOMAIN_VALIDATION failed validation because value could not be parsed" ) } }
use addr::{parser::DomainName, psl::List}; use openssl_probe::ProbeResult; use rdkafka::config::ClientConfig; use rdkafka::consumer::stream_consumer::StreamConsumer; use rdkafka::error::KafkaError; use rdkafka::producer::future_producer::FutureProducer; use std::fmt::Display; #[derive(Debug, Clone)] pub struct KafkaBootstrapTlsConfig(Vec<String>); #[derive(Debug)] pub enum ValidationFailureReason { Missing, PresentButEmpty, CouldNotBeParsed, } impl Display for ValidationFailureReason { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { ValidationFailureReason::Missing => f.write_str("value is missing"), ValidationFailureReason::PresentButEmpty => f.write_str("value is present but empty"), ValidationFailureReason::CouldNotBeParsed => f.write_str("value could not be parsed"), } } } #[derive(thiserror::Error, Debug)] pub enum KafkaConfigError { #[error("Required environment variable {var} failed validation because {reason}")] RequiredValueValidationFailure { var: String, reason: ValidationFailureReason, }, #[error("Optional environment variable {var} failed validation because {reason}")] OptionalValueValidationFailure { var: String, reason: ValidationFailureReason, }, #[error("Kafka client could not be created")] KafkaError(#[from] KafkaError), #[error("Could not find TLS trust store")] TlsTrustStore, } pub fn get_bootstrap_config<I>(vars: &mut I) -> Result<KafkaBootstrapTlsConfig, KafkaConfigError> where I: Iterator<Item = (String, String)>, { let local_vars: Vec<(String, String)> = vars.collect(); let disable_kafka_domain_validation = match local_vars .iter() .find(|var| var.0 == "DISABLE_KAFKA_DOMAIN_VALIDATION") { None => Ok(false), Some(var) => { if var.1.is_empty() { return Err(KafkaConfigError::OptionalValueValidationFailure { var: "DISABLE_KAFKA_DOMAIN_VALIDATION".into(), reason: ValidationFailureReason::PresentButEmpty, }); } else { match var.1.as_ref() { "true" => Ok(true), "false" => Ok(false), _ => 
Err(KafkaConfigError::OptionalValueValidationFailure { var: "DISABLE_KAFKA_DOMAIN_VALIDATION".into(), reason: ValidationFailureReason::CouldNotBeParsed, }), } } } }?; let kafka_bootstrap_tls = match local_vars.iter().find(|var| var.0 == "KAFKA_BOOTSTRAP_TLS") { None => Err(KafkaConfigError::RequiredValueValidationFailure { var: "KAFKA_BOOTSTRAP_TLS".into(), reason: ValidationFailureReason::Missing, }), Some(var) => { if var.1.is_empty() { return Err(KafkaConfigError::RequiredValueValidationFailure { var: "KAFKA_BOOTSTRAP_TLS".into(), reason: ValidationFailureReason::PresentButEmpty, }); } else { let raw_hosts: Vec<String> = var.1.split(',').map(|s| s.to_owned()).collect(); let valid = raw_hosts.iter().all(|x| { let parts: Vec<&str> = x.split(':').collect(); let domain_valid = if disable_kafka_domain_validation { true } else { List.parse_domain_name(parts[0]) .map(|name| name.has_known_suffix()) .unwrap_or(false) }; let port_valid = u16::from_str_radix(parts[1], 10).is_ok(); domain_valid && port_valid }); if valid { Ok(raw_hosts) } else { Err(KafkaConfigError::RequiredValueValidationFailure { var: "KAFKA_BOOTSTRAP_TLS".into(), reason: ValidationFailureReason::CouldNotBeParsed, }) } } } }?; Ok(KafkaBootstrapTlsConfig(kafka_bootstrap_tls)) } pub fn build_kafka_producer( config: KafkaBootstrapTlsConfig, ) -> Result<FutureProducer, KafkaConfigError> { let cert_probe_result = openssl_probe::probe(); let cert_location = match cert_probe_result { ProbeResult { cert_file, .. } if cert_file.is_some() => Ok(cert_file), ProbeResult { cert_dir, .. 
} if cert_dir.is_some() => Ok(cert_dir), _ => Err(KafkaConfigError::TlsTrustStore), }?; ClientConfig::new() .set("metadata.broker.list", &config.0.join(",")) .set("compression.type", "gzip") .set("security.protocol", "SSL") .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256") .set("ssl.ca.location", cert_location.unwrap().to_string_lossy()) .set("message.max.bytes", "10000000") .create() .map_err(|err| err.into()) } pub fn build_kafka_consumer( config: KafkaBootstrapTlsConfig, consumer_group_name: String, ) -> Result<StreamConsumer, KafkaConfigError> { let cert_probe_result = openssl_probe::probe(); let cert_location = match cert_probe_result { ProbeResult { cert_file, .. } if cert_file.is_some() => Ok(cert_file), ProbeResult { cert_dir, .. } if cert_dir.is_some() => Ok(cert_dir), _ => Err(KafkaConfigError::TlsTrustStore), }?; ClientConfig::new() .set("metadata.broker.list", &config.0.join(",")) .set("group.id", &consumer_group_name) .set("compression.type", "gzip") .set("security.protocol", "SSL") .set("ssl.cipher.suites", "ECDHE-ECDSA-AES256-GCM-SHA384,ECDHE-RSA-AES256-GCM-SHA384,ECDHE-ECDSA-AES128-GCM-SHA256,ECDHE-RSA-AES128-GCM-SHA256") .set("ssl.ca.location", cert_location.unwrap().to_string_lossy()) .set("fetch.message.max.bytes", "10000000") .create() .map_err(|err| err.into()) } #[cfg(test)] mod tests { use super::*; #[allow(unused_must_use)] #[tokio::test] async fn creating_kafka_producer_does_not_return_a_client_config_error() { let config = KafkaBootstrapTlsConfig(vec!["host1:1234".to_string(), "host2:1234".to_string()]); build_kafka_producer(config).unwrap(); } #[allow(unused_must_use)] #[tokio::test] async fn creating_kafka_consumer_does_not_return_a_client_config_error() { let config = KafkaBootstrapTlsConfig(vec!["host1:1234".to_string(), "host2:1234".to_string()]); build_kafka_consumer(config, "TestConsumerGroup".to_string()).unwrap(); } #[test] fn 
get_bootstrap_config_returns_config_when_environment_vars_present_and_valid() { let hostname = "my.kafka.host.example.com:1234,my.second.kafka.host.example.com:1234".to_owned(); let mut fake_vars = vec![("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname)].into_iter(); let expected = vec![ "my.kafka.host.example.com:1234".to_owned(), "my.second.kafka.host.example.com:1234".to_owned(), ]; let actual = get_bootstrap_config(&mut fake_vars).expect("expected Ok(_) value"); assert_eq!(actual.0, expected); } #[test] fn get_bootstrap_config_returns_error_when_environment_vars_missing() { let mut fake_vars = std::iter::empty::<(String, String)>(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value is missing" ) } #[test] fn get_bootstrap_config_returns_error_when_environment_vars_present_but_empty() { let hostname = "".to_owned(); let mut fake_vars = vec![("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone())].into_iter();
#[test] fn get_bootstrap_config_returns_error_when_hostname_invalid_and_domain_validation_enabled() { let hostname = "kafka:1234".to_owned(); let mut fake_vars = vec![("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone())].into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value could not be parsed" ) } #[test] fn get_bootstrap_config_returns_configration_when_hostname_not_a_valid_domain_and_domain_validation_disabled( ) { let hostname = "kafka:1234".to_owned(); let mut fake_vars = vec![ ("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone()), ( "DISABLE_KAFKA_DOMAIN_VALIDATION".to_owned(), "true".to_owned(), ), ] .into_iter(); let expected = vec![hostname.clone()]; let actual = get_bootstrap_config(&mut fake_vars).expect("expected Ok(_) value"); assert_eq!(actual.0, expected) } #[test] fn get_bootstrap_config_returns_error_when_port_number_invalid() { let hostname = "my.kafka.host.example.com:1234567".to_owned(); let mut fake_vars = vec![("KAFKA_BOOTSTRAP_TLS".to_owned(), hostname.clone())].into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value could not be parsed" ) } #[test] fn get_bootstrap_config_returns_error_when_disable_kafka_domain_validation_present_but_empty() { let mut fake_vars = vec![("DISABLE_KAFKA_DOMAIN_VALIDATION".to_owned(), "".to_owned())].into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Optional environment variable DISABLE_KAFKA_DOMAIN_VALIDATION failed validation because value is present but empty" ) } #[test] fn get_bootstrap_config_returns_error_when_disable_kafka_domain_validation_present_but_invalid() { let mut fake_vars = vec![( 
"DISABLE_KAFKA_DOMAIN_VALIDATION".to_owned(), "blah".to_owned(), )] .into_iter(); let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Optional environment variable DISABLE_KAFKA_DOMAIN_VALIDATION failed validation because value could not be parsed" ) } }
let actual = get_bootstrap_config(&mut fake_vars).expect_err("expected Err(_) value"); assert_eq!( actual.to_string(), "Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value is present but empty" ) }
function_block-function_prefix_line
[ { "content": "#[derive(Clone, SerdeValue, Serialize, Deserialize)]\n\nstruct EventType(Vec<String>);\n\n\n", "file_path": "slack-connector/src/main.rs", "rank": 1, "score": 122833.64715094889 }, { "content": "#[derive(Clone, SerdeValue, Serialize, Deserialize)]\n\nstruct EventType(Vec<String>);\n\n\n", "file_path": "report-parser/src/main.rs", "rank": 2, "score": 122833.64715094889 }, { "content": "#[derive(Clone, SerdeValue, Serialize, Deserialize)]\n\nstruct EventType(Vec<String>);\n\n\n\npub struct StructuredLogger(Rc<Inner>);\n\n\n", "file_path": "data-collector/src/lib.rs", "rank": 3, "score": 122833.64715094889 }, { "content": "fn validate_config_info(config_info: &CliConfigOptions) -> Result<(), ConfigFileError> {\n\n match &config_info.app_name {\n\n Some(app_name) => {\n\n if app_name.is_empty() {\n\n return Err(ConfigFileError::app_name_empty());\n\n }\n\n }\n\n None => return Err(ConfigFileError::app_name_unspecified()),\n\n };\n\n match &config_info.data_collector_url {\n\n Some(url) => {\n\n if url.is_empty() {\n\n return Err(ConfigFileError::data_collector_url_empty());\n\n }\n\n }\n\n None => return Err(ConfigFileError::data_collector_url_unspecified()),\n\n };\n\n\n\n Ok(())\n\n}\n", "file_path": "cli/src/main.rs", "rank": 4, "score": 106644.36154139698 }, { "content": "pub fn serialise_to_avro(report: ToolReport) -> Result<Vec<u8>, HandlerError> {\n\n let schema = Schema::parse_str(TOOL_REPORT_SCHEMA)?;\n\n let mut writer = Writer::new(&schema, Vec::new());\n\n writer.append_ser(report)?;\n\n Ok(writer.into_inner()?)\n\n}\n\n\n\n#[derive(thiserror::Error, Debug)]\n\npub enum HandlerError {\n\n #[error(\"Something went wrong while communicating with Kafka\")]\n\n KafkaError {\n\n #[from]\n\n source: KafkaError,\n\n },\n\n #[error(transparent)]\n\n ValidationError(#[from] ValidationError),\n\n #[error(\"Something went wrong while serialising payload to Apache Avro\")]\n\n AvroError(#[from] avro_rs::Error),\n\n}\n\n\n", "file_path": 
"data-collector/src/main.rs", "rank": 5, "score": 93077.65096226372 }, { "content": "pub fn parse_payload(body: &web::Bytes) -> Result<ToolReport, ValidationError> {\n\n if body.is_empty() {\n\n return Err(ValidationError::body_empty());\n\n }\n\n\n\n serde_json::from_slice(&body)\n\n .map_err(|_| ValidationError::body_media_type_incorrect())\n\n .and_then(|json| ToolReport::try_from(&json))\n\n}\n\n\n", "file_path": "data-collector/src/main.rs", "rank": 6, "score": 88857.83368424657 }, { "content": "struct ProgressBarDisplay {\n\n prog_channels: HashMap<std::string::String, Arc<Mutex<mpsc::Sender<ProgressChannelUpdate>>>>,\n\n multibar_arc: Arc<MultiProgress>,\n\n pull_started: bool,\n\n}\n\n\n\nstatic PBAR_FMT: &str = \"{msg} {percent}% [{wide_bar:.cyan/blue}] {bytes}/{total_bytes} eta: {eta}\";\n\n\n\nimpl ProgressBarDisplay {\n\n fn create_progress_bar(len: u64) -> ProgressBar {\n\n let progbar = ProgressBar::new(len);\n\n\n\n progbar.set_style(\n\n ProgressStyle::default_bar()\n\n .template(PBAR_FMT)\n\n .progress_chars(\"=> \"),\n\n );\n\n\n\n progbar\n\n }\n", "file_path": "cli/src/main.rs", "rank": 9, "score": 83681.33552311598 }, { "content": "#[derive(Debug, Default, Deserialize)]\n\nstruct CliConfigOptions {\n\n data_collector_url: Option<String>,\n\n app_name: Option<String>,\n\n scan_env: Option<ScanEnv>,\n\n tool_image_name: Option<String>,\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 10, "score": 83650.93672941584 }, { "content": "fn parse_kiln_toml_file(tool_work_dir: &str) -> Result<CliConfigOptions, ConfigFileError> {\n\n /* Read default kiln config file */\n\n let kiln_config_file_name =\n\n std::path::PathBuf::from(tool_work_dir.to_owned()).join(\"kiln.toml\");\n\n let mut kiln_config_file = match File::open(kiln_config_file_name) {\n\n Ok(f) => f,\n\n Err(e) => {\n\n eprintln!(\"Error occured while opening the kiln.toml file. 
Please ensure you have this in your current working directory (Err: {})\", e);\n\n process::exit(1);\n\n }\n\n };\n\n\n\n let mut config_file_str = String::new();\n\n match kiln_config_file.read_to_string(&mut config_file_str) {\n\n Ok(_s) => {\n\n let config_info: CliConfigOptions = toml::from_str(config_file_str.as_ref()).unwrap();\n\n validate_config_info(&config_info)?;\n\n Ok(config_info)\n\n }\n\n Err(e) => {\n\n eprintln!(\"Error reading kiln.toml file (Err: {})\", e);\n\n process::exit(1);\n\n }\n\n }\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 11, "score": 65921.81153243872 }, { "content": "enum Tool {\n\n BundlerAudit,\n\n Safety,\n\n}\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct DockerImage {\n\n registry: Option<Url>,\n\n repo: String,\n\n image: String,\n\n tag: String,\n\n credentials: Option<DockerCredentials>,\n\n}\n\n\n\nimpl DockerImage {\n\n #[allow(clippy::new_ret_no_self)]\n\n fn new() -> DockerImageBuilder {\n\n DockerImageBuilder {\n\n ..Default::default()\n\n }\n", "file_path": "cli/src/main.rs", "rank": 12, "score": 51119.20088271962 }, { "content": "type ProgressChannelUpdate = (String, Option<ProgressDetail>, String);\n\n\n", "file_path": "cli/src/main.rs", "rank": 13, "score": 50984.2372289388 }, { "content": "#[derive(Debug, Deserialize)]\n\nenum ScanEnv {\n\n Local,\n\n CI,\n\n}\n\n\n\nimpl fmt::Display for ScanEnv {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fmt::Debug::fmt(self, f)\n\n }\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 14, "score": 49820.78395951671 }, { "content": "#[derive(Clone, Debug)]\n\nstruct DockerCredentials {\n\n username: String,\n\n password: String,\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 15, "score": 49598.76303036245 }, { "content": "struct Inner {\n\n logger: Logger,\n\n exclude: HashSet<String>,\n\n}\n\n\n\nimpl StructuredLogger {\n\n #[must_use]\n\n pub fn new(logger: Logger) -> StructuredLogger {\n\n StructuredLogger(Rc::new(Inner {\n\n 
logger,\n\n exclude: HashSet::new(),\n\n }))\n\n }\n\n\n\n pub fn exclude<T: Into<String>>(mut self, path: T) -> Self {\n\n Rc::get_mut(&mut self.0)\n\n .unwrap()\n\n .exclude\n\n .insert(path.into());\n\n self\n", "file_path": "data-collector/src/lib.rs", "rank": 16, "score": 49589.730494891235 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct DockerImageBuilder {\n\n registry: Option<String>,\n\n repo: Option<String>,\n\n image: Option<String>,\n\n tag: Option<String>,\n\n credentials: Option<DockerCredentials>,\n\n}\n\n\n\nimpl DockerImageBuilder {\n\n fn with_registry<S: Into<String>>(&mut self, registry_url: S) -> &mut DockerImageBuilder {\n\n self.registry = Some(registry_url.into());\n\n self\n\n }\n\n\n\n fn with_repo<S: Into<String>>(&mut self, repo: S) -> &mut DockerImageBuilder {\n\n self.repo = Some(repo.into());\n\n self\n\n }\n\n\n\n fn with_image<S: Into<String>>(&mut self, image: S) -> &mut DockerImageBuilder {\n", "file_path": "cli/src/main.rs", "rank": 17, "score": 48386.37758222497 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct PythonSafety {\n\n affected_package: String,\n\n affected_versions: String,\n\n installed_version: String,\n\n advisory_description: String,\n\n advisory_id: String,\n\n cvssv2: Option<String>,\n\n cvssv3: Option<String>,\n\n}\n\n\n", "file_path": "report-parser/src/main.rs", "rank": 18, "score": 48381.98678482897 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct MetaInfo {\n\n advisory: String,\n\n timestamp: Value,\n\n}\n\n\n", "file_path": "report-parser/src/main.rs", "rank": 19, "score": 48381.98678482897 }, { "content": "#[derive(Clone)]\n\nstruct VulnData {\n\n cvss: Cvss,\n\n advisory_str: ComprString,\n\n advisory_url: String,\n\n}\n\n\n", "file_path": "report-parser/src/main.rs", "rank": 20, "score": 48381.95775401819 }, { "content": "#[derive(Clone)]\n\nstruct CopyStream {\n\n buffer: Rc<RefCell<Vec<u8>>>,\n\n input: Rc<RefCell<actix_web::dev::Payload>>,\n\n}\n\n\n\nimpl 
Into<Pin<Box<dyn Stream<Item = Result<bytes::Bytes, PayloadError>>>>> for CopyStream {\n\n fn into(self) -> Pin<Box<dyn Stream<Item = Result<bytes::Bytes, PayloadError>>>> {\n\n Box::pin(self)\n\n }\n\n}\n\n\n\nimpl Stream for CopyStream {\n\n type Item = Result<bytes::Bytes, PayloadError>;\n\n\n\n fn poll_next(\n\n self: Pin<&mut Self>,\n\n cx: &mut std::task::Context<'_>,\n\n ) -> Poll<Option<Self::Item>> {\n\n let mut input = self.input.deref().borrow_mut();\n\n match input.deref_mut() {\n", "file_path": "data-collector/src/lib.rs", "rank": 21, "score": 48381.95775401819 }, { "content": "fn should_issue_be_suppressed(\n\n issue_hash: &IssueHash,\n\n suppressed_issues: &[SuppressedIssue],\n\n current_time: &DateTime<Utc>,\n\n) -> bool {\n\n if suppressed_issues.is_empty() {\n\n false\n\n } else {\n\n let matching_issues = suppressed_issues\n\n .iter()\n\n .filter(|x| &x.issue_hash == issue_hash)\n\n .collect::<Vec<_>>();\n\n if matching_issues.is_empty() {\n\n false\n\n } else {\n\n matching_issues\n\n .iter()\n\n .any(|x| x.expiry_date.is_none() || x.expiry_date > *current_time)\n\n }\n\n }\n", "file_path": "report-parser/src/main.rs", "rank": 22, "score": 48377.43360411623 }, { "content": "#[derive(thiserror::Error, Debug)]\n\nenum SlackSendError {\n\n #[error(\"Slack API Rate Limit encountered. 
Try again in {0:?}\")]\n\n RateLimited(std::time::Duration),\n\n #[error(\"Slack API returned an error: {cause}\")]\n\n SlackError { cause: String },\n\n #[error(\"HTTP Error ooccured: {0}\")]\n\n ReqwestError(#[from] reqwest::Error),\n\n}\n\n\n\nasync fn try_send_slack_message<T: AsRef<str> + serde::ser::Serialize + std::fmt::Display>(\n\n channel_id: T,\n\n event: &DependencyEvent,\n\n client: &reqwest::Client,\n\n oauth_token: T,\n\n) -> Result<(), SlackSendError> {\n\n let payload = json!({\n\n \"channel\": channel_id,\n\n \"text\": event.to_slack_message()\n\n });\n\n let req = client\n", "file_path": "slack-connector/src/main.rs", "rank": 23, "score": 47466.97475248223 }, { "content": "#[derive(Deserialize, Debug)]\n\n#[serde(untagged)]\n\nenum SafetyJsonData {\n\n Vuln(Vec<SafetyPackageVulnInfo>),\n\n Meta(MetaInfo),\n\n}\n\n\n", "file_path": "report-parser/src/main.rs", "rank": 24, "score": 47466.93097873389 }, { "content": "fn parse_safety_json(\n\n report: &ToolReport,\n\n vulns: &HashMap<String, VulnData>,\n\n safety_vulns: &HashMap<String, Option<String>>,\n\n) -> Result<Vec<DependencyEvent>, Box<dyn Error>> {\n\n let mut events = Vec::new();\n\n let python_dep_vulns: Vec<PythonSafety> = serde_json::from_str(report.tool_output.as_ref())?;\n\n\n\n let default_cvss = Cvss::builder()\n\n .with_version(CvssVersion::Unknown)\n\n .build()\n\n .unwrap();\n\n\n\n for vuln in python_dep_vulns.iter() {\n\n let advisory_id = AdvisoryId::try_from(vuln.advisory_id.to_owned())?;\n\n\n\n let mut event = DependencyEvent {\n\n event_version: EventVersion::try_from(\"1\".to_string())?,\n\n event_id: EventID::try_from(Uuid::new_v4().to_hyphenated().to_string())?,\n\n parent_event_id: report.event_id.clone(),\n", "file_path": "report-parser/src/main.rs", "rank": 25, "score": 47243.34565690759 }, { "content": "fn parse_tool_report(\n\n report: &ToolReport,\n\n vulns: &HashMap<String, VulnData>,\n\n safety_vuln_map: &HashMap<String, Option<String>>,\n\n) -> 
Result<Vec<Vec<u8>>, Box<dyn Error>> {\n\n let events = if report.tool_name == \"bundler-audit\" {\n\n if report.output_format == \"PlainText\" {\n\n parse_bundler_audit_plaintext(&report, &vulns)\n\n } else {\n\n Err(Box::new(\n\n err_msg(format!(\n\n \"Unknown output format for Bundler-audit in ToolReport: {:?}\",\n\n report\n\n ))\n\n .compat(),\n\n )\n\n .into())\n\n }\n\n } else if report.tool_name == \"safety\" {\n\n if report.output_format == \"JSON\" {\n", "file_path": "report-parser/src/main.rs", "rank": 26, "score": 47243.34565690759 }, { "content": "fn download_and_parse_vulns(\n\n index: String,\n\n last_updated_time: Option<DateTime<Utc>>,\n\n base_url: &Url,\n\n client: &Client,\n\n) -> Result<Option<HashMap<String, VulnData>>, Box<dyn Error>> {\n\n lazy_static! {\n\n static ref META_LAST_MOD_RE: Regex = Regex::new(\"lastModifiedDate:(.*)\\r\\n\").unwrap();\n\n static ref META_COMPRESSED_GZ_SIZE_RE: Regex = Regex::new(\"gzSize:(.*)\\r\\n\").unwrap();\n\n static ref META_UNCOMPRESSED_SIZE_RE: Regex = Regex::new(\"size:(.*)\\r\\n\").unwrap();\n\n static ref META_SHA256_RE: Regex = Regex::new(\"sha256:(.*)\\r\\n\").unwrap();\n\n }\n\n\n\n let meta_filename = format!(\"nvdcve-1.1-{}.meta\", index);\n\n let meta_url = base_url.join(&meta_filename)?;\n\n\n\n let meta_resp_text = client\n\n .get(meta_url)\n\n .send()\n\n .map_err(|err| {\n", "file_path": "report-parser/src/main.rs", "rank": 27, "score": 47243.34565690759 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct SafetyPackageVulnInfo {\n\n advisory: String,\n\n cve: Value,\n\n id: String,\n\n specs: Vec<String>,\n\n v: String,\n\n}\n\n\n", "file_path": "report-parser/src/main.rs", "rank": 28, "score": 46184.6880869022 }, { "content": "struct NestedJsonFmtSerializer {\n\n initial_value: Value,\n\n}\n\n\n\nimpl slog::Serializer for NestedJsonFmtSerializer {\n\n fn emit_usize(&mut self, key: slog::Key, val: usize) -> Result<(), Error> {\n\n self.initial_value\n\n .dot_set(key, val)\n\n 
.map_err(|_| slog::Error::Other)\n\n }\n\n\n\n fn emit_isize(&mut self, key: slog::Key, val: isize) -> Result<(), Error> {\n\n self.initial_value\n\n .dot_set(key, val)\n\n .map_err(|_| slog::Error::Other)\n\n }\n\n\n\n fn emit_bool(&mut self, key: slog::Key, val: bool) -> Result<(), Error> {\n\n self.initial_value\n\n .dot_set(key, val)\n", "file_path": "kiln_lib/src/log.rs", "rank": 29, "score": 46180.13490618946 }, { "content": "fn parse_bundler_audit_plaintext(\n\n report: &ToolReport,\n\n vulns: &HashMap<String, VulnData>,\n\n) -> Result<Vec<DependencyEvent>, Box<dyn Error>> {\n\n lazy_static! {\n\n static ref BLOCK_RE: Regex = Regex::new(\"(Name: .*\\nVersion: .*\\nAdvisory: .*\\nCriticality: .*\\nURL: .*\\nTitle: .*\\nSolution:.*\\n)\").unwrap();\n\n }\n\n let mut events = Vec::new();\n\n\n\n let default_cvss = Cvss::builder()\n\n .with_version(CvssVersion::Unknown)\n\n .build()\n\n .unwrap();\n\n\n\n for block in BLOCK_RE.captures_iter(report.tool_output.as_ref()) {\n\n let block = block.get(0).unwrap().as_str();\n\n let fields = block\n\n .trim_end()\n\n .split('\\n')\n\n .map(|line| line.split(\": \").collect::<Vec<_>>())\n", "file_path": "report-parser/src/main.rs", "rank": 30, "score": 46180.13490618946 }, { "content": "fn download_and_parse_python_safety_vulns(\n\n server_name: &str,\n\n etag: &mut Option<String>,\n\n client: &Client,\n\n) -> Result<Option<HashMap<String, Option<String>>>, Box<dyn Error>> {\n\n let head_resp = client.head(server_name).send()?;\n\n let mut etag_str = None;\n\n if head_resp.status().is_success() {\n\n if let Some(etag_new) = head_resp.headers().get(ETAG) {\n\n // If the etag passed in is none or different to the one we just got, then download below....\n\n match etag {\n\n Some(etag_old) => {\n\n if *etag_old == etag_new.to_str().unwrap() {\n\n return Ok(None);\n\n } else {\n\n etag_str = Some(etag_new.to_str().unwrap().to_owned());\n\n }\n\n }\n\n None => etag_str = Some(etag_new.to_str().unwrap().to_owned()),\n\n }\n", 
"file_path": "report-parser/src/main.rs", "rank": 31, "score": 45181.358261377165 }, { "content": "pub trait Hashable {\n\n fn hash(&self) -> Vec<u8>;\n\n}\n", "file_path": "kiln_lib/src/traits.rs", "rank": 32, "score": 45177.71309465321 }, { "content": "use serde::Serialize;\n\nuse std::error::Error;\n\nuse std::fmt;\n\n\n\n#[derive(Debug, PartialEq, Serialize)]\n\npub struct ValidationError {\n\n pub error_code: u8,\n\n pub error_message: String,\n\n pub json_field_name: Option<String>,\n\n}\n\n\n\nimpl Error for ValidationError {}\n\n\n\nimpl fmt::Display for ValidationError {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"Error Validating data: (Err {}) {}\",\n\n self.error_code, self.error_message\n\n )\n", "file_path": "kiln_lib/src/validation.rs", "rank": 33, "score": 37432.70146236184 }, { "content": " pub fn environment_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 127,\n\n error_message: \"Environment not a valid string\".into(),\n\n json_field_name: Some(\"environment\".into()),\n\n }\n\n }\n\n\n\n pub fn environment_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 133,\n\n error_message: \"Environment present but empty\".into(),\n\n json_field_name: Some(\"environment\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_version_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 130,\n\n error_message: \"Tool version not a valid string\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 34, "score": 37428.77663070081 }, { "content": " pub fn application_name_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 111,\n\n error_message: \"Application name present but empty\".into(),\n\n json_field_name: Some(\"application_name\".into()),\n\n }\n\n }\n\n\n\n pub fn application_name_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 102,\n\n error_message: \"Application name required\".into(),\n\n json_field_name: 
Some(\"application_name\".into()),\n\n }\n\n }\n\n\n\n pub fn application_name_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 112,\n\n error_message: \"Application name not a valid string\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 35, "score": 37428.37574785553 }, { "content": " pub fn tool_name_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 105,\n\n error_message: \"Tool name required\".into(),\n\n json_field_name: Some(\"tool_name\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_name_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 119,\n\n error_message: \"Tool name not a valid string\".into(),\n\n json_field_name: Some(\"tool_name\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_output_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 120,\n\n error_message: \"Tool output present but empty\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 36, "score": 37428.37574785553 }, { "content": " json_field_name: Some(\"tool_version\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_version_present_but_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 129,\n\n error_message: \"Tool version present but empty\".into(),\n\n json_field_name: Some(\"tool_version\".into()),\n\n }\n\n }\n\n\n\n pub fn avro_schema_validation_failed() -> ValidationError {\n\n ValidationError {\n\n error_code: 130,\n\n error_message: \"Tried to deserialise a ToolReport from Avro but value didn't pass schema validation\".into(),\n\n json_field_name: None\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 37, "score": 37428.23298512575 }, { "content": " pub fn tool_output_format_not_an_enum() -> ValidationError {\n\n ValidationError {\n\n error_code: 131,\n\n error_message: \"Tool output format not an avro enum\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn environment_not_an_enum() -> ValidationError {\n\n ValidationError {\n\n error_code: 
132,\n\n error_message: \"Environment not an avro enum\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn event_version_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 133,\n\n error_message: \"Event version missing\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 38, "score": 37428.012650117504 }, { "content": " pub fn git_commit_hash_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 115,\n\n error_message: \"Git commit hash present but empty\".into(),\n\n json_field_name: Some(\"git_commit_hash\".into()),\n\n }\n\n }\n\n\n\n pub fn git_commit_hash_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 104,\n\n error_message: \"Git commit hash required\".into(),\n\n json_field_name: Some(\"git_commit_hash\".into()),\n\n }\n\n }\n\n\n\n pub fn git_commit_hash_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 117,\n\n error_message: \"Git commit hash not a valid string\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 39, "score": 37427.50084439888 }, { "content": " pub fn tool_output_format_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 122,\n\n error_message: \"Tool output format present but empty\".into(),\n\n json_field_name: Some(\"tool_output_format\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_output_format_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 107,\n\n error_message: \"Tool output format required\".into(),\n\n json_field_name: Some(\"tool_output_format\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_output_format_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 123,\n\n error_message: \"Tool output format not a valid string\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 40, "score": 37427.50084439888 }, { "content": " error_message: \"One or more issue hashes are missing\".into(),\n\n json_field_name: 
Some(\"suppressed_issues[].issue_hash\".to_owned()),\n\n }\n\n }\n\n\n\n pub fn issue_hash_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 162,\n\n error_message: \"One or more issue hashes are not strings\".into(),\n\n json_field_name: Some(\"suppressed_issues[].issue_hash\".to_owned()),\n\n }\n\n }\n\n\n\n pub fn suppression_reason_required() -> ValidationError {\n\n ValidationError {\n\n error_code: 163,\n\n error_message: \"One or more issue suppression reasons are missing\".into(),\n\n json_field_name: Some(\"suppressed_issues[].suppression_reason\".to_owned()),\n\n }\n\n }\n", "file_path": "kiln_lib/src/validation.rs", "rank": 41, "score": 37425.44371816537 }, { "content": "\n\n pub fn suppressed_by_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 169,\n\n error_message: \"One or more Suppressed Issues SuppressedBy fields are not a string\"\n\n .into(),\n\n json_field_name: Some(\"suppressed_issues[].suppressed_by\".into()),\n\n }\n\n }\n\n\n\n pub fn suppressed_by_required() -> ValidationError {\n\n ValidationError {\n\n error_code: 170,\n\n error_message: \"One or more Suppressed Issues SuppressedBy fields missing\".into(),\n\n json_field_name: Some(\"suppressed_issues[].suppressed_by\".into()),\n\n }\n\n }\n\n\n\n pub fn suppressed_by_empty() -> ValidationError {\n\n ValidationError {\n", "file_path": "kiln_lib/src/validation.rs", "rank": 42, "score": 37424.66247426186 }, { "content": " json_field_name: Some(\"end_time\".into()),\n\n }\n\n }\n\n\n\n pub fn environment_not_a_valid_option() -> ValidationError {\n\n ValidationError {\n\n error_code: 128,\n\n error_message: \"Environment not a valid option\".into(),\n\n json_field_name: Some(\"environment\".into()),\n\n }\n\n }\n\n\n\n pub fn environment_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 110,\n\n error_message: \"Environment required\".into(),\n\n json_field_name: Some(\"environment\".into()),\n\n }\n\n }\n\n\n", "file_path": 
"kiln_lib/src/validation.rs", "rank": 43, "score": 37423.59155569501 }, { "content": " json_field_name: Some(\"event_version\".into()),\n\n }\n\n }\n\n\n\n pub fn event_version_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 134,\n\n error_message: \"Event version not a string\".into(),\n\n json_field_name: Some(\"event_version\".into()),\n\n }\n\n }\n\n\n\n pub fn event_version_present_but_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 135,\n\n error_message: \"Event version present but empty\".into(),\n\n json_field_name: Some(\"event_version\".into()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 44, "score": 37423.316143135955 }, { "content": " }\n\n}\n\n\n\nimpl ValidationError {\n\n pub fn body_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 100,\n\n error_message: \"Request body empty\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn body_media_type_incorrect() -> ValidationError {\n\n ValidationError {\n\n error_code: 101,\n\n error_message: \"Request body not correct media type\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 45, "score": 37423.202379761125 }, { "content": " pub fn expiry_date_not_a_valid_date() -> ValidationError {\n\n ValidationError {\n\n error_code: 159,\n\n error_message:\n\n \"One or more issue suppression expiry dates do not look like valid dates\".into(),\n\n json_field_name: Some(\"suppressed_issues[].expiry_date\".to_owned()),\n\n }\n\n }\n\n\n\n pub fn suppression_reason_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 160,\n\n error_message: \"One or more issue suppression reasons are empty\".into(),\n\n json_field_name: Some(\"suppressed_issues[].suppression_reason\".to_owned()),\n\n }\n\n }\n\n\n\n pub fn issue_hash_required() -> ValidationError {\n\n ValidationError {\n\n error_code: 161,\n", "file_path": "kiln_lib/src/validation.rs", "rank": 46, 
"score": 37422.75563071786 }, { "content": " json_field_name: Some(\"application_name\".into()),\n\n }\n\n }\n\n\n\n pub fn git_branch_name_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 113,\n\n error_message: \"Git branch name present but empty\".into(),\n\n json_field_name: Some(\"git_branch\".into()),\n\n }\n\n }\n\n\n\n pub fn git_branch_name_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 114,\n\n error_message: \"Git branch name not a valid string\".into(),\n\n json_field_name: Some(\"git_branch\".into()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 47, "score": 37422.1454600062 }, { "content": " json_field_name: Some(\"tool_output\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_output_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 106,\n\n error_message: \"Tool output required\".into(),\n\n json_field_name: Some(\"tool_output\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_output_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 121,\n\n error_message: \"Tool output not a valid string\".into(),\n\n json_field_name: Some(\"tool_output\".into()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 48, "score": 37421.61251984509 }, { "content": "\n\n pub fn suppression_reason_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 164,\n\n error_message: \"One or more issue suppression reasons are not strings\".into(),\n\n json_field_name: Some(\"suppressed_issues[].suppression_reason\".to_owned()),\n\n }\n\n }\n\n\n\n pub fn expiry_date_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 165,\n\n error_message: \"One or more issue suppression expiry dates are not strings\".into(),\n\n json_field_name: Some(\"suppressed_issues[].expiry_date\".to_owned()),\n\n }\n\n }\n\n\n\n pub fn suppressed_issue_not_a_record() -> ValidationError {\n\n ValidationError {\n\n error_code: 166,\n", "file_path": 
"kiln_lib/src/validation.rs", "rank": 49, "score": 37420.44294240003 }, { "content": " json_field_name: Some(\"advisory_description\".into()),\n\n }\n\n }\n\n\n\n pub fn cvss_version_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 152,\n\n error_message: \"CVSS Version field not parsable as a string\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn cvss_score_not_valid() -> ValidationError {\n\n ValidationError {\n\n error_code: 153,\n\n error_message: \"CVSS score not valid\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 50, "score": 37419.56056007579 }, { "content": " pub fn event_version_unknown() -> ValidationError {\n\n ValidationError {\n\n error_code: 136,\n\n error_message: \"Event version unknown\".into(),\n\n json_field_name: Some(\"event_version\".into()),\n\n }\n\n }\n\n\n\n pub fn event_id_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 137,\n\n error_message: \"Event ID missing\".into(),\n\n json_field_name: Some(\"event_version\".into()),\n\n }\n\n }\n\n\n\n pub fn event_id_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 138,\n\n error_message: \"Event ID not a string\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 51, "score": 37419.48211659084 }, { "content": " json_field_name: Some(\"event_version\".into()),\n\n }\n\n }\n\n\n\n pub fn event_id_present_but_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 138,\n\n error_message: \"Event ID present but empty\".into(),\n\n json_field_name: Some(\"event_version\".into()),\n\n }\n\n }\n\n\n\n pub fn event_id_not_a_uuid() -> ValidationError {\n\n ValidationError {\n\n error_code: 139,\n\n error_message: \"Event ID does not look like a UUID\".into(),\n\n json_field_name: Some(\"event_version\".into()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 52, "score": 37419.256225258745 }, { "content": " 
json_field_name: Some(\"installed_version\".into()),\n\n }\n\n }\n\n\n\n pub fn advisory_id_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 148,\n\n error_message: \"Advisory Id not a string\".into(),\n\n json_field_name: Some(\"advisory_id\".into()),\n\n }\n\n }\n\n\n\n pub fn advisory_url_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 149,\n\n error_message: \"Advisory url empty\".into(),\n\n json_field_name: Some(\"advisory_url\".into()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 53, "score": 37419.255179361615 }, { "content": " error_message: \"Suppressed Issue Avro object is not a record\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn suppressed_flag_not_a_boolean() -> ValidationError {\n\n ValidationError {\n\n error_code: 167,\n\n error_message: \"Suppression flag Avro value is not a boolean\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn suppressed_issue_toml_value_not_a_table() -> ValidationError {\n\n ValidationError {\n\n error_code: 168,\n\n error_message: \"Suppressed Issue TOML type must be a Table\".into(),\n\n json_field_name: None,\n\n }\n\n }\n", "file_path": "kiln_lib/src/validation.rs", "rank": 54, "score": 37419.219989158024 }, { "content": " json_field_name: Some(\"git_commit_hash\".into()),\n\n }\n\n }\n\n\n\n pub fn git_commit_hash_not_valid() -> ValidationError {\n\n ValidationError {\n\n error_code: 116,\n\n error_message: \"Git commit hash not valid\".into(),\n\n json_field_name: Some(\"git_commit_hash\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_name_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 118,\n\n error_message: \"Tool name present but empty\".into(),\n\n json_field_name: Some(\"tool_name\".into()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 55, "score": 37418.56081399687 }, { "content": " error_code: 171,\n\n error_message:\n\n \"One or more Suppressed Issues SuppressedBy fields 
are present but empty\".into(),\n\n json_field_name: Some(\"suppressed_issues[].suppressed_by\".into()),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"web\")]\n\nuse actix_web::BaseHttpResponse;\n\n\n\n#[cfg(feature = \"web\")]\n\nuse actix_web::body::Body;\n\n\n\n#[cfg(feature = \"web\")]\n\nuse actix_web::http::StatusCode;\n\n\n\n#[cfg(feature = \"web\")]\n\nimpl Into<BaseHttpResponse<Body>> for ValidationError {\n\n fn into(self) -> BaseHttpResponse<Body> {\n", "file_path": "kiln_lib/src/validation.rs", "rank": 56, "score": 37418.406637399 }, { "content": " pub fn cvss_version_known_without_score() -> ValidationError {\n\n ValidationError {\n\n error_code: 154,\n\n error_message: \"CVSS version is known but score not provided\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn cvss_version_unknown_with_score() -> ValidationError {\n\n ValidationError {\n\n error_code: 155,\n\n error_message: \"CVSS version is unknown but score was provided\".into(),\n\n json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn cvss_not_a_record() -> ValidationError {\n\n ValidationError {\n\n error_code: 156,\n\n error_message: \"CVSS avro value not a record\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 57, "score": 37418.24098068361 }, { "content": " pub fn start_time_not_a_timestamp() -> ValidationError {\n\n ValidationError {\n\n error_code: 125,\n\n error_message: \"Start time not a valid timestamp\".into(),\n\n json_field_name: Some(\"start_time\".into()),\n\n }\n\n }\n\n\n\n pub fn end_time_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 109,\n\n error_message: \"End time required\".into(),\n\n json_field_name: Some(\"end_time\".into()),\n\n }\n\n }\n\n\n\n pub fn end_time_not_a_timestamp() -> ValidationError {\n\n ValidationError {\n\n error_code: 126,\n\n error_message: \"End time not a valid timestamp\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 58, "score": 37418.00047634732 }, { "content": " json_field_name: 
Some(\"tool_output_format\".into()),\n\n }\n\n }\n\n\n\n pub fn tool_output_format_invalid() -> ValidationError {\n\n ValidationError {\n\n error_code: 124,\n\n error_message: \"Tool output format not acceptable\".into(),\n\n json_field_name: Some(\"tool_output_format\".into()),\n\n }\n\n }\n\n\n\n pub fn start_time_missing() -> ValidationError {\n\n ValidationError {\n\n error_code: 108,\n\n error_message: \"Start time required\".into(),\n\n json_field_name: Some(\"start_time\".into()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 59, "score": 37417.335181668226 }, { "content": " pub fn advisory_description_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 140,\n\n error_message: \"Advisory description empty\".into(),\n\n json_field_name: Some(\"advisory_description\".into()),\n\n }\n\n }\n\n\n\n pub fn advisory_id_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 141,\n\n error_message: \"Advisory Id empty\".into(),\n\n json_field_name: Some(\"advisory_id\".into()),\n\n }\n\n }\n\n\n\n pub fn affected_package_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 142,\n\n error_message: \"Affected package empty\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 60, "score": 37416.92686448632 }, { "content": " pub fn timestamp_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 145,\n\n error_message: \"Timestamp does not look like a string\".into(),\n\n json_field_name: Some(\"timestamp\".into()),\n\n }\n\n }\n\n\n\n pub fn affected_package_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 146,\n\n error_message: \"Affected package not a string\".into(),\n\n json_field_name: Some(\"affected_package\".into()),\n\n }\n\n }\n\n\n\n pub fn installed_version_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 147,\n\n error_message: \"Installed version not a string\".into(),\n", "file_path": 
"kiln_lib/src/validation.rs", "rank": 61, "score": 37416.55603524553 }, { "content": " pub fn advisory_url_not_valid() -> ValidationError {\n\n ValidationError {\n\n error_code: 150,\n\n error_message: \"Advisory Url not a valid Url\".into(),\n\n json_field_name: Some(\"advisory_url\".into()),\n\n }\n\n }\n\n\n\n pub fn advisory_url_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 151,\n\n error_message: \"Advisory Url not a string\".into(),\n\n json_field_name: Some(\"advisory_url\".into()),\n\n }\n\n }\n\n\n\n pub fn advisory_description_not_a_string() -> ValidationError {\n\n ValidationError {\n\n error_code: 151,\n\n error_message: \"Advisory description not a string\".into(),\n", "file_path": "kiln_lib/src/validation.rs", "rank": 62, "score": 37416.347130884235 }, { "content": " json_field_name: Some(\"affected_package\".into()),\n\n }\n\n }\n\n\n\n pub fn installed_version_empty() -> ValidationError {\n\n ValidationError {\n\n error_code: 143,\n\n error_message: \"Installed version empty\".into(),\n\n json_field_name: Some(\"installed_version\".into()),\n\n }\n\n }\n\n\n\n pub fn timestamp_not_a_valid_timestamp() -> ValidationError {\n\n ValidationError {\n\n error_code: 144,\n\n error_message: \"Timestamp does not look like a valid timestamp\".into(),\n\n json_field_name: Some(\"timestamp\".into()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 63, "score": 37415.575382513394 }, { "content": " json_field_name: None,\n\n }\n\n }\n\n\n\n pub fn suppressed_issues_not_an_array() -> ValidationError {\n\n ValidationError {\n\n error_code: 157,\n\n error_message: \"Suppressed Issues not an array of objects\".into(),\n\n json_field_name: Some(\"suppressed_issues\".to_owned()),\n\n }\n\n }\n\n\n\n pub fn issue_hash_not_valid() -> ValidationError {\n\n ValidationError {\n\n error_code: 158,\n\n error_message: \"One or more issue hashes do not look like a valid SHA256 hashes\".into(),\n\n json_field_name: 
Some(\"suppressed_issues[].issue_hash\".to_owned()),\n\n }\n\n }\n\n\n", "file_path": "kiln_lib/src/validation.rs", "rank": 64, "score": 37414.287273878035 }, { "content": " BaseHttpResponse::build(StatusCode::BAD_REQUEST)\n\n .content_type(mime::APPLICATION_JSON)\n\n .body(serde_json::to_string(&self).unwrap())\n\n }\n\n}\n\n\n\n#[cfg(feature = \"web\")]\n\nimpl actix_web::ResponseError for ValidationError {\n\n fn status_code(&self) -> StatusCode {\n\n StatusCode::BAD_REQUEST\n\n }\n\n\n\n fn error_response(&self) -> BaseHttpResponse<Body> {\n\n BaseHttpResponse::build(StatusCode::BAD_REQUEST)\n\n .content_type(mime::APPLICATION_JSON)\n\n .body(serde_json::to_string(&self).unwrap())\n\n }\n\n}\n", "file_path": "kiln_lib/src/validation.rs", "rank": 65, "score": 37411.58211835215 }, { "content": "def validate_version_number(ctx, param, value):\n\n try:\n\n return semver.VersionInfo.parse(value)\n\n except TypeError:\n\n raise click.BadParameter(\"Version number not semver compatible\")\n\n except ValueError:\n", "file_path": "utils/release.py", "rank": 66, "score": 32727.23817538193 }, { "content": "fn main() -> Result<(), std::boxed::Box<dyn std::error::Error>> {\n\n openssl_probe::init_ssl_cert_env_vars();\n\n\n\n let matches = App::new(\"Kiln data forwarder\")\n\n .arg(\n\n Arg::with_name(\"tool_name\")\n\n .help(\"Name of the security tool run\")\n\n .long(\"tool-name\")\n\n .required(true)\n\n .takes_value(true)\n\n .value_name(\"TOOL_NAME\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"tool_version\")\n\n .help(\"Version of the security tool run\")\n\n .long(\"tool-version\")\n\n .takes_value(true)\n\n .value_name(\"TOOL_VERSION\"),\n\n )\n\n .arg(\n", "file_path": "data-forwarder/src/main.rs", "rank": 67, "score": 32059.40588799814 }, { "content": "def check_for_expected_working_copy_changes(kiln_repo):\n\n (staged, unstaged, untracked) = dulwich.porcelain.status(kiln_repo)\n\n if staged['add'] or staged['delete'] or staged['modify']:\n\n return False\n\n for item 
in unstaged:\n\n if item != b\"CHANGELOG.md\" and not PurePath(item.decode(\"utf-8\")).match(\"utils/*\"):\n\n return False\n", "file_path": "utils/release.py", "rank": 68, "score": 31150.68370139577 }, { "content": "# Starting a Kiln stack locally\n\nCurrently, there is no automated integration testing of Kiln, but it is possible to quickly bring up a local Kiln stack to manually test changes with a Kafka cluster. Assuming you want to test a new tool image against the rest of Kiln, you will need the following components:\n\n\n\n* Data forwarder compiled for musl-libc (to include in tool image)\n\n* Tool image of choice (using bundler-audit as an example)\n\n* Data collector docker image built\n\n* Report parser docker image built\n\n* Slack connector docker image built\n\n* Java JRE (required for building Java Key Store for Kafka)\n\n\n\nParts of this process have been automated using Cargo-make, which is the task runner for the Kiln build process.\n\n\n\n## Building the Data-forwarder binary for musl-libc\n\nThere is a Cargo-make target for building this component for musl-libc which will also run linting and unit tests.\n\n```\n\ncd data-forwarder\n\ncargo make build-data-forwarder-musl\n\n```\n\n\n\n## Building the tool docker image\n\nThis step assumes you have already built the Data-forwarder using the previously mentioned Cargo-make target and are in the project root. The Bundler-audit `Makefile.toml` includes a set of tasks that can be used as a starting point for building new tool images.\n\n```\n\ncargo make tools\n\n```\n\n\n\n## Building the Server Components\n\nTo build the data-collector Docker image, run `cargo make server-components` from the project root. This target will be kept updated with all of the docker image targets. If you want a faster iteration cycle, you can cd into the directory of the component you're working on and run `cargo make build-COMPONENT-NAME-git-docker`. 
\n\n\n\n## Configuring the slack-connector docker container\n\nYou will need to create a `.env` file in the root of the project. This path is ignored by Git, because it will contain a Slack OAuth2 token. Find the information needed by following the instructions in the Slack-connector component README.\n\n\n", "file_path": "docs/integration_testing.md", "rank": 69, "score": 27859.769871946297 }, { "content": "## Generating TLS certificates\n\nKiln expects to connect to a Kafka cluster over TLS only, so in order to run a stack locally, we need to setup a basic PKI to ensure certificates can be validated and the connection will be successful. This process has been scripted, but still requires a small amount of user interaction.\n\n\n\nTo start the process, run the `gen_certs.sh` script in the root of the project. When prompted if you trust each certificate, enter 'yes'. This is the Java Keytool building the keystore and truststore required to connect to Kafka. This step should result in a new directory called 'tls' containing a signed CA certificate and 2 Java Keystore files containing the CA certificate and the Kafka broker certificate.\n\n\n\n## Starting the stack\n\nOnce you've built the required docker images and generated the PKI using the `gen_certs.sh` script, you can bring up a Kiln stack. This requires two terminals, because Kiln expects that the Kafka cluster is ready to accept incoming connections when it starts.\n\n\n\nIn the first terminal, run `docker-compose up zookeeper kafka`. Once this has finished starting up and you see a message in the console output about the ToolReports topic being created, you can bring up the data-collector and report-parser by running `docker-compose up data-collector report-parser slack-connector` in the second terminal. \n\n\n", "file_path": "docs/integration_testing.md", "rank": 70, "score": 27858.870414304183 }, { "content": "## Sending an example request\n\nYou should rarely need to use these instructions. 
They are preserved in case changes are made to the Data-collector or downstream components and you need to test a specific failure case that isn't easy to replicate some other way.\n\n\n\nBelow are a valid JSON payload for a request to the data-collector and an example cURL command to send this payload:\n\n\n\n```\n\n{\n\n \"event_version\": \"1\",\n\n \"event_id\": \"123e4567-e89b-12d3-a456-426655440000\",\n\n \"application_name\": \"Test application\",\n\n \"git_branch\": \"main\",\n\n \"git_commit_hash\": \"e99f715d0fe787cd43de967b8a79b56960fed3e5\",\n\n \"tool_name\": \"example tool\",\n\n \"tool_output\": \"{}\",\n\n \"output_format\": \"JSON\",\n\n \"start_time\": \"2019-09-13T19:35:38+00:00\",\n\n \"end_time\": \"2019-09-13T19:37:14+00:00\",\n\n \"environment\": \"Local\",\n\n\t\"tool_version\": \"1.0\"\n\n}\n\n```\n\n\n\n```\n\ncurl -X POST \\\n\n http://127.0.0.1:8081 \\\n\n -H 'Accept: */*' \\\n\n -H 'Accept-Encoding: gzip, deflate' \\\n\n -H 'Cache-Control: no-cache' \\\n\n -H 'Connection: keep-alive' \\\n\n -H 'Content-Length: 372' \\\n\n -H 'Content-Type: application/json' \\\n\n -H 'Host: 127.0.0.1:8081' \\\n\n -H 'cache-control: no-cache' \\\n\n -d '{\n\n \"event_version\": \"1\",\n\n \"event_id\": \"123e4567-e89b-12d3-a456-426655440000\",\n\n \"application_name\": \"Test application\",\n\n \"git_branch\": \"main\",\n\n \"git_commit_hash\": \"e99f715d0fe787cd43de967b8a79b56960fed3e5\",\n\n \"tool_name\": \"example tool\",\n\n \"tool_output\": \"{}\",\n\n \"output_format\": \"JSON\",\n\n \"start_time\": \"2019-09-13T19:35:38+00:00\",\n\n \"end_time\": \"2019-09-13T19:37:14+00:00\",\n\n \"environment\": \"Local\",\n\n \"tool_version\": \"1.0\"\n\n}'\n\n```\n", "file_path": "docs/integration_testing.md", "rank": 71, "score": 27856.060334043264 }, { "content": "## Connecting the console consumer\n\nTo check the full flow of messages being consumed from the Kafka DependencyEvents topic, the easiest way is to start a Kafka console consumer. 
To do this, you need to `docker exec` into the Kafka broker container and from there you can start the console consumer.\n\n\n\nRun `docker exec -it kiln_kafka_1 bash` to get a shell within the running Kafka container.\n\n\n\nThen to start the console consumer, run `$KAFKA_HOME/bin/kafka-console-consumer.sh --bootstrap-server kafka:9092 --topic DependencyEvents --consumer.config /tls/client-ssl.properties --from-beginning`. Now if you send a valid HTTP request to the data-collector, you should see a serialised Avro message printed in this terminal.\n\n\n\n## Running a tool image against a local Kiln stack\n\nUsing the Bundler-audit tool image as an example, this command will start the tool, mounting the current working directory for analysis and report it to a locally running Kiln stack: `docker run -it -v \"${PWD}:/code\" --net host -e SCAN_ENV=\"Local\" -e APP_NAME=\"Railsgoat\" -e DATA_COLLECTOR_URL=\"http://localhost:8081\" kiln/bundler-audit:git-latest`\n\n\n\nA good codebase to test this particular example on is [OWASP RailsGoat](https://github.com/OWASP/railsgoat).\n\n\n", "file_path": "docs/integration_testing.md", "rank": 72, "score": 27855.747302047243 }, { "content": " fn main_returns_error_when_environment_vars_missing() {\n\n set_env_vars();\n\n std::env::remove_var(\"KAFKA_BOOTSTRAP_TLS\");\n\n\n\n let actual = main();\n\n\n\n match actual {\n\n Ok(_) => panic!(\"expected Err(_) value\"),\n\n Err(err) => assert_eq!(\n\n \"Required environment variable KAFKA_BOOTSTRAP_TLS failed validation because value is missing\",\n\n err.to_string()\n\n ),\n\n }\n\n }\n\n}\n", "file_path": "data-collector/src/main.rs", "rank": 81, "score": 44.314335983773745 }, { "content": "}\n\n\n\n#[cfg(feature = \"avro\")]\n\nimpl TryFrom<avro_rs::types::Value> for SuppressionReason {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: avro_rs::types::Value) -> Result<Self, Self::Error> {\n\n match value {\n\n avro_rs::types::Value::String(s) => 
SuppressionReason::try_from(s),\n\n _ => Err(ValidationError::suppression_reason_not_a_string()),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"all\")]\n\npub mod tests {\n\n // TODO: Separate tests based on whether they test the JSON validation or the business logic\n\n // validation\n\n use super::*;\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 82, "score": 44.24179393881785 }, { "content": "impl AsRef<str> for ToolOutput {\n\n fn as_ref(&self) -> &str {\n\n self.0.as_ref()\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct ToolVersion(Option<String>);\n\n\n\nimpl TryFrom<Option<String>> for ToolVersion {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: Option<String>) -> Result<Self, Self::Error> {\n\n match value {\n\n None => Ok(ToolVersion(None)),\n\n Some(value) => {\n\n if value.is_empty() {\n\n Err(ValidationError::tool_version_present_but_empty())\n\n } else {\n\n Ok(ToolVersion(Some(value)))\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 85, "score": 43.47046309805454 }, { "content": "\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct EventVersion(String);\n\n\n\nimpl TryFrom<String> for EventVersion {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n\n Err(ValidationError::event_version_present_but_empty())\n\n } else if value != \"1\" {\n\n Err(ValidationError::event_version_unknown())\n\n } else {\n\n Ok(EventVersion(value))\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for EventVersion {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 86, "score": 42.36303145339201 }, { "content": " None => write!(f, \"Not Provided\"),\n\n Some(t) => write!(f, \"{}\", t),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct GitCommitHash(String);\n\n\n\nimpl TryFrom<String> for GitCommitHash {\n\n 
type Error = ValidationError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n\n return Err(ValidationError::git_commit_hash_empty());\n\n };\n\n\n\n let re = Regex::new(r\"^[0-9a-fA-F]{40}$\").unwrap();\n\n if re.is_match(&value) {\n\n Ok(GitCommitHash(value))\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 87, "score": 42.14482015768368 }, { "content": " #[test]\n\n fn parse_payload_returns_error_when_body_empty() {\n\n let p = \"\".to_owned();\n\n let payload = p.as_bytes().into_iter().cloned().collect::<Vec<u8>>();\n\n let body: Bytes = Bytes::from_iter(payload);\n\n let expected = ValidationError::body_empty();\n\n let actual = parse_payload(&body).expect_err(\"expected Err(_) value\");\n\n\n\n assert_eq!(expected, actual);\n\n }\n\n\n\n #[test]\n\n fn parse_payload_returns_error_when_body_contains_bytes() {\n\n let p = \"\\u{0000}\".to_string();\n\n let payload = p.as_bytes().into_iter().cloned().collect::<Vec<u8>>();\n\n let body: Bytes = Bytes::from_iter(payload);\n\n let expected = ValidationError::body_media_type_incorrect();\n\n\n\n let actual = parse_payload(&body).expect_err(\"expected Ok(_) value\");\n\n\n", "file_path": "data-collector/src/main.rs", "rank": 88, "score": 41.653027268075434 }, { "content": "\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n\n Err(ValidationError::suppression_reason_empty())\n\n } else {\n\n Ok(SuppressionReason(value))\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct SuppressedBy(String);\n\n\n\nimpl TryFrom<String> for SuppressedBy {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n\n Err(ValidationError::suppressed_by_empty())\n\n } else {\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 89, "score": 40.437903073421545 }, { "content": " } else {\n\n Err(ValidationError::git_commit_hash_not_valid())\n\n }\n\n 
}\n\n}\n\n\n\nimpl std::fmt::Display for GitCommitHash {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct ToolName(String);\n\n\n\nimpl TryFrom<String> for ToolName {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 90, "score": 39.39085653935812 }, { "content": " type Error = ValidationError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n\n Err(ValidationError::application_name_empty())\n\n } else {\n\n Ok(ApplicationName(value))\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for ApplicationName {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct GitBranch(Option<String>);\n\n\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 91, "score": 39.36163111644401 }, { "content": "impl TryFrom<Option<String>> for GitBranch {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: Option<String>) -> Result<Self, Self::Error> {\n\n match value {\n\n None => Ok(GitBranch(None)),\n\n Some(value) => {\n\n if value.is_empty() {\n\n Err(ValidationError::git_branch_name_empty())\n\n } else {\n\n Ok(GitBranch(Some(value)))\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for GitBranch {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match &self.0 {\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 92, "score": 38.73640158159533 }, { "content": "\n\nimpl std::fmt::Display for AdvisoryId {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct AdvisoryUrl(Url);\n\n\n\nimpl TryFrom<String> for 
AdvisoryUrl {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n\n Err(ValidationError::advisory_url_empty())\n\n } else if Url::parse(&value).is_err() {\n\n Err(ValidationError::advisory_url_not_valid())\n\n } else {\n\n Ok(AdvisoryUrl(Url::parse(&value).unwrap()))\n", "file_path": "kiln_lib/src/dependency_event.rs", "rank": 93, "score": 38.31392957974776 }, { "content": " write!(f, \"{}\", self.0)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct EventID(uuid::Uuid);\n\n\n\nimpl TryFrom<String> for EventID {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n\n Err(ValidationError::event_id_present_but_empty())\n\n } else {\n\n match uuid::Uuid::parse_str(value.as_ref()) {\n\n Ok(id) => Ok(EventID(id)),\n\n Err(_) => Err(ValidationError::event_id_not_a_uuid()),\n\n }\n\n }\n\n }\n", "file_path": "kiln_lib/src/tool_report.rs", "rank": 94, "score": 38.18308458878952 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize)]\n\npub struct AffectedPackage(String);\n\n\n\nimpl Hashable for AffectedPackage {\n\n fn hash(&self) -> Vec<u8> {\n\n digest::digest(&digest::SHA256, &self.0.as_bytes())\n\n .as_ref()\n\n .to_vec()\n\n }\n\n}\n\n\n\nimpl TryFrom<String> for AffectedPackage {\n\n type Error = ValidationError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n if value.is_empty() {\n", "file_path": "kiln_lib/src/dependency_event.rs", "rank": 95, "score": 37.696660825618835 }, { "content": "impl From<HandlerError> for actix_web::error::Error {\n\n fn from(err: HandlerError) -> Self {\n\n match err {\n\n HandlerError::ValidationError(e) => actix_web::error::ErrorBadRequest(e),\n\n HandlerError::KafkaError { source } => {\n\n actix_web::error::ErrorInternalServerError(source)\n\n }\n\n HandlerError::AvroError(_) => 
actix_web::error::ErrorInternalServerError(err),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Config {\n\n kafka_bootstrap_tls: Vec<String>,\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "data-collector/src/main.rs", "rank": 96, "score": 37.584548096385234 }, { "content": " }\n\n\n\n #[test]\n\n fn advisory_id_try_from_string_returns_error_when_value_empty() {\n\n let expected = ValidationError::advisory_id_empty();\n\n let actual = AdvisoryId::try_from(\"\".to_string()).expect_err(\"Expected Err(_) value\");\n\n\n\n assert_eq!(expected, actual)\n\n }\n\n\n\n #[test]\n\n fn advisory_url_try_from_string_returns_error_when_value_empty() {\n\n let expected = ValidationError::advisory_url_empty();\n\n let actual = AdvisoryUrl::try_from(\"\".to_string()).expect_err(\"Expected Err(_) value\");\n\n\n\n assert_eq!(expected, actual)\n\n }\n\n\n\n #[test]\n\n fn advisory_url_try_from_string_returns_error_when_value_not_valid() {\n", "file_path": "kiln_lib/src/dependency_event.rs", "rank": 97, "score": 36.646493796739904 }, { "content": " let issue_hash = match t.get(\"issue_hash\") {\n\n None => Err(ValidationError::issue_hash_required()),\n\n Some(toml::Value::String(s)) => IssueHash::try_from(s.clone()),\n\n _ => Err(ValidationError::issue_hash_not_a_string()),\n\n }?;\n\n\n\n let suppression_reason = match t.get(\"suppression_reason\") {\n\n None => Err(ValidationError::suppression_reason_required()),\n\n Some(toml::Value::String(s)) => SuppressionReason::try_from(s.clone()),\n\n _ => Err(ValidationError::suppression_reason_not_a_string()),\n\n }?;\n\n\n\n let expiry_date = match t.get(\"expiry_date\") {\n\n None => Ok(ExpiryDate::from(None)),\n\n Some(toml::Value::String(s)) => ExpiryDate::try_from(Some(s.clone())),\n\n _ => Err(ValidationError::expiry_date_not_a_string()),\n\n }?;\n\n\n\n let suppressed_by = match t.get(\"suppressed_by\") {\n\n None => Err(ValidationError::suppressed_by_required()),\n", "file_path": 
"kiln_lib/src/tool_report.rs", "rank": 98, "score": 35.34145270563486 }, { "content": " let actual =\n\n Timestamp::try_from(\"not a timestamp\".to_string()).expect_err(\"Expected Err(_) value\");\n\n\n\n assert_eq!(expected, actual)\n\n }\n\n\n\n #[test]\n\n fn affected_package_try_from_string_returns_error_when_value_empty() {\n\n let expected = ValidationError::affected_package_empty();\n\n let actual = AffectedPackage::try_from(\"\".to_string()).expect_err(\"Expected Err(_) value\");\n\n\n\n assert_eq!(expected, actual)\n\n }\n\n\n\n #[test]\n\n fn installed_version_try_from_string_returns_error_when_value_empty() {\n\n let expected = ValidationError::installed_version_empty();\n\n let actual = InstalledVersion::try_from(\"\".to_string()).expect_err(\"Expected Err(_) value\");\n\n\n\n assert_eq!(expected, actual)\n", "file_path": "kiln_lib/src/dependency_event.rs", "rank": 99, "score": 35.2782345101227 } ]
Rust
src/get_stats.rs
razorheadfx/project-cleanup
8444bb77da3b5ed969bcd28f4daf4c09363a5fc8
use std::{ thread, process }; use std::path::PathBuf; use std::sync::mpsc::channel; use std::collections::HashMap; use colored::*; use humansize::{ FileSize, file_size_opts as options }; use crate::languages::*; use crate::file_utils::{ fname, walk_files }; use crate::spinner::Spinner; #[derive(Debug)] pub struct Stats { pub size_deps : u64, pub size_src : u64, pub modified : u64 } pub struct StatsResult { stats : HashMap<PathBuf, Stats>, langs : HashMap<Language, u32>, total_size_src : u64, total_size_deps_candelete : u64, total_size_deps_modified : u64 } pub fn get(project_paths : Vec<PathBuf>) -> HashMap<PathBuf, Stats> { let (tx, rc) = channel(); thread::spawn(move || { let mut stats = HashMap::new(); let mut langs = HashMap::new(); langs.insert(NODE, 0); langs.insert(RUST, 0); langs.insert(JAVA, 0); let mut total_size_src = 0; let mut total_size_deps_candelete = 0; let mut total_size_deps_modified = 0; for path in project_paths { let _ = tx.send(None); let lang = identify(&path).unwrap(); if let Some(lang_counter) = langs.get_mut(&lang) { *lang_counter += 1; } let size_src = walk_files(&path, &|p| !lang.get_paths().contains(&fname(&&p))) .into_iter() .filter_map(|p| p.metadata().ok()) .map(|d| d.len()) .fold(0, |acc, i| acc + i); total_size_src += size_src; let mut size_deps = 0; for p in lang.get_paths() { size_deps += walk_files(&path.join(p), &|_| true) .into_iter() .filter_map(|p| p.metadata().ok()) .map(|d| d.len()) .fold(0, |acc, i| acc + i); } let modified = walk_files(&path, &|p| !lang.get_paths().contains(&fname(&&p))) .into_iter() .filter_map(|p| p.metadata().ok()) .filter_map(|d| d.modified().ok()) .filter_map(|m| m.elapsed().ok()) .map(|e| e.as_secs()) .min().unwrap(); if modified > 2592000 { total_size_deps_candelete += size_deps; } else { total_size_deps_modified += size_deps; } stats.insert(path.clone(), Stats {size_deps, size_src, modified}); } let _ = tx.send(Some(StatsResult {stats, langs, total_size_src, total_size_deps_candelete, 
total_size_deps_modified})); }); println!("Analysing projects"); let mut spinner = Spinner::new("Analysing projects..."); let mut i = 0; loop { let data = rc.recv(); if let Err(err) = data { println!("Error in thread: {}", err); process::exit(0); } let data = data.unwrap(); if let Some(res) = data { let results : StatsResult = res; spinner.finish(format!("Analysed {} projects", results.stats.len()).as_str()); for (lang, count) in results.langs { if count == 1 { println!(" - {} {} project", count.to_string().bold(), lang.name()); } else if count > 1 { println!(" - {} {} projects", count.to_string().bold(), lang.name()); } } println!(" {} of source code and project files", format_size(results.total_size_src).bold()); if results.total_size_deps_candelete > 0 || results.total_size_deps_modified > 0 { if results.total_size_deps_candelete == 0 { println!(" No dependencies & builds over 1 month old"); } else { println!(" {} of dependencies & builds over 1 month old", format_size(results.total_size_deps_candelete).bold()); } if results.total_size_deps_modified == 0 { println!(" No recently used dependencies & builds"); } else { println!(" {} of recently used dependencies & builds", format_size(results.total_size_deps_modified).bold()); } } return results.stats; } i += 1; spinner.update(format!("Analysing {} projects...", i).as_str()); } } pub fn format_size(size : u64) -> String { return size.file_size(options::CONVENTIONAL).unwrap(); }
use std::{ thread, process }; use std::path::PathBuf; use std::sync::mpsc::channel; use std::collections::HashMap; use colored::*; use humansize::{ FileSize, file_size_opts as options }; use crate::languages::*; use crate::file_utils::{ fname, walk_files }; use crate::spinner::Spinner; #[derive(Debug)] pub struct Stats { pub size_deps : u64, pub size_src : u64, pub modified : u64 } pub struct StatsResult { stats : HashMap<PathBuf, Stats>, langs : HashMap<Language, u32>, total_size_src : u64, total_size_deps_candelete : u64, total_size_deps_modified : u64 }
pub fn format_size(size : u64) -> String { return size.file_size(options::CONVENTIONAL).unwrap(); }
pub fn get(project_paths : Vec<PathBuf>) -> HashMap<PathBuf, Stats> { let (tx, rc) = channel(); thread::spawn(move || { let mut stats = HashMap::new(); let mut langs = HashMap::new(); langs.insert(NODE, 0); langs.insert(RUST, 0); langs.insert(JAVA, 0); let mut total_size_src = 0; let mut total_size_deps_candelete = 0; let mut total_size_deps_modified = 0; for path in project_paths { let _ = tx.send(None); let lang = identify(&path).unwrap(); if let Some(lang_counter) = langs.get_mut(&lang) { *lang_counter += 1; } let size_src = walk_files(&path, &|p| !lang.get_paths().contains(&fname(&&p))) .into_iter() .filter_map(|p| p.metadata().ok()) .map(|d| d.len()) .fold(0, |acc, i| acc + i); total_size_src += size_src; let mut size_deps = 0; for p in lang.get_paths() { size_deps += walk_files(&path.join(p), &|_| true) .into_iter() .filter_map(|p| p.metadata().ok()) .map(|d| d.len()) .fold(0, |acc, i| acc + i); } let modified = walk_files(&path, &|p| !lang.get_paths().contains(&fname(&&p))) .into_iter() .filter_map(|p| p.metadata().ok()) .filter_map(|d| d.modified().ok()) .filter_map(|m| m.elapsed().ok()) .map(|e| e.as_secs()) .min().unwrap(); if modified > 2592000 { total_size_deps_candelete += size_deps; } else { total_size_deps_modified += size_deps; } stats.insert(path.clone(), Stats {size_deps, size_src, modified}); } let _ = tx.send(Some(StatsResult {stats, langs, total_size_src, total_size_deps_candelete, total_size_deps_modified})); }); println!("Analysing projects"); let mut spinner = Spinner::new("Analysing projects..."); let mut i = 0; loop { let data = rc.recv(); if let Err(err) = data { println!("Error in thread: {}", err); process::exit(0); } let data = data.unwrap(); if let Some(res) = data { let results : StatsResult = res; spinner.finish(format!("Analysed {} projects", results.stats.len()).as_str()); for (lang, count) in results.langs { if count == 1 { println!(" - {} {} project", count.to_string().bold(), lang.name()); } else if count > 1 { println!(" - {} 
{} projects", count.to_string().bold(), lang.name()); } } println!(" {} of source code and project files", format_size(results.total_size_src).bold()); if results.total_size_deps_candelete > 0 || results.total_size_deps_modified > 0 { if results.total_size_deps_candelete == 0 { println!(" No dependencies & builds over 1 month old"); } else { println!(" {} of dependencies & builds over 1 month old", format_size(results.total_size_deps_candelete).bold()); } if results.total_size_deps_modified == 0 { println!(" No recently used dependencies & builds"); } else { println!(" {} of recently used dependencies & builds", format_size(results.total_size_deps_modified).bold()); } } return results.stats; } i += 1; spinner.update(format!("Analysing {} projects...", i).as_str()); } }
function_block-full_function
[ { "content": "/// Gets the filename of a given path\n\npub fn fname(path : &Path) -> &str {\n\n\tpath.file_name().unwrap_or(OsStr::new(\"\")).to_str().unwrap_or(\"\")\n\n}\n", "file_path": "src/file_utils/mod.rs", "rank": 0, "score": 70209.53629687006 }, { "content": "pub fn identify(p : &Path) -> Option<Language> {\n\n\tif p.join(\"package.json\").exists() { return Some(NODE); }\n\n\tif p.join(\"Cargo.toml\").exists() { return Some(RUST); }\n\n\tif p.join(\"pom.xml\").exists() || p.join(\"gradlew\").exists() { return Some(JAVA); }\n\n\treturn None;\n\n}\n", "file_path": "src/languages/identify.rs", "rank": 2, "score": 53193.350140535294 }, { "content": "/// Filters the given projects to determine which directories should be removed\n\n///\n\n/// # Arguments\n\n/// * `projects` - Hash map of the projects and their collected stats\n\n/// * `all` - True to ignore the modified date\n\n///\n\n/// # Returns\n\n/// The directories to remove, and the total size that will be fred if all\n\n/// paths are removed\n\npub fn filter(projects : HashMap<PathBuf, Stats>, all : bool) -> (Vec<PathBuf>, u64) {\n\n\tlet mut remove = Vec::new();\n\n\tlet mut remove_size = 0;\n\n\n\n\tfor (path, stats) in projects {\n\n\t\t// If a project is older than a month its dependencies should be removed\n\n\t\tif stats.modified > 2592000 || all {\n\n\t\t\tif let Some(lang) = identify(&path) {\n\n\t\t\t\tremove_size += stats.size_deps;\n\n\n\n\t\t\t\t// Each language has different paths that should be removed\n\n\t\t\t\tfor lang_path in lang.get_paths() {\n\n\t\t\t\t\tlet p = path.join(lang_path);\n\n\t\t\t\t\tif p.exists() { remove.push(p); }\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\t// If there are no projects that should actually be removed, just stop here\n", "file_path": "src/filter_paths.rs", "rank": 3, "score": 53180.36747141291 }, { "content": "/// Finds all code projects in the given directory or directories\n\n///\n\n/// # Arguments\n\n/// * `root_paths` - The path(s) to 
search\n\npub fn find(root_paths : Vec<PathBuf>, ignore : Option<Regex>) -> Vec<PathBuf> {\n\n\t// Create message channel for thread communication\n\n\tlet (tx, rc) = channel();\n\n\n\n\t// Spawn controller thread\n\n\tthread::spawn(move || {\n\n\t\tlet mut threads = Vec::new();\n\n\n\n\t\t// Create thread for each path\n\n\t\tfor path in root_paths {\n\n\t\t\t// Each thread gets its own channel transmitter\n\n\t\t\tlet tx_t = tx.clone();\n\n\t\t\tlet ignore_t = ignore.clone();\n\n\n\n\t\t\tthreads.push(thread::spawn(move || {\n\n\t\t\t\t// Walk all directories until finding a project directory\n\n\t\t\t\treturn walk_dirs(&path, &|p| {\n\n\t\t\t\t\tlet _ = tx_t.send(None);\n\n\n\n\t\t\t\t\tif is_ignored(&ignore_t, p) {\n", "file_path": "src/find_paths.rs", "rank": 4, "score": 41361.23487680449 }, { "content": "/// Finds all files in a directory and its subdirectories, unless stopped\n\n///\n\n/// # Arguments\n\n/// * `dir` - The root directory\n\n/// * `on_dir` - Function that is called for every directory found, should\n\n/// return `true` if walking should continue into this directory\n\n/// or `false` if not\n\npub fn walk_files(dir : &Path, on_dir : &Fn(&Path) -> bool) -> Vec<PathBuf> {\n\n\tlet mut files = Vec::new();\n\n\n\n\t// Read directory contents\n\n\tif let Ok(contents) = dir.read_dir() {\n\n\t\tfor entry in contents.filter_map(|e| e.ok()) {\n\n\t\t\t// Store file\n\n\t\t\tif entry.path().is_file() {\n\n\t\t\t\tfiles.push(entry.path());\n\n\t\t\t\tcontinue;\n\n\t\t\t}\n\n\n\n\t\t\t// Recurse if wanted\n\n\t\t\tif on_dir(&entry.path()) {\n\n\t\t\t\tfiles.append(&mut walk_files(&entry.path(), on_dir));\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\treturn files;\n\n}\n", "file_path": "src/file_utils/walk.rs", "rank": 6, "score": 37859.42159765557 }, { "content": "/// Finds all directories and subdirectories for a given path, unless stopped\n\n///\n\n/// # Arguments\n\n/// * `dir` - The root directory\n\n/// * `on_dir` - Function that is called for every directory 
found, should\n\n/// return `true` if walking should continue into this directory\n\n/// or `false` if not\n\npub fn walk_dirs(dir : &Path, on_dir : &Fn(&Path) -> WalkDirsAction) -> (Vec<PathBuf>, Vec<PathBuf>) {\n\n\tlet mut found_paths = Vec::new();\n\n\tlet mut ignored_paths = Vec::new();\n\n\n\n\t// Read directory contents\n\n\tif let Ok(contents) = dir.read_dir() {\n\n\t\tfor entry in contents.filter_map(|e| e.ok()) {\n\n\t\t\t// Skip files and hidden directories\n\n\t\t\tif entry.path().is_file() { continue; }\n\n\t\t\tif entry.file_name().to_string_lossy().starts_with(\".\") { continue; }\n\n\n\n\t\t\t// Recurse if needed\n\n\t\t\tmatch on_dir(&entry.path()) {\n\n\t\t\t\tWalkDirsAction::AddAndRecurse => {\n\n\t\t\t\t\tfound_paths.push(entry.path());\n\n\n\n\t\t\t\t\tlet (found, ignored) = &mut walk_dirs(&entry.path(), on_dir);\n\n\t\t\t\t\tfound_paths.append(found);\n\n\t\t\t\t\tignored_paths.append(ignored);\n\n\t\t\t\t},\n\n\t\t\t\tWalkDirsAction::Add => found_paths.push(entry.path()),\n\n\t\t\t\tWalkDirsAction::Ignore => ignored_paths.push(entry.path()),\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\treturn (found_paths, ignored_paths);\n\n}\n\n\n", "file_path": "src/file_utils/walk.rs", "rank": 7, "score": 27817.074533499304 }, { "content": "/// Checks if a given path is ignored\n\n///\n\n/// # Arguments\n\n/// * `ignore` - The ignore regex, if set\n\n/// * `path` - Path to check against the ignore regex\n\n///\n\n/// # Returns\n\n/// * `true` - If the path matches the regex\n\n/// * `false` - If the regex and path don't match, if no ignore\n\n/// regex was given, or if the path is empty\n\nfn is_ignored(ignore : &Option<Regex>, path : &Path) -> bool {\n\n\tmatch ignore {\n\n\t\tNone => false,\n\n\t\tSome(re) => {\n\n\t\t\tmatch path.to_str() {\n\n\t\t\t\tNone => false,\n\n\t\t\t\tSome(path) => {\n\n\t\t\t\t\tif path.len() == 0 {\n\n\t\t\t\t\t\tfalse\n\n\t\t\t\t\t} else 
{\n\n\t\t\t\t\t\tre.is_match(path)\n\n\t\t\t\t\t}\n\n\t\t\t\t},\n\n\t\t\t}\n\n\t\t},\n\n\t}\n\n}\n\n\n\n/// Returns one of two values, depending on whether the count is 1 or not\n\n///\n\n/// # Arguments\n\n/// * `count` - The item count\n\n/// * `singular` - Value when count == 1\n\n/// * `plural` - Value when count != 1\n", "file_path": "src/find_paths.rs", "rank": 8, "score": 26686.039648831465 }, { "content": "/// Removes the given paths\n\npub fn remove(paths : Vec<PathBuf>) {\n\n\tprintln!(\"Deleting selected directories\");\n\n\tlet mut spinner = Spinner::new(\"Deleting directories...\");\n\n\tlet mut i = 0;\n\n\n\n\tfor path in paths {\n\n\t\tif let Err(err) = remove_dir_all(path) {\n\n\t\t\tprintln!(\"Error while deleting directory: {}\", err);\n\n\t\t}\n\n\n\n\t\ti += 1;\n\n\t\tspinner.update(format!(\"Deleted {} directories\", i).as_str());\n\n\t}\n\n\n\n\tspinner.finish(format!(\"Deleted {} directories\", i).as_str());\n\n}\n", "file_path": "src/remove_paths.rs", "rank": 9, "score": 24477.122163664135 }, { "content": "use std::path::Path;\n\nuse std::ffi::OsStr;\n\n\n\nmod walk;\n\n\n\npub use self::walk::{ walk_dirs, walk_files, WalkDirsAction };\n\n\n\n/// Gets the filename of a given path\n", "file_path": "src/file_utils/mod.rs", "rank": 17, "score": 19985.565572958352 }, { "content": "use std::path::{ Path, PathBuf };\n\n\n\n/// Action to take when in a directory with `walk_dirs`\n\n#[derive(Debug)]\n\npub enum WalkDirsAction {\n\n\t/// Add this entry and recurse into the directory\n\n\tAddAndRecurse,\n\n\n\n\t/// Add this entry and don't recurse\n\n\tAdd,\n\n\n\n\t/// Ignore this entry\n\n\tIgnore,\n\n}\n\n\n\n/// Finds all directories and subdirectories for a given path, unless stopped\n\n///\n\n/// # Arguments\n\n/// * `dir` - The root directory\n\n/// * `on_dir` - Function that is called for every directory found, should\n\n/// return `true` if walking should continue into this directory\n\n/// or `false` if not\n", "file_path": 
"src/file_utils/walk.rs", "rank": 18, "score": 19980.953482002104 }, { "content": "use std::process;\n\nuse std::path::PathBuf;\n\nuse std::collections::HashMap;\n\n\n\nuse colored::*;\n\n\n\nuse crate::get_stats::Stats;\n\nuse crate::languages::identify;\n\n\n\n/// Filters the given projects to determine which directories should be removed\n\n///\n\n/// # Arguments\n\n/// * `projects` - Hash map of the projects and their collected stats\n\n/// * `all` - True to ignore the modified date\n\n///\n\n/// # Returns\n\n/// The directories to remove, and the total size that will be fred if all\n\n/// paths are removed\n", "file_path": "src/filter_paths.rs", "rank": 19, "score": 10.69668874707218 }, { "content": "use std::path::PathBuf;\n\nuse regex::Regex;\n\nuse std::io::{ stdin, stdout, Write };\n\n\n\nuse colored::*;\n\nuse structopt::StructOpt;\n\n\n\nmod languages;\n\nmod file_utils;\n\nmod find_paths;\n\nmod get_stats;\n\nmod filter_paths;\n\nmod remove_paths;\n\nmod spinner;\n\n\n\nuse crate::get_stats::format_size;\n\n\n", "file_path": "src/main.rs", "rank": 20, "score": 9.169775898841142 }, { "content": "use std::{ thread, process };\n\nuse std::path::{PathBuf, Path};\n\nuse std::sync::mpsc::channel;\n\n\n\nuse colored::*;\n\nuse regex::Regex;\n\n\n\nuse crate::languages::identify;\n\nuse crate::file_utils::{ walk_dirs, WalkDirsAction };\n\nuse crate::spinner::Spinner;\n\n\n\n/// Finds all code projects in the given directory or directories\n\n///\n\n/// # Arguments\n\n/// * `root_paths` - The path(s) to search\n", "file_path": "src/find_paths.rs", "rank": 21, "score": 8.888190879008683 }, { "content": "use std::time::Instant;\n\nuse std::cmp;\n\n\n\nuse colored::Colorize;\n\nuse std::io::{ stdout, Write };\n\n\n\nconst MS_PER_STEP : u128 = 100;\n\nconst STEPS : [&'static str; 10] = [\"⠸ \", \"⠼ \", \"⠴ \", \"⠦ \", \"⠧ \", \"⠇ \", \"⠏ \", \"⠋ \", \"⠙ \", \"⠹ \"];\n\nconst DONE : &'static str = \"ok\";\n\n\n\n/// Shows a rotating spinner in the 
terminal\n\n///\n\n/// The spinner will rotate when it is updated, it will not be updated\n\n/// automatically. Printing anything to the terminal will break the\n\n/// spinner. Call `.finish()` to finish the spinner.\n\npub struct Spinner {\n\n\tstep : usize,\n\n\tstep_increased : Instant,\n\n\ttext_length: usize,\n\n}\n", "file_path": "src/spinner.rs", "rank": 22, "score": 7.26769175110652 }, { "content": "use std::fs::remove_dir_all;\n\nuse std::path::PathBuf;\n\nuse crate::spinner::Spinner;\n\n\n\n/// Removes the given paths\n", "file_path": "src/remove_paths.rs", "rank": 23, "score": 5.159594819949559 }, { "content": "use std::path::Path;\n\n\n\nuse super::*;\n\n\n", "file_path": "src/languages/identify.rs", "rank": 24, "score": 5.101961534360449 }, { "content": "\tif remove.len() == 0 {\n\n\t\tprintln!(\"{}\", \"No projects have directories that can be removed\".yellow());\n\n\t\tprintln!(\" This is likely because your projects were recently modified\");\n\n\t\tprintln!(\" Run the application with `{}` to disregard file age\", \"--all\".bold());\n\n\t\tprintln!(\" Try `{}` for more options\", \"--help\".bold());\n\n\t\tprocess::exit(0);\n\n\t}\n\n\n\n\treturn (remove, remove_size);\n\n}\n", "file_path": "src/filter_paths.rs", "rank": 25, "score": 4.864614425411789 }, { "content": "mod identify;\n\nmod language;\n\n\n\npub use self::identify::identify;\n\npub use self::language::Language;\n\n\n\n/// Node.js projects\n\npub const NODE : Language = Language {\n\n\tname: \"Node.js\",\n\n\tpaths: &[\"node_modules\", \".cache\"]\n\n};\n\n\n\n/// Rust projects\n\npub const RUST : Language = Language {\n\n\tname: \"Rust\",\n\n\tpaths: &[\"target\"]\n\n};\n\n\n\n/// Java projects\n\npub const JAVA : Language = Language {\n\n\tname: \"Java\",\n\n\tpaths: &[\"build\", \".gradle\"]\n\n};\n", "file_path": "src/languages/mod.rs", "rank": 26, "score": 4.4036449990972075 }, { "content": "\t\t// Send the results to the main thread\n\n\t\tlet _ = 
tx.send(Some((found_paths.into_iter().filter(|p| identify(p).is_some()).collect::<Vec<PathBuf>>(), ignored_paths)));\n\n\t});\n\n\n\n\tprintln!(\"Searching for code projects\");\n\n\tlet mut spinner = Spinner::new(\"Searching directories...\");\n\n\tlet mut searched = 0;\n\n\n\n\tloop {\n\n\t\t// Wait for a message from the thread\n\n\t\tlet data = rc.recv();\n\n\n\n\t\t// Handle errors\n\n\t\tif let Err(err) = data {\n\n\t\t\tprintln!(\"Error in thread: {}\", err);\n\n\t\t\tprocess::exit(0);\n\n\t\t}\n\n\n\n\t\t// If the paths are loaded, return them\n\n\t\tlet data = data.unwrap();\n", "file_path": "src/find_paths.rs", "rank": 27, "score": 3.4412486427427935 }, { "content": "\n\n\t// If on Windows, we need to enable the virtual terminal\n\n\t// to allow for proper colour support. Other platforms should\n\n\t// support ansi colouring without a problem.\n\n\t#[cfg(windows)]\n\n\tcolored::control::set_virtual_terminal(true).expect(\"Could not initialise virtual terminal\");\n\n\n\n\t// Parse CLI settings\n\n\tlet mut settings = Settings::from_args();\n\n\t// Check if we need to include the working directory because no path was provided\n\n\tif settings.paths.is_empty(){\n\n\tsettings.paths.push(\".\".into())\n\n\t}\n\n\n\n\t// Find the project paths\n\n\tlet paths = find_paths::find(settings.paths, settings.ignore);\n\n\n\n\t// Get stats for the discovered projects\n\n\tlet stats = get_stats::get(paths);\n\n\n", "file_path": "src/main.rs", "rank": 28, "score": 2.989749840563955 }, { "content": "\t\t\t\t\t\treturn WalkDirsAction::Ignore;\n\n\t\t\t\t\t} else if identify(p).is_some() {\n\n\t\t\t\t\t\treturn WalkDirsAction::Add;\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\treturn WalkDirsAction::AddAndRecurse;\n\n\t\t\t\t\t}\n\n\t\t\t\t});\n\n\t\t\t}));\n\n\t\t}\n\n\n\n\t\t// Wait for all threads to finish and combine the results\n\n\t\tlet mut found_paths = Vec::new();\n\n\t\tlet mut ignored_paths = Vec::new();\n\n\n\n\t\tfor thread in threads {\n\n\t\t\tlet (found, ignored) = 
&mut thread.join().unwrap();\n\n\t\t\tfound_paths.append(found);\n\n\t\t\tignored_paths.append(ignored);\n\n\t\t}\n\n\n", "file_path": "src/find_paths.rs", "rank": 29, "score": 2.5315515641690434 }, { "content": "/// The definition for a project language\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\npub struct Language {\n\n\tpub name : &'static str,\n\n\tpub paths : &'static [&'static str]\n\n}\n\n\n\nimpl Language {\n\n\t/// Gets the name of the language\n\n\tpub fn name(&self) -> &'static str { &self.name }\n\n\n\n\t/// Gets the paths that should be removed from a project\n\n\t///\n\n\t/// # Arguments\n\n\t/// * `path` - Path to the project directory\n\n\t///\n\n\t/// # Returns\n\n\t/// The path(s) that will be removed from the given project, or none if\n\n\t/// there are no paths to be removed\n\n\tpub fn get_paths(&self) -> &'static [&'static str] { self.paths }\n\n}\n", "file_path": "src/languages/language.rs", "rank": 30, "score": 2.4768205638313674 }, { "content": "# Logo\n\nThe logo is based on a design made specially for this project by \n\nthe awesome [chimzycash](https://github.com/chimzycash).\n\n\n\nThe source file for the logo is the `logo.afdesign` file, which can be \n\nopened with [Affinity Designer](https://affinity.serif.com/en-gb/).\n", "file_path": "logo/README.md", "rank": 31, "score": 2.237253215831166 }, { "content": "\t/// * `text` - The text message to display after the spinner indicator\n\n\tpub fn update(&mut self, text : &str) {\n\n\t\tif self.step_increased.elapsed().as_millis() >= MS_PER_STEP {\n\n\t\t\tself.step = match self.step {\n\n\t\t\t\t9 => 0,\n\n\t\t\t\t_ => self.step + 1,\n\n\t\t\t};\n\n\n\n\t\t\tself.step_increased = Instant::now();\n\n\t\t}\n\n\n\n\t\tself.text_length = cmp::max(text.len(), self.text_length);\n\n\n\n\t\tlet indicator : &'static str = STEPS[self.step];\n\n\t\tlet spacer = \" \".repeat(self.text_length - text.len());\n\n\t\tprint!(\"\\r {} {}{}\", indicator.cyan(), text, spacer);\n\n\n\n\t\t// Try to 
flush the stdout buffer to update the print line before the\n\n\t\t// process is done, but it's not a problem if it won't flush right away\n\n\t\t// so we can just capture the result without doing anything with it\n", "file_path": "src/spinner.rs", "rank": 32, "score": 2.033413542201318 }, { "content": "[![Project Cleanup](./readme_logo.png)](https://woubuc.github.io/project-cleanup/)\n\n\n\n[![View on Crates.io](https://img.shields.io/crates/v/project-cleanup.svg)](https://crates.io/crates/project-cleanup)\n\n[![Download](https://img.shields.io/badge/download-latest-informational.svg)](https://github.com/woubuc/project-cleanup/releases/latest)\n\n[![License](https://img.shields.io/github/license/woubuc/project-cleanup.svg)](https://github.com/woubuc/project-cleanup/blob/master/LICENSE)\n\n\n\nThis little tool will recursively browse a directory to find code \n\nprojects in several languages. If the project hasn't been touched for \n\nmore than a month, it will remove directories containing libraries, \n\ndependencies, builds, etc.\n\n\n\nThe reasoning behind this is that these files can be retrieved or \n\nre-generated at any time, but if you haven't worked on the project for \n\na month chances are you don't need them taking up space on your hard \n\ndrive right now.\n\n\n\n![Screenshot](readme_screenshot.png)\n\n\n\n## Install\n\nDownload the binary for your platform from the \n\n[releases page](https://github.com/woubuc/project-cleanup/releases)\n\n\n\nOr install via Cargo: `cargo install project-cleanup`\n\n\n\n## How it works\n\nRun the application with `project-cleanup --help` to see the options.\n\n\n\n## Supported languages\n\n- Node.js (will remove the `node_modules` and `.cache` directories)\n\n- Rust (will remove the `target` directory)\n\n- Java (will remove the `.gradle` and `build` directories)\n\n\n\nThese languages are based on my own experience and use patterns. 
I welcome\n\nissues and pull requests to add more languages and/or more unnecessary\n\ndirectories in already added languages.\n\n\n\n## License\n\nEverything in this repository is published under the MIT license. See\n\nthe LICENSE file for more information.\n", "file_path": "README.md", "rank": 33, "score": 1.8903074798734014 }, { "content": "\t\tlet _ = stdout().flush();\n\n\t}\n\n\n\n\t/// Finishes the spinner with a green checkmark\n\n\t///\n\n\t/// # Arguments\n\n\t/// * `text` - The completion message to display\n\n\tpub fn finish(&self, text : &str) {\n\n\t\tlet spacer = \" \".repeat(self.text_length - text.len());\n\n\t\tprintln!(\"\\r {} {}{}\", DONE.green(), text, spacer);\n\n\t\tlet _ = stdout().flush();\n\n\t}\n\n}\n", "file_path": "src/spinner.rs", "rank": 34, "score": 1.6129085832350065 }, { "content": "\n\nimpl Spinner {\n\n\t/// Starts a new spinner\n\n\t///\n\n\t/// # Arguments\n\n\t/// * `initial_text` - The initial text message to display after the spinner indicator\n\n\tpub fn new(initial_text : &'static str) -> Spinner {\n\n\t\tlet mut spinner = Spinner {\n\n\t\t\tstep: 0,\n\n\t\t\tstep_increased: Instant::now(),\n\n\t\t\ttext_length: 0,\n\n\t\t};\n\n\n\n\t\tspinner.update(initial_text);\n\n\t\treturn spinner;\n\n\t}\n\n\n\n\t/// Updates the spinner with a new message\n\n\t///\n\n\t/// # Arguments\n", "file_path": "src/spinner.rs", "rank": 35, "score": 1.362815734097135 }, { "content": "\t\tif let Some((found, ignored)) = data {\n\n\t\t\t// Log the search stats\n\n\t\t\tspinner.finish(format!(\"Searched {} directories\", searched).as_str());\n\n\n\n\t\t\tprintln!(\" {} Found {} {}\", \"i \".blue(), found.len(), plural(found.len(), \"project\", \"projects\"));\n\n\n\n\t\t\tif ignored.len() > 0 {\n\n\t\t\t\tprintln!(\" {} Ignored {} {}\", \"i \".blue(), ignored.len(), plural(ignored.len(), \"directory\", \"directories\"));\n\n\t\t\t}\n\n\n\n\t\t\treturn found;\n\n\t\t}\n\n\n\n\t\t// If we're still going, display the progress\n\n\t\tsearched += 
1;\n\n\t\tspinner.update(format!(\"Searching {} directories\", searched).as_str());\n\n\t}\n\n}\n\n\n", "file_path": "src/find_paths.rs", "rank": 36, "score": 1.2208906339831378 }, { "content": "\t// Find the paths that should be removed\n\n\tlet (remove, remove_size) = filter_paths::filter(stats, settings.all);\n\n\n\n\t// Verify paths to remove\n\n\tprintln!(\"Ready to remove {} of unnecessary files\", format_size(remove_size).cyan().bold());\n\n\tprintln!(\"{}\", \"ALL CONTENTS OF THESE DIRECTORIES WILL BE DELETED\".white().on_red().bold());\n\n\tfor path in &remove { println!(\" {}\", path.display()); }\n\n\n\n\tif !settings.force {\n\n\t\tloop {\n\n\t\t\tprint!(\"Do you want to continue? (y/n) \");\n\n\t\t\tlet _ = stdout().flush();\n\n\n\n\t\t\tlet mut input = String::new();\n\n\t\t\tstdin().read_line(&mut input).unwrap();\n\n\t\t\tlet input = input.trim();\n\n\n\n\t\t\tif input == \"n\" { return; }\n\n\t\t\tif input == \"y\" { break; }\n\n\t\t\tprintln!(\" {}\", \"Please enter either 'y' or 'n'\".yellow());\n\n\t\t}\n\n\t}\n\n\n\n\t// Delete directories\n\n\tremove_paths::remove(remove);\n\n}\n", "file_path": "src/main.rs", "rank": 37, "score": 0.967943916131818 } ]
Rust
src/connectivity/bluetooth/profiles/bt-avrcp/src/metrics/mod.rs
csrpi/fuchsia
2f015594dcb4c13aa51eee305ad561078f1f9b7f
use { fuchsia_bluetooth::types::PeerId, fuchsia_inspect::{self as inspect, NumericProperty}, fuchsia_inspect_derive::Inspect, parking_lot::Mutex, std::{collections::HashSet, sync::Arc}, }; use crate::profile::{AvrcpControllerFeatures, AvrcpTargetFeatures}; pub const METRICS_NODE_NAME: &str = "metrics"; #[derive(Default, Inspect)] struct PeerSupportMetrics { target_peers_supporting_browsing: inspect::UintProperty, target_peers_supporting_cover_art: inspect::UintProperty, controller_peers_supporting_browsing: inspect::UintProperty, controller_peers_supporting_cover_art: inspect::UintProperty, distinct_target_peers_supporting_browsing: inspect::UintProperty, distinct_target_peers_supporting_cover_art: inspect::UintProperty, distinct_controller_peers_supporting_browsing: inspect::UintProperty, distinct_controller_peers_supporting_cover_art: inspect::UintProperty, #[inspect(skip)] tg_browse_peers: HashSet<PeerId>, #[inspect(skip)] tg_cover_art_peers: HashSet<PeerId>, #[inspect(skip)] ct_browse_peers: HashSet<PeerId>, #[inspect(skip)] ct_cover_art_peers: HashSet<PeerId>, } #[derive(Default, Inspect)] struct MetricsNodeInner { connection_errors: inspect::UintProperty, control_connections: inspect::UintProperty, browse_connections: inspect::UintProperty, distinct_peers: inspect::UintProperty, #[inspect(skip)] distinct_peers_set: HashSet<PeerId>, control_channel_collisions: inspect::UintProperty, support_node: PeerSupportMetrics, inspect_node: inspect::Node, } impl MetricsNodeInner { fn check_distinct_peer(&mut self, id: PeerId) { if self.distinct_peers_set.insert(id) { self.distinct_peers.add(1); } } fn controller_supporting_browsing(&mut self, id: PeerId) { self.support_node.controller_peers_supporting_browsing.add(1); if self.support_node.ct_browse_peers.insert(id) { self.support_node.distinct_controller_peers_supporting_browsing.add(1); } } fn controller_supporting_cover_art(&mut self, id: PeerId) { self.support_node.controller_peers_supporting_cover_art.add(1); if 
self.support_node.ct_cover_art_peers.insert(id) { self.support_node.distinct_controller_peers_supporting_cover_art.add(1); } } fn target_supporting_browsing(&mut self, id: PeerId) { self.support_node.target_peers_supporting_browsing.add(1); if self.support_node.tg_browse_peers.insert(id) { self.support_node.distinct_target_peers_supporting_browsing.add(1); } } fn target_supporting_cover_art(&mut self, id: PeerId) { self.support_node.target_peers_supporting_cover_art.add(1); if self.support_node.tg_cover_art_peers.insert(id) { self.support_node.distinct_target_peers_supporting_cover_art.add(1); } } } #[derive(Clone, Default, Inspect)] pub struct MetricsNode { #[inspect(forward)] inner: Arc<Mutex<MetricsNodeInner>>, } impl MetricsNode { pub fn new_peer(&self, id: PeerId) { self.inner.lock().check_distinct_peer(id); } pub fn connection_error(&self) { self.inner.lock().connection_errors.add(1); } pub fn control_connection(&self) { self.inner.lock().control_connections.add(1); } pub fn browse_connection(&self) { self.inner.lock().browse_connections.add(1); } pub fn control_collision(&self) { self.inner.lock().control_channel_collisions.add(1); } pub fn controller_features(&self, id: PeerId, features: AvrcpControllerFeatures) { let mut inner = self.inner.lock(); if features.contains(AvrcpControllerFeatures::SUPPORTSBROWSING) { inner.controller_supporting_browsing(id); } if features.supports_cover_art() { inner.controller_supporting_cover_art(id); } } pub fn target_features(&self, id: PeerId, features: AvrcpTargetFeatures) { let mut inner = self.inner.lock(); if features.contains(AvrcpTargetFeatures::SUPPORTSBROWSING) { inner.target_supporting_browsing(id); } if features.contains(AvrcpTargetFeatures::SUPPORTSCOVERART) { inner.target_supporting_cover_art(id); } } } #[cfg(test)] mod tests { use super::*; use {fuchsia_inspect::assert_inspect_tree, fuchsia_inspect_derive::WithInspect}; #[test] fn multiple_peers_connection_updates_to_shared_node() { let inspect = 
inspect::Inspector::new(); let metrics = MetricsNode::default().with_inspect(inspect.root(), "metrics").unwrap(); let (id1, metrics1) = (PeerId(2220), metrics.clone()); let (id2, metrics2) = (PeerId(7982), metrics.clone()); assert_inspect_tree!(inspect, root: { metrics: contains { connection_errors: 0u64, control_connections: 0u64, browse_connections: 0u64, distinct_peers: 0u64, control_channel_collisions: 0u64, } }); metrics1.new_peer(id1); metrics1.connection_error(); metrics1.new_peer(id2); metrics2.control_connection(); assert_inspect_tree!(inspect, root: { metrics: contains { connection_errors: 1u64, control_connections: 1u64, browse_connections: 0u64, distinct_peers: 2u64, control_channel_collisions: 0u64, } }); metrics1.connection_error(); metrics1.connection_error(); metrics1.control_connection(); assert_inspect_tree!(inspect, root: { metrics: contains { connection_errors: 3u64, control_connections: 2u64, browse_connections: 0u64, distinct_peers: 2u64, control_channel_collisions: 0u64, } }); metrics1.new_peer(id1); metrics1.control_collision(); metrics1.control_connection(); metrics1.browse_connection(); assert_inspect_tree!(inspect, root: { metrics: contains { connection_errors: 3u64, control_connections: 3u64, browse_connections: 1u64, distinct_peers: 2u64, control_channel_collisions: 1u64, } }); } #[test] fn controller_peers_service_updates() { let inspect = inspect::Inspector::new(); let metrics = MetricsNode::default().with_inspect(inspect.root(), "metrics").unwrap(); let id1 = PeerId(1102); let tg_service1 = AvrcpTargetFeatures::empty(); let ct_service1 = AvrcpControllerFeatures::empty(); metrics.controller_features(id1, ct_service1); metrics.target_features(id1, tg_service1); assert_inspect_tree!(inspect, root: { metrics: contains { target_peers_supporting_browsing: 0u64, distinct_target_peers_supporting_browsing: 0u64, target_peers_supporting_cover_art: 0u64, distinct_target_peers_supporting_cover_art: 0u64, controller_peers_supporting_browsing: 
0u64, distinct_controller_peers_supporting_browsing: 0u64, controller_peers_supporting_cover_art: 0u64, distinct_controller_peers_supporting_cover_art: 0u64, } }); let id2 = PeerId(1102); let ct_service2 = AvrcpControllerFeatures::all(); let tg_service2 = AvrcpTargetFeatures::all(); metrics.controller_features(id2, ct_service2); metrics.target_features(id2, tg_service2); assert_inspect_tree!(inspect, root: { metrics: contains { target_peers_supporting_browsing: 1u64, distinct_target_peers_supporting_browsing: 1u64, target_peers_supporting_cover_art: 1u64, distinct_target_peers_supporting_cover_art: 1u64, controller_peers_supporting_browsing: 1u64, distinct_controller_peers_supporting_browsing: 1u64, controller_peers_supporting_cover_art: 1u64, distinct_controller_peers_supporting_cover_art: 1u64, } }); metrics.controller_features(id2, ct_service2); metrics.target_features(id2, tg_service2); assert_inspect_tree!(inspect, root: { metrics: contains { target_peers_supporting_browsing: 2u64, distinct_target_peers_supporting_browsing: 1u64, target_peers_supporting_cover_art: 2u64, distinct_target_peers_supporting_cover_art: 1u64, controller_peers_supporting_browsing: 2u64, distinct_controller_peers_supporting_browsing: 1u64, controller_peers_supporting_cover_art: 2u64, distinct_controller_peers_supporting_cover_art: 1u64, } }); } }
use { fuchsia_bluetooth::types::PeerId, fuchsia_inspect::{self as inspect, NumericProperty}, fuchsia_inspect_derive::Inspect, parking_lot::Mutex, std::{collections::HashSet, sync::Arc}, }; use crate::profile::{AvrcpControllerFeatures, AvrcpTargetFeatures}; pub const METRICS_NODE_NAME: &str = "metrics"; #[derive(Default, Inspect)] struct PeerSupportMetrics { target_peers_supporting_browsing: inspect::UintProperty, target_peers_supporting_cover_art: inspect::UintProperty, controller_peers_supporting_browsing: inspect::UintProperty, controller_peers_supporting_cover_art: inspect::UintProperty, distinct_target_peers_supporting_browsing: inspect::UintProperty, distinct_target_peers_supporting_cover_art: inspect::UintProperty, distinct_controller_peers_supporting_browsing: inspect::UintProperty, distinct_controller_peers_supporting_cover_art: inspect::UintProperty, #[inspect(skip)] tg_browse_peers: HashSet<PeerId>, #[inspect(skip)] tg_cover_art_peers: HashSet<PeerId>, #[inspect(skip)] ct_browse_peers: HashSet<PeerId>, #[inspect(skip)] ct_cover_art_peers: HashSet<PeerId>, } #[derive(Default, Inspect)] struct MetricsNodeInner { connection_errors: inspect::UintProperty, control_connections: inspect::UintProperty, browse_connections: inspect::UintProperty, distinct_peers: inspect::UintProperty, #[inspect(skip)] distinct_peers_set: HashSet<PeerId>, control_channel_collisions: inspect::UintProperty, support_node: PeerSupportMetrics, inspect_node: inspect::Node, } impl MetricsNodeInner { fn check_distinct_peer(&mut self, id: PeerId) { if self.distinct_peers_set.insert(id) { self.distinct_peers.add(1); } } fn controller_supporting_browsing(&mut self, id: PeerId) { self.support_node.controller_peers_supporting_browsing.add(1); if self.support_node.ct_browse_peers.insert(id) { self.support_node.distinct_controller_peers_supporting_browsing.add(1); } } fn controller_supporting_cover_art(&mut self, id: PeerId) { self.support_node.controller_peers_supporting_cover_art.add(1); if 
self.support_node.ct_cover_art_peers.insert(id) { self.support_node.distinct_controller_peers_supporting_cover_art.add(1); } } fn target_supporting_browsing(&mut self, id: PeerId) { self.support_node.target_peers_supporting_browsing.add(1); if self.support_node.tg_browse_peers.insert(id) { self.support_node.distinct_target_peers_supporting_browsing.add(1); } } fn target_supporting_cover_art(&mut self, id: PeerId) { self.support_node.target_peers_supporting_cover_art.add(1); if self.support_node.tg_cover_art_peers.insert(id) { self.support_node.distinct_target_peers_supporting_cover_art.add(1); } } } #[derive(Clone, Default, Inspect)] pub struct MetricsNode { #[inspect(forward)] inner: Arc<Mutex<MetricsNodeInner>>, } impl MetricsNode { pub fn new_peer(&self, id: PeerId) { self.inner.lock().check_distinct_peer(id); } pub fn connection_error(&self) { self.inner.lock().connection_errors.add(1); } pub fn control_connection(&self) { self.inner.lock().control_connections.add(1); } pub fn browse_connection(&self) { self.inner.lock().browse_connections.add(1); } pub fn control_collision(&self) { self.inner.lock().control_channel_collisions.add(1); } pub fn controller_features(&self, id: PeerId, features: AvrcpControllerFeatures) { let mut inner = self.inner.lock(); if features.contains(AvrcpControllerFeatures::SUPPORTSBROWSING) { inner.controller_supporting_browsing(id); } if features.supports_cover_art() { inner.controller_supporting_cover_art(id); } } pub fn target_features(&self, id: PeerId, features: AvrcpTargetFeatures) { let mut inner = self.inner.lock(); if features.contains(AvrcpTargetFeatures::SUPPORTSBROWSING) { inner.target_supporting_browsing(id); } if features.contains(AvrcpTargetFeatures::SUPPORTSCOVERART) { inner.target_supporting_cover_art(id); } } } #[cfg(test)] mod tests { use super::*; use {fuchsia_inspect::assert_inspect_tree, fuchsia_inspect_derive::WithInspect}; #[test] fn multiple_peers_connection_updates_to_shared_node() { let inspect = 
inspect::Inspector::new(); let metrics = MetricsNode::default().with_inspect(inspect.root(), "metrics").unwrap(); let (id1, metrics1) = (PeerId(2220), metrics.clone()); let (id2, metrics2) = (PeerId(7982), metrics.clone()); assert_inspect_tree!(inspect, root: { metrics: contains { connection_errors: 0u64, control_connections: 0u64, browse_connections: 0u64, distinct_peers: 0u64, control_channel_collisions: 0u64, } }); metrics1.new_peer(id1); metrics1.connection_error(); metrics1.new_peer(id2); metric
r(id1); metrics1.control_collision(); metrics1.control_connection(); metrics1.browse_connection(); assert_inspect_tree!(inspect, root: { metrics: contains { connection_errors: 3u64, control_connections: 3u64, browse_connections: 1u64, distinct_peers: 2u64, control_channel_collisions: 1u64, } }); } #[test] fn controller_peers_service_updates() { let inspect = inspect::Inspector::new(); let metrics = MetricsNode::default().with_inspect(inspect.root(), "metrics").unwrap(); let id1 = PeerId(1102); let tg_service1 = AvrcpTargetFeatures::empty(); let ct_service1 = AvrcpControllerFeatures::empty(); metrics.controller_features(id1, ct_service1); metrics.target_features(id1, tg_service1); assert_inspect_tree!(inspect, root: { metrics: contains { target_peers_supporting_browsing: 0u64, distinct_target_peers_supporting_browsing: 0u64, target_peers_supporting_cover_art: 0u64, distinct_target_peers_supporting_cover_art: 0u64, controller_peers_supporting_browsing: 0u64, distinct_controller_peers_supporting_browsing: 0u64, controller_peers_supporting_cover_art: 0u64, distinct_controller_peers_supporting_cover_art: 0u64, } }); let id2 = PeerId(1102); let ct_service2 = AvrcpControllerFeatures::all(); let tg_service2 = AvrcpTargetFeatures::all(); metrics.controller_features(id2, ct_service2); metrics.target_features(id2, tg_service2); assert_inspect_tree!(inspect, root: { metrics: contains { target_peers_supporting_browsing: 1u64, distinct_target_peers_supporting_browsing: 1u64, target_peers_supporting_cover_art: 1u64, distinct_target_peers_supporting_cover_art: 1u64, controller_peers_supporting_browsing: 1u64, distinct_controller_peers_supporting_browsing: 1u64, controller_peers_supporting_cover_art: 1u64, distinct_controller_peers_supporting_cover_art: 1u64, } }); metrics.controller_features(id2, ct_service2); metrics.target_features(id2, tg_service2); assert_inspect_tree!(inspect, root: { metrics: contains { target_peers_supporting_browsing: 2u64, 
distinct_target_peers_supporting_browsing: 1u64, target_peers_supporting_cover_art: 2u64, distinct_target_peers_supporting_cover_art: 1u64, controller_peers_supporting_browsing: 2u64, distinct_controller_peers_supporting_browsing: 1u64, controller_peers_supporting_cover_art: 2u64, distinct_controller_peers_supporting_cover_art: 1u64, } }); } }
s2.control_connection(); assert_inspect_tree!(inspect, root: { metrics: contains { connection_errors: 1u64, control_connections: 1u64, browse_connections: 0u64, distinct_peers: 2u64, control_channel_collisions: 0u64, } }); metrics1.connection_error(); metrics1.connection_error(); metrics1.control_connection(); assert_inspect_tree!(inspect, root: { metrics: contains { connection_errors: 3u64, control_connections: 2u64, browse_connections: 0u64, distinct_peers: 2u64, control_channel_collisions: 0u64, } }); metrics1.new_pee
function_block-random_span
[]
Rust
src/record.rs
marcsch/seq_io
3d461a3651fb975cd509fd7f580f1b44ccd9b9d3
use memchr::memchr; use std::borrow::Cow; use std::io; use std::str; pub use crate::core::{QualRecordPosition, SeqRecordPosition}; pub trait BaseRecord { fn head(&self) -> &[u8]; fn seq(&self) -> &[u8]; fn full_seq(&self) -> Cow<[u8]>; fn full_seq_given<'s, F>(&'s self, owned_fn: F) -> Cow<'s, [u8]> where F: FnOnce() -> &'s mut Vec<u8>; fn num_seq_lines(&self) -> usize; fn has_quality(&self) -> bool; fn opt_qual(&self) -> Option<&[u8]>; fn opt_full_qual(&self) -> Option<Cow<[u8]>>; fn opt_full_qual_given<'s, F>(&'s self, owned_fn: F) -> Option<Cow<'s, [u8]>> where F: FnOnce() -> &'s mut Vec<u8>; fn num_qual_lines(&self) -> usize; fn write<W>(&self, writer: W) -> io::Result<()> where W: io::Write; #[inline] fn id_bytes(&self) -> &[u8] { let head = self.head(); if let Some(pos) = memchr(b' ', head) { return &head[..pos]; } head } #[inline] fn id(&self) -> Result<&str, str::Utf8Error> { str::from_utf8(self.id_bytes()) } #[inline] fn desc_bytes(&self) -> Option<&[u8]> { let head = self.head(); if let Some(pos) = memchr(b' ', head) { return Some(&head[pos + 1..]); } None } #[inline] fn desc(&self) -> Option<Result<&str, str::Utf8Error>> { self.desc_bytes().map(str::from_utf8) } #[inline] fn id_desc_bytes(&self) -> (&[u8], Option<&[u8]>) { let head = self.head(); if let Some(pos) = memchr(b' ', head) { return (&head[..pos], Some(&head[pos + 1..])); } (head, None) } #[inline] fn id_desc(&self) -> Result<(&str, Option<&str>), str::Utf8Error> { let (id, desc) = self.id_desc_bytes(); Ok((str::from_utf8(id)?, desc.map(str::from_utf8).transpose()?)) } } impl<'a, R> BaseRecord for &'a R where R: BaseRecord, { fn head(&self) -> &[u8] { (**self).head() } fn seq(&self) -> &[u8] { (**self).seq() } fn full_seq(&self) -> Cow<[u8]> { (**self).full_seq() } fn full_seq_given<'s, F>(&'s self, owned_fn: F) -> Cow<'s, [u8]> where F: FnOnce() -> &'s mut Vec<u8>, { (**self).full_seq_given(owned_fn) } fn num_seq_lines(&self) -> usize { (**self).num_seq_lines() } fn id_bytes(&self) -> &[u8] { 
(**self).id_bytes() } fn id(&self) -> Result<&str, std::str::Utf8Error> { (**self).id() } fn desc_bytes(&self) -> Option<&[u8]> { (**self).desc_bytes() } fn desc(&self) -> Option<Result<&str, std::str::Utf8Error>> { (**self).desc() } fn id_desc_bytes(&self) -> (&[u8], Option<&[u8]>) { (**self).id_desc_bytes() } fn id_desc(&self) -> Result<(&str, Option<&str>), std::str::Utf8Error> { (**self).id_desc() } fn has_quality(&self) -> bool { (**self).has_quality() } fn opt_qual(&self) -> Option<&[u8]> { (**self).opt_qual() } fn opt_full_qual(&self) -> Option<Cow<[u8]>> { (**self).opt_full_qual() } fn opt_full_qual_given<'s, F: FnOnce() -> &'s mut Vec<u8>>( &'s self, owned_fn: F, ) -> Option<Cow<'s, [u8]>> { (**self).opt_full_qual_given(owned_fn) } fn num_qual_lines(&self) -> usize { (**self).num_qual_lines() } fn write<W: io::Write>(&self, writer: W) -> io::Result<()> { (**self).write(writer) } }
use memchr::memchr; use std::borrow::Cow; use std::io; use std::str; pub use crate::core::{QualRecordPosition, SeqRecordPosition}; pub trait BaseRecord { fn head(&self) -> &[u8]; fn seq(&self) -> &[u8]; fn full_seq(&self) -> Cow<[u8]>; fn full_seq_given<'s, F>(&'s self, owned_fn: F) -> Cow<'s, [u8]> where F: FnOnce() -> &'s mut Vec<u8>; fn num_seq_lines(&self) -> usize; fn has_quality(&self) -> bool; fn opt_qual(&self) -> Option<&[u8]>; fn opt_full_qual(&self) -> Option<Cow<[u8]>>; fn opt_full_qual_given<'s, F>(&'s self, owned_fn: F) -> Option<Cow<'s, [u8]>> where F: FnOnce() -> &'s mut Vec<u8>; fn num_qual_lines(&self) -> usize; fn write<W>(&self, writer: W) -> io::Result<()> where W: io::Write; #[inline] fn id_bytes(&self) -> &[u8] { let head = self.head(); if let Some(pos) = memchr(b' ', head) { return &head[..pos]; } head } #[inline] fn id(&self) -> Result<&str, str::Utf8Error> { str::from_utf8(self.id_bytes()) } #[inline] fn desc_bytes(&self) -> Option<&[u8]> { let head = self.head(); if let Some(pos) = memchr(b' ', head) { return Some(&head[pos + 1..]); } None } #[inline] fn desc(&self) -> Option<Result<&str, str::Utf8Error>> { self.desc_bytes().map(str::from_utf8) } #[inline] fn id_desc_bytes(&self) -> (&[u8], Option<&[u8]>) { let head = self.head(); if let Some(pos) = memchr(b' ', head) { return (&head[..pos], Some(&head[pos + 1..])); } (head, None) } #[inline] fn id_desc(&self) -> Result<(&str, Option<&str>), str::Utf8Error> { let (id, desc) = self.id_desc_bytes(); Ok((str::from_utf8(id)?, desc.map(str::from_utf8).transpose()?)) } } impl<'a, R> BaseRecord for &'a R where R: BaseRecord, { fn head(&self) -> &[u8] { (**self).head() } fn seq(&self) -> &[u8] { (**self).seq() } fn full_seq(&self) -> Cow<[u8]> { (**self).full_seq() } fn full_seq_given<'s, F>(&'s self, owned_fn: F) -> Cow<'s, [u8]> where F: FnOnce() -> &'s mut Vec<u8>, { (**self).full_seq_given(owned_fn) } fn num_seq_lines(&self) -> usize { (**self).num_seq_lines() } fn id_bytes(&self) -> &[u8] { 
(**self).id_bytes() } fn id(&self) -> Result<&str, std::str::Utf8Error> { (**self).id() } fn desc_bytes(&self) -> Option<&[u8]> { (**self).desc_bytes() } fn desc(&self) -> Option<Result<&str, std::str::Utf8Error>> { (**self).desc() } fn id_desc_bytes(&self) -> (&[u8], Option<&[u8]>) { (**self).id_desc_bytes() } fn id_desc(&self) -> Result<(&str, Option<&str>), std::str::Utf8Error> { (**self).id_desc() } fn has_quality(&self) -> bool { (**self).has_quality() } fn opt_qual(&self) -> Option<&[u8]> { (**self).opt_qual() } fn opt_full_qual(&self) -> Option<Cow<[u8]>> { (**self).opt_full_qual() }
fn num_qual_lines(&self) -> usize { (**self).num_qual_lines() } fn write<W: io::Write>(&self, writer: W) -> io::Result<()> { (**self).write(writer) } }
fn opt_full_qual_given<'s, F: FnOnce() -> &'s mut Vec<u8>>( &'s self, owned_fn: F, ) -> Option<Cow<'s, [u8]>> { (**self).opt_full_qual_given(owned_fn) }
function_block-full_function
[ { "content": "#[inline]\n\npub fn write_wrap<W, H>(mut writer: W, head: H, seq: &[u8], wrap: usize) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n H: HeadWriter,\n\n{\n\n write_head(&mut writer, head)?;\n\n write_wrap_seq(writer, seq, wrap)\n\n}\n\n\n\n/// Writes data to the FASTA format. Wraps the sequence to produce multi-line FASTA\n\n/// with a maximum width specified by the `wrap` parameter. Accepts a sequence\n\n/// iterator.\n", "file_path": "src/fasta/write.rs", "rank": 0, "score": 271949.6867511525 }, { "content": "#[inline]\n\npub fn write_wrap_seq<W>(mut writer: W, seq: &[u8], wrap: usize) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n{\n\n assert!(wrap > 0);\n\n for chunk in seq.chunks(wrap) {\n\n writer.write_all(chunk)?;\n\n writer.write_all(b\"\\n\")?;\n\n }\n\n Ok(())\n\n}\n\n\n\n/// Writes the sequence line from an iterator of lines.\n", "file_path": "src/fasta/write.rs", "rank": 1, "score": 256482.66336053016 }, { "content": "#[inline]\n\npub fn write<W, H>(writer: W, head: H, seq: &[u8], qual: &[u8]) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n H: HeadWriter,\n\n{\n\n write_iter(writer, head, Some(seq), Some(qual))\n\n}\n\n\n\n/// Helper function for writing data (not necessarily stored in a `Record` instance)\n\n/// to the FASTQ format. In contrast to [`write`](write()), this\n\n/// function allows specifying sequence and quality iterators.\n", "file_path": "src/fastq/write.rs", "rank": 2, "score": 244185.29592646527 }, { "content": "#[inline]\n\npub fn write<W, H>(writer: W, head: H, seq: &[u8]) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n H: HeadWriter,\n\n{\n\n write_iter(writer, head, Some(seq))\n\n}\n\n\n\n/// Helper function for writing data (not necessarily stored in a `Record` instance)\n\n/// to the FASTA format. 
In contrast to [`write`](write()), this\n\n/// function accepts a sequence iterator.\n", "file_path": "src/fasta/write.rs", "rank": 3, "score": 241483.09842729807 }, { "content": "/// This function reads record sets and processes them in parallel threads.\n\n///\n\n/// * It takes a [`RecordSetReader`](RecordSetReader), which reads data into\n\n/// record sets in a background thread.\n\n/// * These are then sent to `n_workers` worker threads, where the heavy work\n\n/// is done in the `work` closure.\n\n/// * Once ready, the record sets and work results are sent to the main thread\n\n/// and provided to the `func` closure. The won't necessarily arrive in the\n\n/// same order as they were read.\n\npub fn read_process_recordsets<R, W, F, O, Out>(\n\n reader: R,\n\n n_workers: u32,\n\n queue_len: usize,\n\n work: W,\n\n func: F,\n\n) -> Out\n\nwhere\n\n R: RecordSetReader + Send,\n\n R::RecordSet: Default + Send,\n\n O: Default + Send,\n\n W: Send + Sync + Fn(&mut R::RecordSet, &mut O),\n\n F: FnOnce(ParallelDataSets<R::RecordSet, R::Err, O>) -> Out,\n\n{\n\n read_process_recordsets_init(|| Ok::<_, ()>(reader), n_workers, queue_len, work, func).unwrap()\n\n}\n\n\n", "file_path": "src/parallel.rs", "rank": 4, "score": 226318.36745128353 }, { "content": "#[inline]\n\npub fn write_wrap_iter<'a, W, H, S>(mut writer: W, head: H, seq: S, wrap: usize) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n H: HeadWriter,\n\n S: IntoIterator<Item = &'a [u8]>,\n\n{\n\n write_head(&mut writer, head)?;\n\n write_wrap_seq_iter(writer, seq, wrap)\n\n}\n\n\n\n/// Writes only the sequence line.\n", "file_path": "src/fasta/write.rs", "rank": 5, "score": 225245.56723727885 }, { "content": "#[inline]\n\npub fn write_head<W, H>(writer: W, head: H) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n H: HeadWriter,\n\n{\n\n head.write_head(writer, b'>')\n\n}\n\n\n\n/// Writes only the sequence line.\n", "file_path": "src/fasta/write.rs", "rank": 6, "score": 225016.82022212044 }, { "content": 
"#[inline]\n\npub fn write_seq<W>(writer: W, seq: &[u8]) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n{\n\n write_seq_iter(writer, Some(seq))\n\n}\n\n\n\n/// Writes the sequence line, and wraps the output to a maximum width specified by `wrap`.\n", "file_path": "src/fasta/write.rs", "rank": 7, "score": 221779.96376841364 }, { "content": "#[inline]\n\npub fn write_iter<W, H, S, L>(mut writer: W, head: H, seq: S) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n H: HeadWriter,\n\n S: IntoIterator<Item = L>,\n\n L: Borrow<[u8]>,\n\n{\n\n write_head(&mut writer, head)?;\n\n write_seq_iter(writer, seq)\n\n}\n\n\n\n/// Writes data to the FASTA format. Wraps the sequence to produce multi-line FASTA\n\n/// with a maximum width specified by the `wrap` parameter.\n", "file_path": "src/fasta/write.rs", "rank": 8, "score": 215946.50869631552 }, { "content": "/// Like [`read_process_recordsets`](read_process_recordsets), but additionally\n\n/// allows initiating the reader in the background thread using a closure\n\n/// (`reader_init`).\n\n/// This is useful for readers, which don't implement `Send`.\n\n/// The `reader_init` closure has to return a result. Errors are returned from\n\n/// the main function witout being mixed with reading errors. 
This may lead to\n\n/// nested `Result` being returned if the `func` closure returns `Result`.\n\npub fn read_process_recordsets_init<R, Ri, Ei, W, F, O, Out>(\n\n reader_init: Ri,\n\n n_workers: u32,\n\n queue_len: usize,\n\n work: W,\n\n func: F,\n\n) -> Result<Out, Ei>\n\nwhere\n\n R: RecordSetReader,\n\n Ri: Send + FnOnce() -> Result<R, Ei>,\n\n R::RecordSet: Default + Send,\n\n O: Default + Send,\n\n W: Send + Sync + Fn(&mut R::RecordSet, &mut O),\n\n F: FnOnce(ParallelDataSets<R::RecordSet, R::Err, O>) -> Out,\n\n Ei: Send,\n\n{\n\n let (done_send, done_recv) = mpsc::sync_channel(queue_len);\n\n let (empty_send, empty_recv) = mpsc::sync_channel(queue_len);\n\n\n\n crossbeam::scope(|scope| {\n", "file_path": "src/parallel.rs", "rank": 9, "score": 203906.16963043966 }, { "content": "/// Using this function currently does not work due to a\n\n/// [compiler bug](https://github.com/rust-lang/rust/issues/62529).\n\n///\n\n/// [`read_process_fasta_records`](read_process_fasta_records),\n\n/// [`read_process_fastq_records`](read_process_fastq_records) and\n\n/// [`read_process_fastx_records`](read_process_fastx_records)\n\n/// provide the same functionality for now\n\n/// (implemented using [`parallel_record_impl`](parallel_record_impl) macro).\n\npub fn read_process_records_init<R, Ri, W, F, O, Out, E>(\n\n reader_init: Ri,\n\n n_workers: u32,\n\n queue_len: usize,\n\n work: W,\n\n mut func: F,\n\n) -> Result<Option<Out>, E>\n\nwhere\n\n R: RecordSetReader,\n\n Ri: Send + FnOnce() -> Result<R, E>,\n\n R::RecordSet: Default + Send,\n\n for<'a> &'a R::RecordSet: IntoIterator + Send,\n\n O: Default + Send,\n\n W: Send + Sync + Fn(<&R::RecordSet as IntoIterator>::Item, &mut O),\n\n F: FnMut(<&R::RecordSet as IntoIterator>::Item, &mut O) -> Option<Out>,\n\n E: From<<R as RecordSetReader>::Err> + Send,\n\n{\n\n read_process_recordsets_init(\n\n reader_init,\n\n n_workers,\n", "file_path": "src/parallel.rs", "rank": 10, "score": 203900.9765169961 }, { "content": 
"#[inline]\n\npub fn write_wrap_seq_iter<'a, W, S, L>(mut writer: W, seq: S, wrap: usize) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n S: IntoIterator<Item = L>,\n\n L: Borrow<[u8]>,\n\n{\n\n assert!(wrap > 0);\n\n let mut n_line = 0;\n\n for line in seq {\n\n let mut chunk = line.borrow();\n\n loop {\n\n let remaining = wrap - n_line;\n\n if chunk.len() <= remaining {\n\n writer.write_all(chunk)?;\n\n n_line += chunk.len();\n\n break;\n\n }\n\n // chunk longer than line -> break\n\n let (line, rest) = chunk.split_at(remaining);\n\n chunk = rest;\n\n // println!(\"write {:?}\", line);\n\n writer.write_all(line)?;\n\n writer.write_all(b\"\\n\")?;\n\n n_line = 0;\n\n }\n\n }\n\n writer.write_all(b\"\\n\")\n\n}\n", "file_path": "src/fasta/write.rs", "rank": 11, "score": 201103.7598259808 }, { "content": "#[inline]\n\npub fn write_seq_iter<'a, W, S, L>(mut writer: W, seq: S) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n S: IntoIterator<Item = L>,\n\n L: Borrow<[u8]>,\n\n{\n\n for line in seq {\n\n writer.write_all(line.borrow())?;\n\n }\n\n writer.write_all(b\"\\n\")\n\n}\n\n\n\n/// Writes the sequence line from an iterator (such as `SeqLines`) and wraps the output\n\n/// to a maximum width specified by `wrap`.\n", "file_path": "src/fasta/write.rs", "rank": 12, "score": 193473.5784646408 }, { "content": "#[inline]\n\npub fn write_iter<W, H, S, Ls, Q, Lq>(mut writer: W, head: H, seq: S, qual: Q) -> io::Result<()>\n\nwhere\n\n W: io::Write,\n\n H: HeadWriter,\n\n S: IntoIterator<Item = Ls>,\n\n Ls: Borrow<[u8]>,\n\n Q: IntoIterator<Item = Lq>,\n\n Lq: Borrow<[u8]>,\n\n{\n\n head.write_head(&mut writer, b'@')?;\n\n for line in seq {\n\n writer.write_all(line.borrow())?;\n\n }\n\n writer.write_all(b\"\\n+\\n\")?;\n\n for line in qual {\n\n writer.write_all(line.borrow())?;\n\n }\n\n writer.write_all(b\"\\n\")\n\n}\n", "file_path": "src/fastq/write.rs", "rank": 13, "score": 193338.06800802838 }, { "content": "/// generates 'nrecords' FASTQ records with fixed ID / 
description lengths (20 and 50), but configurable otherwise\n\nfn with_seqlen(nrecords: usize, seq_len: usize, sep_ids: bool, cr: bool) -> Vec<u8> {\n\n gen_fastq(nrecords, 20, 50, seq_len, sep_ids, cr)\n\n}\n\n\n\n// data to be used with parallel readers that require 'static\n\nlazy_static! {\n\n static ref L300: Vec<u8> = with_seqlen(N, 300, false, false);\n\n}\n\n\n\nmacro_rules! bench {\n\n ($c:expr, $name:expr, $data:ident, $code:block) => {\n\n let name = format!(\"fastq {}\", $name);\n\n let id = BenchmarkId::new(name, 0);\n\n $c.bench_with_input(id, $data, move |b, $data| b.iter(|| $code));\n\n };\n\n}\n\n\n\nmacro_rules! fastq {\n\n ($c:expr, $name:expr, $data:ident, $rec:ident, $code:block) => {\n\n bench!($c, $name, $data, {\n", "file_path": "benches/fastq.rs", "rank": 14, "score": 192092.5498523073 }, { "content": "pub fn compare_simple<X, R, P, S>(mut reader: X, mut simple: Reader<R>, match_format: bool)\n\nwhere\n\n R: std::io::Read,\n\n X: seq_io::fastx::dynamic::FastxReader<R, P, S>,\n\n P: seq_io::policy::BufPolicy,\n\n S: seq_io::PositionStore,\n\n{\n\n let mut buf1 = vec![];\n\n let mut buf2 = vec![];\n\n let mut match_format = match_format;\n\n while let Some(result) = reader.next_fastx() {\n\n // println!(\"seq_io: {:?}\", result);\n\n let simple_next = simple.next();\n\n // println!(\"simple: {:?}\", simple_next);\n\n if match_format && simple_next.is_none() {\n\n continue;\n\n }\n\n let simple_result = simple_next.expect(\"Simple reader has no next record\");\n\n if match_format && (simple_result.is_err() || result.is_err()) {\n\n continue;\n", "file_path": "fuzz/fuzz_targets/simple_reader.rs", "rank": 15, "score": 191574.71027477185 }, { "content": "/// Helper trait used to allow supplying either the whole head or separate ID\n\n/// and description parts to the `write_...()` functions in the `fasta` and\n\n/// `fastq` modules.\n\npub trait HeadWriter {\n\n /// Writes the header line to output.\n\n fn write_head<W>(&self, writer: W, 
start_byte: u8) -> io::Result<()>\n\n where\n\n W: io::Write;\n\n}\n\n\n\nimpl<'a> HeadWriter for &'a [u8] {\n\n fn write_head<W>(&self, mut writer: W, start_byte: u8) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n {\n\n write!(writer, \"{}\", start_byte as char)?;\n\n writer.write_all(self)?;\n\n writer.write_all(b\"\\n\")\n\n }\n\n}\n\n\n\nmacro_rules! impl_write_head {\n\n ($t:ty) => {\n", "file_path": "src/helpers.rs", "rank": 16, "score": 187936.6371309554 }, { "content": "fn reader_bench(c: &mut Criterion, name: &str, data: &[u8], single_line: bool) {\n\n let mut group = c.benchmark_group(name);\n\n group.throughput(Throughput::Bytes(data.len() as u64));\n\n\n\n // simple parsing\n\n fasta!(group, \"seqio borrow\", data, r, {\n\n black_box(r);\n\n });\n\n fastx!(group, \"seqio_fastx borrow\", data, r, {\n\n black_box(r);\n\n });\n\n fastx_dynamic!(group, \"seqio_fastx_dynamic borrow\", data, r, {\n\n black_box(r);\n\n });\n\n\n\n if single_line {\n\n fasta_single!(\n\n group,\n\n \"seqio_single_linestore borrow\",\n\n data,\n", "file_path": "benches/fasta.rs", "rank": 17, "score": 160080.1072371492 }, { "content": "/// Recognizes the sequence format of the input and returns an appropriate FASTA\n\n/// or FASTQ reader as [`Box<dyn FastxReader<...>`](FastxReader) or `None` if\n\n/// the input has only empty lines.\n\n///\n\n/// `reader` is a convenience function for the most frequent case. If options\n\n/// need to be changed, use [`ReaderBuilder`](ReaderBuilder). The following two\n\n/// calls are equivalent:\n\n///\n\n/// ```rust ignore\n\n/// // using the reader() function:\n\n/// let reader = seq_io::fastx::dynamic::reader(rdr, multiline_fastq).unwrap();\n\n///\n\n/// // using the builder API:\n\n/// let reader = ReaderBuilder::new()\n\n/// .set_multiline_fastq(multiline_fastq)\n\n/// .from_reader(rdr)\n\n/// .unwrap();\n\n/// ```\n\n///\n\n/// If the first non-empty line starts with `>`, a\n\n/// [`fasta::Reader`](crate::fasta::Reader) is returned. 
If it starts with\n\n/// `@`, [`fastq::Reader`](crate::fastq::Reader) is returned if\n\n/// `multiline_fastq` is `false`, otherwise\n\n/// [`fastq::multiline::Reader`](crate::fastq::multiline::Reader) is returned,\n\n/// If the first non-empty line contains an invalid start byte, an error with\n\n/// `ErrorKind::InvalidStart` will be returned.\n\npub fn reader<'s, R>(\n\n reader: R,\n\n multiline_fastq: bool,\n\n) -> Result<Option<Box<dyn FastxReader<R, StdPolicy, LineStore> + 's>>>\n\nwhere\n\n R: io::Read + 's,\n\n{\n\n ReaderBuilder::new()\n\n .multiline_fastq(multiline_fastq)\n\n .from_reader(reader)\n\n}\n\n\n", "file_path": "src/fastx/dynamic.rs", "rank": 18, "score": 158253.77980144133 }, { "content": "pub fn compare_recset<X, Y, R, P, S>(mut reader1: X, mut reader2: Y)\n\nwhere\n\n R: std::io::Read,\n\n X: seq_io::fastx::dynamic::FastxReader<R, P, S>,\n\n Y: seq_io::fastx::dynamic::FastxReader<R, P, S>,\n\n P: seq_io::policy::BufPolicy,\n\n S: seq_io::PositionStore,\n\n{\n\n let mut buf1 = vec![];\n\n let mut buf2 = vec![];\n\n let mut recset = fastx::dynamic::RecordSet::default();\n\n loop {\n\n let result1 = reader1.read_record_set_fastx(&mut recset);\n\n // compare complete records\n\n //println!(\"recset {:?}\", recset);\n\n for rec1 in &recset {\n\n //println!(\"... -> rec {:?}\", rec1);\n\n let result2 = reader2.next_fastx().expect(\"Reader 2 has no next record\");\n\n //println!(\"... -> direcot result {:?}\", result2);\n\n let rec2 = result2.unwrap_or_else(|e| panic!(\"Reader 2 returned error: {:?}\", e));\n", "file_path": "fuzz/fuzz_targets/simple_reader.rs", "rank": 19, "score": 152685.14231197178 }, { "content": "#[inline]\n\npub fn recognize_format<R, P>(\n\n reader: &mut BufReader<R, P>,\n\n) -> Result<Option<(SeqFormat, (usize, u64))>>\n\nwhere\n\n R: Read,\n\n P: BufPolicy,\n\n{\n\n assert!(reader.capacity() >= 2);\n\n let mut line_offset = 0;\n\n let mut byte_offset = 0;\n\n let ret = 'outer: loop {\n\n if reader.fill_buf()? 
== 0 {\n\n reader.make_room(byte_offset);\n\n // println!(\"make room {}\", byte_offset);\n\n byte_offset = 0;\n\n if reader.fill_buf()? == 0 {\n\n let buf = reader.buffer();\n\n byte_offset += buf.len();\n\n debug_assert!(buf.len() <= 1);\n\n let last = buf.get(0).cloned();\n", "file_path": "src/fastx/recognition.rs", "rank": 20, "score": 151032.6092924311 }, { "content": "pub fn evaluate(data: &[u8]) {\n\n // FASTQ\n\n // normal\n\n let reader = fastq::Reader::with_capacity(data, 3);\n\n let simple_reader = simple_reader::Reader::new_fastq(data, false);\n\n compare_simple(reader, simple_reader, false);\n\n compare_readers(\n\n fastq::Reader::with_capacity(data, 3).set_store::<fastq::RangeStore>(),\n\n fastq::Reader::with_capacity(data, 3).set_store::<fastx::LineStore>(),\n\n );\n\n compare_readers(\n\n fastq::Reader::with_capacity(data, 3).set_store::<fastq::multiline::MultiRangeStore>(),\n\n fastq::Reader::with_capacity(data, 3).set_store::<fastx::LineStore>(),\n\n );\n\n compare_recset(\n\n fastq::Reader::with_capacity(data, 3), \n\n fastq::Reader::with_capacity(data, 3)\n\n );\n\n\n\n // multi-line\n", "file_path": "fuzz/fuzz_targets/_fastq.rs", "rank": 21, "score": 150906.19857563343 }, { "content": "pub fn evaluate(data: &[u8]) {\n\n // FASTA\n\n // normal\n\n let reader = fasta::Reader::with_capacity(data, 3);\n\n let simple_rdr = simple_reader::Reader::new_fasta(data, true);\n\n compare_simple(reader, simple_rdr, false);\n\n compare_readers(\n\n fasta::Reader::with_capacity(data, 3).set_store::<fasta::LineStore>(),\n\n fasta::Reader::with_capacity(data, 3).set_store::<fastx::LineStore>(),\n\n );\n\n compare_readers(\n\n // TODO: line numbers not correct\n\n fasta::Reader::with_capacity(data, 3).set_store::<fasta::LineStore>(),\n\n fasta::Reader::with_capacity(data, 3).set_store::<fastq::RangeStore>(),\n\n );\n\n compare_readers(\n\n fasta::Reader::with_capacity(data, 3).set_store::<fasta::LineStore>(),\n\n fasta::Reader::with_capacity(data, 
3).set_store::<fastq::multiline::MultiRangeStore>(),\n\n );\n\n compare_recset(\n", "file_path": "fuzz/fuzz_targets/_fasta.rs", "rank": 22, "score": 150906.19857563343 }, { "content": "/// generates 'nrecords' FASTA with fixed ID / description lengths (20 and 50), but configurable otherwise\n\nfn with_seqlen(nrecords: usize, seq_len: usize, break_seq: Option<usize>, cr: bool) -> Vec<u8> {\n\n gen_fasta(nrecords, 20, 50, seq_len, break_seq, cr)\n\n}\n\n\n\nmacro_rules! bench {\n\n ($c:expr, $name:expr, $data:ident, $code:block) => {\n\n let name = format!(\"fasta {}\", $name);\n\n let id = BenchmarkId::new(name, 0);\n\n $c.bench_with_input(id, $data, move |b, $data| b.iter(|| $code));\n\n };\n\n}\n\n\n\nmacro_rules! fasta {\n\n ($c:expr, $name:expr, $data:ident, $rec:ident, $code:block) => {\n\n bench!($c, $name, $data, {\n\n let mut reader = fasta::Reader::new($data);\n\n while let Some(r) = reader.next() {\n\n let $rec = r.unwrap();\n\n $code\n\n }\n", "file_path": "benches/fasta.rs", "rank": 23, "score": 150303.98689182315 }, { "content": "/// Trait for FASTX reading.\n\n///\n\n/// Provides the same methods as individual `Reader` types, but in contrast of\n\n/// returning / operating on different `RefRecord` / `RecordSet` /\n\n/// `RecordsIter` ... 
types, types from the `fastx` module are used, allowing\n\n/// for FASTX functionality with dynamic dispatch.\n\npub trait FastxReader<R, P, S>\n\nwhere\n\n R: io::Read,\n\n P: BufPolicy,\n\n S: QualRecordPosition,\n\n{\n\n /// Returns the next [`fastx::RefRecord`](crate::fastx::RefRecord), if any.\n\n fn next_fastx(&mut self) -> Option<Result<RefRecord<S>>>;\n\n\n\n /// Updates a [`fastx::RecordSet`](crate::fastx::RecordSet) with new data.\n\n fn read_record_set_fastx(&mut self, record_set: &mut RecordSet<S>) -> Result<bool>;\n\n\n\n fn read_record_set_exact_fastx(\n\n &mut self,\n\n record_set: &mut crate::fastx::RecordSet<S>,\n\n n_records: usize,\n\n ) -> crate::fastx::Result<bool>;\n\n\n\n /// Returns the sequence format (`SeqFormat::FASTA` or `SeqFormat::FASTQ`)\n\n /// if known. For FASTA / FASTQ readers, the format is known in the\n", "file_path": "src/fastx/dynamic.rs", "rank": 24, "score": 147501.45088812418 }, { "content": "pub fn compare_readers<X, Y, R, P1, P2, S1, S2>(mut reader1: X, mut reader2: Y)\n\nwhere\n\n R: std::io::Read,\n\n X: seq_io::fastx::dynamic::FastxReader<R, P1, S1>,\n\n Y: seq_io::fastx::dynamic::FastxReader<R, P2, S2>,\n\n P1: seq_io::policy::BufPolicy,\n\n P2: seq_io::policy::BufPolicy,\n\n S1: seq_io::PositionStore,\n\n S2: seq_io::PositionStore,\n\n{\n\n let mut buf1 = vec![];\n\n let mut buf2 = vec![];\n\n while let Some(result1) = reader1.next_fastx() {\n\n let result2 = reader2.next_fastx().expect(\"Reader 2 has no next record\");\n\n //println!(\"rdr 1: {:?}\", result1);\n\n //println!(\"rdr 2: {:?}\", result2);\n\n match result1 {\n\n Ok(rec1) => {\n\n let rec2 = result2.unwrap_or_else(|e| panic!(\"Reader 2 returned error: {:?}\", e));\n\n compare_records(rec1, rec2, &mut buf1, &mut buf2);\n", "file_path": "fuzz/fuzz_targets/simple_reader.rs", "rank": 25, "score": 144061.09716798767 }, { "content": "fn compare_records<R1, R2>(rec1: R1, rec2: R2, mut buf1: &mut Vec<u8>, mut buf2: &mut Vec<u8>)\n\nwhere\n\n R1: BaseRecord + 
Debug,\n\n R2: BaseRecord + Debug,\n\n{\n\n //println!(\"rec 1: {:?}\", rec1);\n\n //println!(\"rec 2: {:?}\", rec2);\n\n assert_eq!(rec1.head(), rec2.head());\n\n buf1.clear();\n\n buf2.clear();\n\n let seq1 = rec1.full_seq_given(|| &mut buf1);\n\n let seq2 = rec1.full_seq_given(|| &mut buf2);\n\n assert_eq!(seq1, seq2);\n\n buf1.clear();\n\n buf2.clear();\n\n let q1 = rec1.opt_full_qual_given(|| &mut buf1);\n\n let q2 = rec1.opt_full_qual_given(|| &mut buf2);\n\n assert_eq!(q1, q2);\n\n}\n\n\n", "file_path": "fuzz/fuzz_targets/simple_reader.rs", "rank": 27, "score": 141600.79288995508 }, { "content": "fn bench_readers(c: &mut Criterion, data: &'static [u8]) {\n\n let mut group = c.benchmark_group(\"fastq\");\n\n group.throughput(Throughput::Bytes(data.len() as u64));\n\n\n\n // warm up (first measurement seems not always stable)\n\n fastq!(group, \"seqio discard\", data, r, {\n\n black_box(r);\n\n });\n\n fastq_multi!(group, \"seqio_multi discard\", data, r, {\n\n black_box(r);\n\n });\n\n\n\n // simple parsing\n\n fastq!(group, \"seqio borrow\", data, r, {\n\n black_box(r);\n\n });\n\n fastq_multi!(group, \"seqio_multi borrow\", data, r, {\n\n black_box(r);\n\n });\n\n fastx!(group, \"seqio_fastx borrow\", data, r, {\n", "file_path": "benches/fastq.rs", "rank": 28, "score": 137604.58692252394 }, { "content": "pub trait FastxSeekReader<R, P, S>: FastxReader<R, P, S>\n\nwhere\n\n R: io::Read + io::Seek,\n\n P: BufPolicy,\n\n S: QualRecordPosition,\n\n{\n\n /// Seeks to a specified position. 
Equivalent to the `seek()` method of the\n\n /// individual readers.\n\n fn seek_fastx(&mut self, pos: &crate::Position) -> io::Result<()>;\n\n}\n\n\n\n/// Borrowed iterator of `OwnedRecord`\n\npub struct RecordsIter<'a, R, P, S>\n\nwhere\n\n P: crate::policy::BufPolicy + 'a,\n\n R: std::io::Read + 'a,\n\n S: QualRecordPosition + 'a,\n\n{\n\n rdr: &'a mut dyn FastxReader<R, P, S>,\n\n}\n", "file_path": "src/fastx/dynamic.rs", "rank": 29, "score": 137428.51942045987 }, { "content": "/// FASTX record trait implemented by both `RefRecord` and `OwnedRecord`\n\n/// which adds more methods to [`BaseRecord`](crate::BaseRecord).\n\npub trait Record: BaseRecord {\n\n /// Write the record to the given `io::Write` instance. For FASTA,\n\n /// the sequence is wrapped to produce multi-line FASTA with a\n\n /// maximum width specified by `wrap`.\n\n /// Wrapping FASTQ sequence and quality lines is explicitly not supported\n\n /// because such files are prone to parsing problems.\n\n fn write_wrap<W>(&self, writer: W, wrap: usize) -> io::Result<()>\n\n where\n\n W: io::Write;\n\n\n\n /// Writes the record to an output given a sequence format.\n\n ///\n\n /// FASTA lines can be wrapped by specifying `wrap_fasta = Some(width)`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if `SeqFormat::FASTQ` was specified, but there is no quality\n\n /// information available becase the input is FASTA.\n\n fn write_as<W>(\n\n &self,\n", "file_path": "src/fastx/record.rs", "rank": 30, "score": 131926.11606514562 }, { "content": "/// FASTQ record trait implemented by both `RefRecord` and `OwnedRecord`\n\n/// which adds more methods to [`BaseRecord`](crate::BaseRecord).\n\npub trait Record: BaseRecord {\n\n /// Return the FASTQ quality line as byte slice\n\n fn qual(&self) -> &[u8];\n\n\n\n fn full_qual(&self) -> Cow<[u8]>;\n\n\n\n fn full_qual_given<'s, F>(&'s self, owned_fn: F) -> Cow<'s, [u8]>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n Self: Sized;\n\n\n\n doc_record_check_lengths!(\n\n 
\"use seq_io::fastq::Reader;\",\n\n fn check_lengths(&self) -> Result<&Self, Error>;\n\n );\n\n}\n\n\n\n// TODO: necessary?\n\n\n\n// impl<'a, R: Record> Record for &'a R {\n", "file_path": "src/fastq/record.rs", "rank": 31, "score": 131926.11606514562 }, { "content": "/// FASTA record trait implemented by both `RefRecord` and `OwnedRecord`,\n\n/// which adds more methods to [`BaseRecord`](crate::BaseRecord).\n\npub trait Record: BaseRecord {\n\n /// Writes the record to the given `io::Write` instance.\n\n /// The sequence is wrapped to produce multi-line FASTA with a maximum width\n\n /// specified by `wrap`.\n\n fn write_wrap<W>(&self, writer: W, wrap: usize) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n Self: Sized;\n\n}\n\n\n\nimpl<'a, R: Record> Record for &'a R {\n\n fn write_wrap<W>(&self, writer: W, wrap: usize) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n Self: Sized,\n\n {\n\n (**self).write_wrap(writer, wrap)\n\n }\n\n}\n\n\n", "file_path": "src/fasta/record.rs", "rank": 32, "score": 131926.11606514562 }, { "content": "/// A simple trait required to be implemented for readers fed into the\n\n/// functions in this module.\n\npub trait RecordSetReader {\n\n type RecordSet: Send;\n\n type Err: Send;\n\n fn fill_data(&mut self, record: &mut Self::RecordSet) -> Result<bool, Self::Err>;\n\n}\n\n\n", "file_path": "src/parallel.rs", "rank": 33, "score": 115272.83582025369 }, { "content": "/// Trait for type constructors of line iterators.\n\n///\n\n/// The iterator types cannot be directly specified as associated types of\n\n/// `SeqRecordPosition` / `QaulRecordPosition` because their lifetime would\n\n/// pollute the trait signature.\n\n/// Therefore, this intermediate type is used to generate the final type. 
For\n\n/// more information see this article on the\n\n/// [\"family trait pattern\"](http://lukaskalbertodt.github.io/2018/08/03/solving-the-generalized-streaming-iterator-problem-without-gats.html#workaround-b-hrtbs--the-family-trait-pattern).\n\npub trait LinesIterKind<'a> {\n\n type Out: Iterator<Item = &'a [u8]> + DoubleEndedIterator;\n\n}\n\n\n\n/// Type constructor for [`LineSearchIter`](LineSearchIter).\n\npub struct LineSearchKind;\n\n\n\nimpl<'a> LinesIterKind<'a> for LineSearchKind {\n\n type Out = LineSearchIter<'a>;\n\n}\n\n\n\n/// Type constructor for [`LinePositionIter`](LinePositionIter).\n\npub struct LinePositionIterKind;\n\n\n\nimpl<'a> LinesIterKind<'a> for LinePositionIterKind {\n\n type Out = LinePositionIter<'a>;\n\n}\n\n\n", "file_path": "src/core/position.rs", "rank": 34, "score": 108677.50660998264 }, { "content": "fn readers(c: &mut Criterion) {\n\n bench_readers(c, &L300);\n\n}\n\n\n", "file_path": "benches/fastq.rs", "rank": 35, "score": 107336.88896876585 }, { "content": "fn readers(c: &mut Criterion) {\n\n let data = with_seqlen(N, 300, None, false);\n\n reader_bench(c, \"fasta\", &data, true);\n\n\n\n let data = with_seqlen(N, 300, Some(80), false);\n\n reader_bench(c, \"fasta_multiline\", &data, false);\n\n}\n\n\n", "file_path": "benches/fasta.rs", "rank": 36, "score": 107336.88896876585 }, { "content": "#[inline]\n\nfn init_record<Q: QualRecordPosition>(store: &mut Q, start: usize) {\n\n store.init();\n\n store.set_record_start(start);\n\n}\n\n\n\npub(crate) struct CoreReader<R, P, Q, S>\n\nwhere\n\n R: io::Read,\n\n P: BufPolicy,\n\n Q: QualRecordPosition + Deref<Target = S>,\n\n{\n\n buf_reader: crate::core::BufReader<R, P>,\n\n // Position of current record within current buffer\n\n pos_store: Q,\n\n // only used for multi-line FASTQ\n\n length_diff: isize,\n\n // Current search position within the record (only relevant with read_record_set)\n\n search_pos: Option<SearchPosition>,\n\n // Parsing state\n\n state: State,\n", 
"file_path": "src/core/inner.rs", "rank": 37, "score": 105482.82884639659 }, { "content": "/// Policy that configures how the internal buffer grows upon\n\n/// encountering large sequences that don't fit into the current buffer.\n\npub trait BufPolicy: Send + Sync {\n\n /// Takes the current buffer size in bytes and returns the new\n\n /// size the the buffer should grow to. This function is called every time\n\n /// the buffer has to be enlarged.\n\n fn grow(&mut self, current_size: usize) -> usize;\n\n\n\n /// Returns a buffer limit, if any. Called every time the buffer has to be\n\n /// enlarged. If the new buffer size (as calculated based on the call to\n\n /// `grow()`) exceeds the given limit, the readers will return an error\n\n /// of `ErrorKind::BufferLimit`.\n\n fn limit(&self) -> Option<usize> {\n\n None\n\n }\n\n\n\n /// Combines `grow()` and `limit()` into one call. Takes the current buffer\n\n /// size and returns the new size, unless it is larger than the limit.\n\n fn grow_limited(&mut self, current_size: usize) -> Option<usize> {\n\n let new_size = self.grow(current_size);\n\n if let Some(l) = self.limit() {\n\n if new_size > l {\n", "file_path": "src/policy.rs", "rank": 38, "score": 105214.17719909799 }, { "content": "fn readers_cap(c: &mut Criterion) {\n\n let k = 1024;\n\n let caps = [8 * k, 16 * k, 32 * k, 64 * k, 128 * k, 256 * k];\n\n let lengths = [100, 250, 500, 1000, 10000];\n\n\n\n for &seqlen in &lengths {\n\n let n = N * 250 / seqlen;\n\n let input = with_seqlen(n, seqlen, None, false);\n\n let input = input.as_slice();\n\n let mut group = c.benchmark_group(format!(\"fasta_cap_{}\", seqlen));\n\n group.throughput(Throughput::Bytes(input.len() as u64));\n\n\n\n for &cap in &caps {\n\n let name = format!(\"{} {}k\", seqlen, cap);\n\n bench!(group, name, input, {\n\n let mut reader = seq_io::fasta::Reader::with_capacity(input, cap);\n\n while let Some(r) = reader.next() {\n\n let _ = r.unwrap();\n\n }\n\n });\n\n }\n\n 
}\n\n}\n\n\n\ncriterion_group!(benches, readers, readers_cap);\n\ncriterion_main!(benches);\n", "file_path": "benches/fasta.rs", "rank": 39, "score": 104645.28677602095 }, { "content": "fn readers_cap(c: &mut Criterion) {\n\n let k = 1024;\n\n let caps = [8 * k, 16 * k, 32 * k, 64 * k, 128 * k, 256 * k];\n\n let lengths = [100, 250, 500, 1000, 10000];\n\n\n\n for &seqlen in &lengths {\n\n let n = N * 250 / seqlen;\n\n let input = with_seqlen(n, seqlen, false, false);\n\n let input = input.as_slice();\n\n let mut group = c.benchmark_group(format!(\"fastq_cap_{}\", seqlen));\n\n group.throughput(Throughput::Bytes(input.len() as u64));\n\n\n\n for &cap in &caps {\n\n let name = format!(\"{} {}\", seqlen, cap);\n\n bench!(group, name, input, {\n\n let mut reader = seq_io::fastq::Reader::with_capacity(input, cap);\n\n while let Some(r) = reader.next() {\n\n let _ = r.unwrap();\n\n }\n\n });\n\n }\n\n }\n\n}\n\n\n\ncriterion_group!(benches, readers, readers_cap);\n\ncriterion_main!(benches);\n", "file_path": "benches/fastq.rs", "rank": 40, "score": 104645.28677602095 }, { "content": "/// Recognizes the sequence format of the file and returns an appropriate FASTA\n\n/// or FASTQ reader as [`Box<dyn FastxSeekReader<...>`](FastxSeekReader) or `None` if\n\n/// the input has only empty lines. See [`reader`](reader) for more information\n\n/// about the behaviour.\n\npub fn from_path<'s, P>(\n\n path: P,\n\n multiline_fastq: bool,\n\n) -> Result<Option<Box<dyn FastxSeekReader<File, StdPolicy, LineStore> + 's>>>\n\nwhere\n\n P: AsRef<Path> + 's,\n\n{\n\n ReaderBuilder::new()\n\n .multiline_fastq(multiline_fastq)\n\n .from_path(path)\n\n}\n\n\n\nmacro_rules! 
get_reader {\n\n ($builder:ident, $reader:expr, $ReaderType:ident) => {{\n\n let multiline_fastq = $builder.multiline_fastq;\n\n let mut buf_reader =\n\n BufReader::with_capacity($reader, $builder.capacity).set_policy($builder.buf_policy);\n\n recognize_format(&mut buf_reader)?\n\n .map(|(fmt, (byte, line))| {\n\n let out: Box<dyn $ReaderType<_, _, _>> = match fmt {\n", "file_path": "src/fastx/dynamic.rs", "rank": 41, "score": 103494.67158922587 }, { "content": "/// Trait for objects storing the coordinates of sequence records in the buffer.\n\n///\n\n/// The minimal methods required for FASTA parsing have to be implemented,\n\n/// FASTQ methods are optional (but should be implemented if storing quality\n\n/// data). A position store can of course choose to ignore some data by not\n\n/// doing anything in a method call.\n\npub trait QualRecordPosition: SeqRecordPosition {\n\n type QualLinesKind: for<'a> LinesIterKind<'a>;\n\n\n\n /// Sets the start index of the FASTQ separator byte. This method is *also*\n\n /// called for FASTA records before set_record_end() with the *same*\n\n /// position. The `has_line` argument has the same meaning as there.\n\n fn set_sep_pos(&mut self, _pos: usize, _has_line: bool);\n\n\n\n /// Returns the byte index of the FASTQ record separator\n\n fn sep_pos(&self) -> usize;\n\n\n\n /// Set the start of the quality information. 
Only ever called on FASTQ\n\n /// records.\n\n fn set_qual_start(&mut self, _pos: usize);\n\n\n\n /// Adds another quality line start index (only multi-line FASTQ).\n\n fn add_qual_line_start(&mut self, _pos: usize);\n\n\n\n /// Returns the byte index of the FASTQ quality line start\n\n fn qual_start(&self) -> usize;\n", "file_path": "src/core/position.rs", "rank": 42, "score": 102173.83175524134 }, { "content": "/// Trait for objects storing the coordinates of sequence records in the buffer.\n\n///\n\n/// The minimal methods required for FASTA parsing have to be implemented,\n\n/// FASTQ methods are optional (but should be implemented if storing quality\n\n/// data). A position store can of course choose to ignore some data by not\n\n/// doing anything in a method call.\n\npub trait SeqRecordPosition: Debug + Clone + Default {\n\n type SeqLinesKind: for<'a> LinesIterKind<'a>;\n\n\n\n /// Initialize the record. Called before set_record_start(). This is the\n\n /// place to clear information from the previous record if necessary.\n\n #[inline]\n\n fn init(&mut self) {}\n\n\n\n /// Sets the start index of the record, always called first, may be\n\n /// called multiple times while skipping empty lines.\n\n fn set_record_start(&mut self, start: usize);\n\n\n\n /// Returns the byte index of the record start\n\n fn record_start(&self) -> usize;\n\n\n\n /// Sets the start index of the sequence.\n\n fn set_seq_start(&mut self, pos: usize);\n\n\n\n /// Adds another sequence line start index.\n\n fn add_seq_line_start(&mut self, pos: usize);\n", "file_path": "src/core/position.rs", "rank": 43, "score": 95764.37405155454 }, { "content": "#[test]\n\nfn none_after_err() {\n\n let fq = &b\"@id\\nATGC\"[..];\n\n test_reader!(fq, reader, {\n\n assert!(reader.next().unwrap().is_err());\n\n assert!(reader.next().is_none());\n\n });\n\n}\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 44, "score": 89394.60786946441 }, { "content": "#[test]\n\nfn none_after_err() {\n\n 
let p = seq_io::policy::DoubleUntilLimited::new(2, 3);\n\n let mut reader = make_reader!($ReaderBuilder, &b\">id\\nATGC\\n\"[..], $PositionStore, 3, p);\n\n assert!(reader.next().unwrap().is_err());\n\n assert!(reader.next().is_none());\n\n}\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 45, "score": 89394.60786946441 }, { "content": "#[test]\n\nfn write_head() {\n\n let mut out = vec![];\n\n fasta::write_head(&mut out, b\"id desc\").unwrap();\n\n assert_eq!(&out, b\">id desc\\n\");\n\n}\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 46, "score": 89328.17026792694 }, { "content": "/// generates 'nrecords' FASTQ records with given properties\n\nfn gen_fastq(\n\n nrecords: usize,\n\n id_len: usize,\n\n desc_len: usize,\n\n seq_len: usize,\n\n sep_ids: bool,\n\n cr: bool,\n\n) -> Vec<u8> {\n\n let newline = if cr { b\"\\r\\n\".to_vec() } else { b\"\\n\".to_vec() };\n\n let mut rec: Vec<u8> = vec![];\n\n rec.push(b'@');\n\n let id: Vec<_> = repeat(b'i').take(id_len).collect();\n\n rec.extend(&id);\n\n rec.push(b' ');\n\n rec.extend(repeat(b'd').take(desc_len));\n\n rec.extend(&newline);\n\n\n\n let norm = Normal::new(seq_len as f64, seq_len as f64 * SEQLEN_SD_FRAC).unwrap();\n\n let rng = Isaac64Rng::from_seed([5; 32]);\n\n\n", "file_path": "benches/fastq.rs", "rank": 47, "score": 58484.552599236675 }, { "content": "/// generates 'nrecords' FASTA records with given properties\n\nfn gen_fasta(\n\n nrecords: usize,\n\n id_len: usize,\n\n desc_len: usize,\n\n seq_len: usize,\n\n break_seq: Option<usize>,\n\n cr: bool,\n\n) -> Vec<u8> {\n\n let newline = if cr { b\"\\r\\n\".to_vec() } else { b\"\\n\".to_vec() };\n\n let mut rec: Vec<u8> = vec![];\n\n rec.push(b'>');\n\n rec.extend(repeat(b'i').take(id_len));\n\n rec.push(b' ');\n\n rec.extend(repeat(b'd').take(desc_len));\n\n rec.extend(&newline);\n\n\n\n let norm = Normal::new(seq_len as f64, seq_len as f64 * SEQLEN_SD_FRAC).unwrap();\n\n let rng = Isaac64Rng::from_seed([5; 32]);\n\n\n\n 
rng.sample_iter(&norm)\n", "file_path": "benches/fasta.rs", "rank": 48, "score": 58484.552599236675 }, { "content": "#[test]\n\n#[allow(unused_variables)]\n\nfn no_sep() {\n\n let fq = &b\"\\n@id\\nATGC\\nIII\\nI\"[..];\n\n test_reader!(fq, reader, {\n\n let rec = reader.next().unwrap();\n\n let err = rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), ErrorKind::UnexpectedEnd { pos: _ });\n\n let pos = err.position().unwrap();\n\n let exp_pos = Position::new().set_line(1).set_byte(1).set_record(0).clone();\n\n validate_position!(pos.record_position().unwrap(), exp_pos);\n\n assert_eq!(pos.error_offset().unwrap().line(), 3);\n\n assert_eq!(pos.error_offset().unwrap().byte(), 13);\n\n let exp_pos = Position::new().set_line(4).set_byte(14).set_record(0).clone();\n\n validate_position!(pos.position().unwrap(), exp_pos);\n\n assert!(pos.record_id().unwrap() == \"id\");\n\n });\n\n}\n\n\n\n// same as single-line FASTQ version, but with two newlines at end\n", "file_path": "tests/fastq_common/multiline.rs", "rank": 49, "score": 57034.81936607165 }, { "content": "#[test]\n\n#[should_panic(expected = \"capacity smaller than\")]\n\nfn policy() {\n\n let fq = &b\"@id\\nATGC\\n+\\nIIII\\n\"[..];\n\n for cap in 5..80 {\n\n let policy = seq_io::policy::DoubleUntilLimited::new(2, 5);\n\n let mut reader = make_reader!($ReaderBuilder, fq, $PositionStore, cap, policy);\n\n let res = reader.next().unwrap();\n\n let err = res.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), $ErrorKind::BufferLimit);\n\n assert!(err.position().is_none());\n\n }\n\n}\n\n\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 50, "score": 57034.81936607165 }, { "content": "#[test]\n\n#[allow(unused_variables)]\n\nfn truncated4() {\n\n let fq = &b\"\\n\\n@id\\nATGC\\n+\\n\"[..];\n\n test_reader!(fq, reader, {\n\n let rec = reader.next().unwrap();\n\n let err = rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), 
$ErrorKind::UnexpectedEnd { pos: _ });\n\n let pos = err.position().unwrap();\n\n let exp_pos = _Position::new().set_line(2).set_byte(2).set_record(0).clone();\n\n validate_position!(pos.record_position().unwrap(), exp_pos);\n\n assert_eq!(pos.error_offset().unwrap().line(), 2);\n\n assert_eq!(pos.error_offset().unwrap().byte(), 10);\n\n let exp_pos = _Position::new().set_line(4).set_byte(12).set_record(0).clone();\n\n validate_position!(pos.position().unwrap(), exp_pos);\n\n assert!(pos.record_id().unwrap() == \"id\");\n\n });\n\n}\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 51, "score": 57034.81936607165 }, { "content": "#[test]\n\n#[allow(unused_variables)] // TODO: remove?\n\nfn truncated1() {\n\n let fq = &b\"\\n@id\"[..];\n\n test_reader!(fq, reader, {\n\n let rec = reader.next().unwrap();\n\n let err = rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), $ErrorKind::UnexpectedEnd { pos: _ });\n\n let pos = err.position().unwrap();\n\n let exp_pos = _Position::new().set_line(1).set_byte(1).set_record(0).clone();\n\n validate_position!(pos.record_position().unwrap(), exp_pos);\n\n assert_eq!(pos.error_offset().unwrap().line(), 0);\n\n assert_eq!(pos.error_offset().unwrap().byte(), 2);\n\n let exp_pos = _Position::new().set_line(1).set_byte(3).set_record(0).clone();\n\n validate_position!(pos.position().unwrap(), exp_pos);\n\n assert!(pos.record_id().is_none());\n\n });\n\n}\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 52, "score": 57034.81936607165 }, { "content": "fn main() {\n\n for filename in args().skip(1) {\n\n\n\n if filename.ends_with(\".fasta\") || filename.ends_with(\".fa\") {\n\n let bio_reader = bio::io::fasta::Reader::from_file(&filename).unwrap();\n\n let mut reader = seq_io::fasta::Reader::from_path(&filename).unwrap();\n\n\n\n for bio_rec in bio_reader.records() {\n\n let bio_rec = bio_rec.unwrap();\n\n let rec = reader.next().expect(&format!(\"Record {:?} not found in reader\", 
bio_rec.id())).unwrap();\n\n assert_eq!(bio_rec.id(), rec.id().unwrap());\n\n assert_eq!(bio_rec.desc(), rec.desc().map(|d| d.unwrap()));\n\n assert_eq!(bio_rec.seq(), rec.owned_seq().as_slice());\n\n }\n\n assert!(reader.next().is_none());\n\n\n\n } else if filename.ends_with(\".fastq\") || filename.ends_with(\".fq\") {\n\n let bio_reader = bio::io::fastq::Reader::from_file(&filename).unwrap();\n\n let mut reader = seq_io::fastq::Reader::from_path(&filename).unwrap();\n\n\n", "file_path": "compare_bio/src/main.rs", "rank": 53, "score": 57034.81936607165 }, { "content": "#[test]\n\n#[allow(unused_variables)]\n\nfn truncated3() {\n\n let fq = &b\"\\n\\n@id\\nATGC\\n+\"[..];\n\n test_reader!(fq, reader, {\n\n let rec = reader.next().unwrap();\n\n let err = rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), $ErrorKind::UnexpectedEnd { pos: _ });\n\n let pos = err.position().unwrap();\n\n let exp_pos = _Position::new().set_line(2).set_byte(2).set_record(0).clone();\n\n validate_position!(pos.record_position().unwrap(), exp_pos);\n\n assert_eq!(pos.error_offset().unwrap().line(), 2);\n\n assert_eq!(pos.error_offset().unwrap().byte(), 9);\n\n let exp_pos = _Position::new().set_line(4).set_byte(11).set_record(0).clone();\n\n validate_position!(pos.position().unwrap(), exp_pos);\n\n assert!(pos.record_id().unwrap() == \"id\");\n\n });\n\n}\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 54, "score": 57034.81936607165 }, { "content": "#[test]\n\n#[allow(unused_variables)]\n\nfn truncated2() {\n\n let fq = &b\"\\n@id\\nATGC\"[..];\n\n test_reader!(fq, reader, {\n\n let res = reader.next().unwrap();\n\n let err = res.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), $ErrorKind::UnexpectedEnd { pos: _ });\n\n let pos = err.position().unwrap();\n\n let exp_pos = _Position::new().set_line(1).set_byte(1).set_record(0).clone();\n\n validate_position!(pos.record_position().unwrap(), exp_pos);\n\n 
assert_eq!(pos.error_offset().unwrap().line(), 1);\n\n assert_eq!(pos.error_offset().unwrap().byte(), 7);\n\n let exp_pos = _Position::new().set_line(2).set_byte(8).set_record(0).clone();\n\n validate_position!(pos.position().unwrap(), exp_pos);\n\n assert!(pos.record_id().unwrap() == \"id\");\n\n });\n\n}\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 55, "score": 57034.81936607165 }, { "content": "#[test]\n\nfn no_sep() {\n\n let fq = &b\"\\n@id\\nATGC\\nIIII\\n\"[..];\n\n test_reader!(fq, reader, {\n\n let rec = reader.next().unwrap();\n\n let err = rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), ErrorKind::InvalidSep { pos: _, found: Some(b'I') });\n\n let pos = err.position().unwrap();\n\n let exp_pos = Position::new().set_line(1).set_byte(1).set_record(0).clone();\n\n validate_position!(pos.record_position().unwrap(), exp_pos);\n\n assert_eq!(pos.error_offset().unwrap().line(), 2);\n\n assert_eq!(pos.error_offset().unwrap().byte(), 9);\n\n let exp_pos = Position::new().set_line(3).set_byte(10).set_record(0).clone();\n\n validate_position!(pos.position().unwrap(), exp_pos);\n\n assert!(pos.record_id().unwrap() == \"id\");\n\n });\n\n}\n\n\n\n\n\n// same as single-line FASTQ version, but with two newlines at end\n", "file_path": "tests/fastq_common/standard.rs", "rank": 56, "score": 57034.81936607165 }, { "content": "#[test]\n\nfn policy() {\n\n let p = seq_io::policy::DoubleUntilLimited::new(2, 5);\n\n let mut reader = make_reader!($ReaderBuilder, &b\">id\\nAT\\nGC\\n\"[..], $PositionStore, 3, p);\n\n let err = reader.next().unwrap().unwrap_err();\n\n assert_matches!(err.kind(), $ErrorKind::BufferLimit);\n\n}\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 57, "score": 57034.81936607165 }, { "content": "#[test]\n\n#[allow(unused_variables)]\n\nfn unequal() {\n\n let fq = &b\"@id\\nATGC\\n+\\nIII\"[..];\n\n test_reader!(fq, reader, {\n\n let rec = reader.next().unwrap();\n\n let err = 
rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), $ErrorKind::UnequalLengths { pos: _, seq: 4, qual: 3 });\n\n let pos = err.position().unwrap();\n\n let exp_pos = _Position::new().set_line(0).set_byte(0).set_record(0).clone();\n\n validate_position!(pos.record_position().unwrap(), exp_pos);\n\n validate_position!(pos.position().unwrap(), exp_pos);\n\n assert!(pos.error_offset().is_none());\n\n assert!(pos.record_id().expect(\"should have an ID\") == \"id\");\n\n });\n\n}\n\n\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 58, "score": 57034.81936607165 }, { "content": "#[test]\n\nfn invalid_start() {\n\n let fq = &b\"@id1\\nA\\n+\\nI\\nid\\nATGC\\n+\\nIIII\"[..];\n\n test_reader!(fq, reader, {\n\n reader.next().unwrap().unwrap();\n\n let rec = reader.next().unwrap();\n\n let err = rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), $ErrorKind::InvalidStart { pos: _, found: b'i' });\n\n let pos = err.position().unwrap();\n\n validate_position!(reader.position(), Position::new().set_line(4).set_byte(11).set_record(1));\n\n assert!(pos.error_offset().is_none());\n\n assert!(pos.record_id().is_none());\n\n });\n\n}\n\n\n\n\n", "file_path": "tests/fastq_common/standard.rs", "rank": 59, "score": 55697.60845375834 }, { "content": "#[test]\n\nfn write_seq() {\n\n let mut out = vec![];\n\n fasta::write_seq(&mut out, b\"ATGC\").unwrap();\n\n assert_eq!(&out, b\"ATGC\\n\");\n\n}\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 60, "score": 55697.60845375834 }, { "content": "#[test]\n\nfn seek_err() {\n\n let fq = &b\"@s1\\nA\\n+\\nI\\n@s2\\nA\\n\"[..];\n\n test_reader!(std::io::Cursor::new(fq), reader, {\n\n let record = reader.next().unwrap().unwrap();\n\n assert_eq!(record.id(), Ok(\"s1\"));\n\n let pos1 = reader.position().clone();\n\n\n\n // advance to errorneous record\n\n let rec = reader.next().unwrap();\n\n let err = rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), 
$ErrorKind::UnexpectedEnd { pos: _ });\n\n let error_pos = err.position().unwrap().to_owned();\n\n let exp_pos = _Position::new().set_line(4).set_byte(10).set_record(1).clone();\n\n validate_position!(error_pos.record_position().unwrap(), exp_pos);\n\n assert_eq!(error_pos.error_offset().unwrap().line(), 1);\n\n assert_eq!(error_pos.error_offset().unwrap().byte(), 5);\n\n let exp_pos = _Position::new().set_line(5).set_byte(15).set_record(1).clone();\n\n validate_position!(error_pos.position().unwrap(), exp_pos);\n\n assert_eq!(error_pos.record_id(), Some(\"s2\"));\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 61, "score": 55697.60845375834 }, { "content": "#[test]\n\nfn no_newline_end() {\n\n let mut reader = make_reader!($ReaderBuilder, &b\">id\\nATGC\"[..], $PositionStore);\n\n assert_eq!(reader.next().unwrap().unwrap().id_bytes(), b\"id\");\n\n assert!(reader.next().is_none());\n\n}\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 62, "score": 55697.60845375834 }, { "content": "fn main() {\n\n let mut data = vec![];\n\n let filename = args().skip(1).next().unwrap().as_str().to_string();\n\n File::open(&filename)\n\n .unwrap()\n\n .read_to_end(&mut data)\n\n .expect(\"could not open file\");\n\n let data = data.as_slice();\n\n println!(\n\n \"data: {:?}\\n{:?}'\",\n\n data,\n\n String::from_utf8(data.to_owned())\n\n );\n\n\n\n fasta::evaluate(data);\n\n fastq::evaluate(data);\n\n}\n", "file_path": "fuzz/fuzz_debug/src/main.rs", "rank": 63, "score": 55697.60845375834 }, { "content": "#[test]\n\nfn invalid_start() {\n\n let mut reader = make_reader!($ReaderBuilder, &b\"\\r\\nid\\nATGC\\n\"[..], $PositionStore);\n\n let rec = reader.next().unwrap();\n\n let err = rec.err().expect(\"Should be an error\");\n\n assert_matches!(err.kind(), $ErrorKind::InvalidStart { pos: _, found: b'i' });\n\n let pos = err.position().unwrap();\n\n let exp_pos = _Position::new().set_line(1).set_byte(2).set_record(0).clone();\n\n 
validate_position!(pos.record_position().unwrap(), exp_pos);\n\n assert!(pos.error_offset().is_none());\n\n validate_position!(pos.position().unwrap(), exp_pos);\n\n assert!(pos.record_id().is_none());\n\n}\n\n\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 64, "score": 55697.60845375834 }, { "content": "#[test]\n\nfn write_record() {\n\n let fastq_in = &b\"\\n\\n@id\\nSEQQ\\n+ id\\r\\nQUAL\\n\\n@id2\\r\\nSEQ\\r\\n+\\nQUA\"[..];\n\n // lines joined and CR removed\n\n let fastq_out = &b\"@id\\nSEQQ\\n+\\nQUAL\\n@id2\\nSEQ\\n+\\nQUA\\n\"[..];\n\n // not changed apart from newlines before and after record\n\n let fastq_out_unchanged = &b\"@id\\nSEQQ\\n+ id\\r\\nQUAL\\n@id2\\r\\nSEQ\\r\\n+\\nQUA\\n\"[..];\n\n test_reader!(fastq_in, reader, {\n\n let mut out = vec![];\n\n let mut out_unchanged = vec![];\n\n while let Some(res) = reader.next() {\n\n let rec = res.unwrap();\n\n rec.write(&mut out).unwrap();\n\n rec.write_unchanged(&mut out_unchanged).unwrap();\n\n }\n\n assert_eq!(out.as_slice(), fastq_out);\n\n assert_eq!(out_unchanged.as_slice(), fastq_out_unchanged);\n\n });\n\n}\n\n\n\n}\n\n}\n", "file_path": "tests/fastq_common/standard.rs", "rank": 65, "score": 55697.60845375834 }, { "content": "#[test]\n\nfn no_newline_end() {\n\n let fq = &b\"@id\\nATGC\\n+\\nIIII\"[..];\n\n test_reader!(fq, reader, {\n\n let rec = reader.next().unwrap().unwrap();\n\n assert_eq!(rec.id_bytes(), b\"id\");\n\n assert_eq!(rec.seq(), b\"ATGC\");\n\n assert_eq!(rec.opt_qual(), Some(&b\"IIII\"[..]));\n\n assert!(reader.next().is_none());\n\n });\n\n}\n\n\n\n\n\n// TODO: check b\"@id\\nSS\\n+\\nQ\\nQ\"\n\n\n", "file_path": "tests/fastq_common/common.rs", "rank": 66, "score": 55697.60845375834 }, { "content": "#[test]\n\nfn write_record() {\n\n let fastq_in = &b\"\\n\\n@id\\nSEQQ\\n+ id\\r\\nQUAL\\n\\n@id2\\r\\nS\\nE\\nQ\\r\\n+\\nQU\\nA\"[..];\n\n // lines joined and CR removed\n\n let fastq_out = &b\"@id\\nSEQQ\\n+\\nQUAL\\n@id2\\nSEQ\\n+\\nQUA\\n\"[..];\n\n // not changed 
apart from newlines before and after record\n\n let fastq_out_unchanged = &b\"@id\\nSEQQ\\n+ id\\r\\nQUAL\\n\\n@id2\\r\\nS\\nE\\nQ\\r\\n+\\nQU\\nA\\n\"[..];\n\n test_reader!(fastq_in, reader, {\n\n let mut out = vec![];\n\n let mut out_unchanged = vec![];\n\n while let Some(res) = reader.next() {\n\n let rec = res.unwrap();\n\n rec.write(&mut out).unwrap();\n\n rec.write_unchanged(&mut out_unchanged).unwrap();\n\n }\n\n assert_eq!(out.as_slice(), fastq_out);\n\n assert_eq!(out_unchanged.as_slice(), fastq_out_unchanged);\n\n });\n\n}\n\n\n\n}\n\n}\n", "file_path": "tests/fastq_common/multiline.rs", "rank": 67, "score": 55697.60845375834 }, { "content": "#[test]\n\nfn write_seq_wrap() {\n\n let mut out = vec![];\n\n fasta::write_wrap_seq(&mut out, b\"ATGCA\", 2).unwrap();\n\n assert_eq!(&out, b\"AT\\nGC\\nA\\n\");\n\n}\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 68, "score": 54460.30898597771 }, { "content": "#[test]\n\nfn write_seq_iter() {\n\n let mut out = vec![];\n\n fasta::write_seq_iter(&mut out, b\"ATGCA\".chunks(2)).unwrap();\n\n assert_eq!(&out, b\"ATGCA\\n\");\n\n}\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 69, "score": 54460.30898597771 }, { "content": "#[test]\n\nfn empty_lines_end() {\n\n let mut reader = make_reader!($ReaderBuilder, &b\">id\\nATGC\\n\\n\\n\\n\\n\\n\\n\\n\\n\"[..], $PositionStore);\n\n assert_eq!(reader.next().unwrap().unwrap().id_bytes(), b\"id\");\n\n assert!(reader.next().is_none());\n\n}\n\n\n", "file_path": "tests/fasta_common/common.rs", "rank": 70, "score": 54460.30898597771 }, { "content": "#[test]\n\nfn write_seq_iter_wrap() {\n\n for size in 1..11 {\n\n let mut out = vec![];\n\n fasta::write_wrap_seq_iter(&mut out, b\"AAAATTTTGGG\".chunks(size), 3).unwrap();\n\n assert_eq!(&out, b\"AAA\\nATT\\nTTG\\nGG\\n\");\n\n\n\n let mut out = vec![];\n\n fasta::write_wrap_seq_iter(&mut out, b\"AAAATTTTGGG\".chunks(size), 4).unwrap();\n\n assert_eq!(&out, b\"AAAA\\nTTTT\\nGGG\\n\");\n\n 
}\n\n}\n\n\n\n}\n\n}\n", "file_path": "tests/fasta_common/common.rs", "rank": 71, "score": 53312.12669710249 }, { "content": "fn compare_errors(e1: Error, e2: Error) {\n\n match (e1.kind(), e2.kind()) {\n\n (ErrorKind::Io(_), ErrorKind::Io(_)) | \n\n (ErrorKind::BufferLimit, ErrorKind::BufferLimit) => {},\n\n (ErrorKind::InvalidStart { pos: _, found: f1 },\n\n ErrorKind::InvalidStart { pos: _, found: f2 }) => assert_eq!(f1, f2),\n\n (ErrorKind::InvalidSep { pos: _, found: f1 },\n\n ErrorKind::InvalidSep { pos: _, found: f2 }) => assert_eq!(f1, f2),\n\n (ErrorKind::UnexpectedEnd { pos: _},\n\n ErrorKind::UnexpectedEnd { pos: _}) => {},\n\n (ErrorKind::UnequalLengths { pos: _, seq: s1, qual: q1 },\n\n ErrorKind::UnequalLengths { pos: _, seq: s2, qual: q2 }) => {\n\n assert_eq!(s1, s2);\n\n assert_eq!(q1, q2);\n\n },\n\n _ => {}\n\n }\n\n}\n", "file_path": "fuzz/fuzz_targets/simple_reader.rs", "rank": 72, "score": 43764.28018593295 }, { "content": " None\n\n }\n\n\n\n #[inline]\n\n fn opt_full_qual_given<'s, F>(&'s self, _: F) -> Option<Cow<'s, [u8]>>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n None\n\n }\n\n\n\n #[inline]\n\n fn num_qual_lines(&self) -> usize {\n\n 0\n\n }\n\n\n\n #[inline]\n\n fn write<W>(&self, writer: W) -> io::Result<()>\n\n where\n\n W: io::Write,\n", "file_path": "src/fasta/record.rs", "rank": 73, "score": 41.14414961292676 }, { "content": " fn opt_full_qual(&self) -> Option<Cow<[u8]>> {\n\n Some(self.full_qual())\n\n }\n\n\n\n #[inline]\n\n fn opt_full_qual_given<'s, F>(&'s self, owned_fn: F) -> Option<Cow<'s, [u8]>>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n Some(self.full_qual_given(owned_fn))\n\n }\n\n\n\n #[inline]\n\n fn num_qual_lines(&self) -> usize {\n\n self.buf_pos.num_qual_lines()\n\n }\n\n\n\n #[inline]\n\n fn write<W>(&self, writer: W) -> io::Result<()>\n\n where\n", "file_path": "src/fastq/record.rs", "rank": 74, "score": 36.2709639003591 }, { "content": " fn opt_qual(&self) -> Option<&[u8]> {\n\n 
None\n\n }\n\n\n\n #[inline]\n\n fn opt_full_qual(&self) -> Option<Cow<[u8]>> {\n\n None\n\n }\n\n\n\n #[inline]\n\n fn opt_full_qual_given<'s, F>(&'s self, _: F) -> Option<Cow<'s, [u8]>>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n None\n\n }\n\n\n\n #[inline]\n\n fn num_qual_lines(&self) -> usize {\n\n 0\n", "file_path": "src/fasta/record.rs", "rank": 75, "score": 34.3232647136777 }, { "content": " fn full_seq(&self) -> Cow<[u8]> {\n\n (&self.seq).into()\n\n }\n\n\n\n #[inline]\n\n fn full_seq_given<'s, F: FnOnce() -> &'s mut Vec<u8>>(&'s self, _: F) -> Cow<'s, [u8]> {\n\n (&self.seq).into()\n\n }\n\n\n\n #[inline]\n\n fn num_seq_lines(&self) -> usize {\n\n 1\n\n }\n\n\n\n #[inline]\n\n fn has_quality(&self) -> bool {\n\n false\n\n }\n\n\n\n #[inline]\n", "file_path": "src/fasta/record.rs", "rank": 76, "score": 33.63512800616774 }, { "content": " #[inline]\n\n fn full_seq_given<'s, F>(&'s self, _: F) -> Cow<'s, [u8]>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n (&self.seq).into()\n\n }\n\n\n\n #[inline]\n\n fn num_seq_lines(&self) -> usize {\n\n 1\n\n }\n\n\n\n #[inline]\n\n fn has_quality(&self) -> bool {\n\n true\n\n }\n\n\n\n #[inline]\n\n fn opt_qual(&self) -> Option<&[u8]> {\n", "file_path": "src/fastq/record.rs", "rank": 77, "score": 33.12075901930116 }, { "content": "\n\n #[inline]\n\n fn full_seq_given<'s, F: FnOnce() -> &'s mut Vec<u8>>(&'s self, _: F) -> Cow<'s, [u8]> {\n\n (&self.seq).into()\n\n }\n\n\n\n #[inline]\n\n fn num_seq_lines(&self) -> usize {\n\n 1\n\n }\n\n\n\n #[inline]\n\n fn has_quality(&self) -> bool {\n\n self.qual.is_some()\n\n }\n\n\n\n #[inline]\n\n fn opt_qual(&self) -> Option<&[u8]> {\n\n self.qual.as_ref().map(|q| q.as_slice())\n\n }\n", "file_path": "src/fastx/record.rs", "rank": 78, "score": 32.0671882941289 }, { "content": "\n\n #[inline]\n\n fn full_qual_given<'s, F>(&'s self, _: F) -> Cow<'s, [u8]>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n (&self.qual).into()\n\n }\n\n\n\n #[inline]\n\n fn 
check_lengths(&self) -> Result<&Self, Error> {\n\n if self.seq.len() == self.qual.len() {\n\n return Ok(self);\n\n }\n\n let id = String::from_utf8_lossy(self.id_bytes()).into();\n\n let pos = ErrorPosition::new(None, None, Some(id));\n\n return Err(Error::new(ErrorKind::UnequalLengths {\n\n pos,\n\n seq: self.seq.len(),\n\n qual: self.qual.len(),\n\n }));\n\n }\n\n}\n\n\n\nimpl_recordset!(RefRecord, QualRecordPosition, RangeStore, \"fastq\", \"fastq\");\n", "file_path": "src/fastq/record.rs", "rank": 80, "score": 31.467224166103893 }, { "content": " out.extend(line);\n\n }\n\n return out.into();\n\n }\n\n }\n\n}\n\n\n\n/// Joins lines together\n\n#[inline]\n\npub(crate) fn join_lines_given<'a, L, F>(\n\n mut lines: L,\n\n num_lines: usize,\n\n owned_fn: F,\n\n) -> Cow<'a, [u8]>\n\nwhere\n\n L: Iterator<Item = &'a [u8]>,\n\n F: FnOnce() -> &'a mut Vec<u8>,\n\n{\n\n match num_lines {\n\n 1 => lines.next().unwrap().into(),\n", "file_path": "src/core/position.rs", "rank": 81, "score": 31.227523408550514 }, { "content": " Some(self.qual())\n\n }\n\n\n\n #[inline]\n\n fn opt_full_qual(&self) -> Option<Cow<[u8]>> {\n\n Some(self.full_qual())\n\n }\n\n\n\n #[inline]\n\n fn opt_full_qual_given<'s, F>(&'s self, _: F) -> Option<Cow<'s, [u8]>>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n Some((&self.qual).into())\n\n }\n\n\n\n #[inline]\n\n fn num_qual_lines(&self) -> usize {\n\n 1\n\n }\n", "file_path": "src/fastq/record.rs", "rank": 84, "score": 29.683555015891596 }, { "content": "\n\n #[inline]\n\n fn opt_full_qual(&self) -> Option<Cow<[u8]>> {\n\n self.qual.as_ref().map(|q| q.into())\n\n }\n\n\n\n #[inline]\n\n fn opt_full_qual_given<'s, F>(&'s self, _: F) -> Option<Cow<'s, [u8]>>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n self.qual.as_ref().map(|q| q.as_slice().into())\n\n }\n\n\n\n #[inline]\n\n fn num_qual_lines(&self) -> usize {\n\n 1\n\n }\n\n\n\n #[inline]\n", "file_path": "src/fastx/record.rs", "rank": 86, "score": 29.299985323121966 }, { 
"content": " fn full_seq(&self) -> Cow<[u8]> {\n\n join_lines(self.seq_lines(), self.num_seq_lines())\n\n }\n\n\n\n #[inline]\n\n fn full_seq_given<'s, F>(&'s self, owned_fn: F) -> Cow<'s, [u8]>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n join_lines_given(\n\n self.buf_pos.seq_lines(self.buffer),\n\n self.buf_pos.num_seq_lines(),\n\n owned_fn,\n\n )\n\n }\n\n\n\n #[inline]\n\n fn num_seq_lines(&self) -> usize {\n\n self.buf_pos.num_seq_lines()\n\n }\n", "file_path": "src/fastx/record.rs", "rank": 89, "score": 27.838620606624435 }, { "content": " self.buf_pos.num_qual_lines(),\n\n ))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n #[inline]\n\n fn opt_full_qual_given<'s, F>(&'s self, owned_fn: F) -> Option<Cow<'s, [u8]>>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n if self.buf_pos.has_qual() {\n\n Some(join_lines_given(\n\n self.buf_pos.qual_lines(self.buffer),\n\n self.buf_pos.num_qual_lines(),\n\n owned_fn,\n\n ))\n\n } else {\n\n None\n", "file_path": "src/fastx/record.rs", "rank": 91, "score": 27.61185048246147 }, { "content": " /// Creates a new Reader from an already instantiated\n\n /// [`BufReader`](crate::core::BufReader).\n\n /// This is mostly useful if doing format recognition before deciding on\n\n /// the exact reader type.\n\n #[inline]\n\n pub fn from_buf_reader(rdr: crate::core::BufReader<R, P>, byte_offset: usize, line_idx: u64) -> Self {\n\n Self::_from_buf_reader(rdr, byte_offset, line_idx)\n\n }\n\n\n\n #[inline]\n\n fn _next(&mut self, check_lengths: bool) -> Option<crate::fastx::Result<(&[u8], &S)>> {\n\n if let Some(fasta) = try_opt!(self._check_is_fasta()) {\n\n self.inner.next(fasta, $multiline_fasta, $multiline_fastq, false, check_lengths)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Searches the next record and returns a\n\n #[doc = $refrec_link]\n", "file_path": "src/core/reader.rs", "rank": 92, "score": 27.47810938752933 }, { "content": "\n\n #[inline]\n\n fn full_qual_given<'s, F>(&'s self, owned_fn: F) -> Cow<'s, 
[u8]>\n\n where\n\n F: FnOnce() -> &'s mut Vec<u8>,\n\n {\n\n join_lines_given(\n\n self.buf_pos.qual_lines(self.buffer),\n\n self.buf_pos.num_qual_lines(),\n\n owned_fn,\n\n )\n\n }\n\n\n\n #[inline]\n\n fn check_lengths(&self) -> Result<&Self, Error> {\n\n self._check_lengths(false)\n\n }\n\n}\n\n\n\nimpl<'a, S> RefRecord<'a, S>\n", "file_path": "src/fastq/record.rs", "rank": 93, "score": 27.32310659214481 }, { "content": "// fn qual(&self) -> &[u8] {\n\n// (**self).qual()\n\n// }\n\n\n\n// fn full_qual(&self) -> Cow<[u8]> {\n\n// (**self).full_qual()\n\n// }\n\n\n\n// fn full_qual_given<'s, F>(&'s self, owned_fn: F) -> Cow<'s, [u8]>\n\n// where\n\n// F: FnOnce() -> &'s mut Vec<u8>,\n\n// Self: Sized\n\n// {\n\n// (**self).full_qual_given(owned_fn)\n\n// }\n\n\n\n// fn check_lengths(&self) -> Result<&Self, Error> {\n\n// (**self).check_lengths()\n\n// }\n\n// }\n", "file_path": "src/fastq/record.rs", "rank": 94, "score": 26.94215221318778 }, { "content": "}\n\n\n\nimpl<'a> HeadWriter for &String {\n\n fn write_head<W>(&self, writer: W, start_byte: u8) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n {\n\n self.as_str().write_head(writer, start_byte)\n\n }\n\n}\n\n\n\nimpl<'a> HeadWriter for (&'a [u8], Option<&'a [u8]>) {\n\n fn write_head<W>(&self, mut writer: W, start_byte: u8) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n {\n\n write!(writer, \"{}\", start_byte)?;\n\n writer.write_all(self.0)?;\n\n writer.write_all(b\" \")?;\n\n if let Some(desc) = self.1 {\n", "file_path": "src/helpers.rs", "rank": 95, "score": 26.845342009463643 }, { "content": " /// `reader_init` should return a result. 
The error type needs to\n\n /// implement `From<RecordSetReader::Err>`\n\n ///\n\n pub fn $name_init<R, Ri, $($bounds)*, W, F, O, Out, E>(\n\n reader_init: Ri,\n\n n_workers: u32,\n\n queue_len: usize,\n\n work: W,\n\n mut func: F,\n\n ) -> Result<Option<Out>, E>\n\n where\n\n R: RecordSetReader<RecordSet = $RecordSet, Err = $Error>,\n\n Ri: Send + FnOnce() -> Result<R, E>,\n\n O: Default + Send,\n\n W: Send + Sync + Fn($Record, &mut O),\n\n F: FnMut($Record, &mut O) -> Option<Out>,\n\n E: Send + From<R::Err>,\n\n {\n\n read_process_recordsets_init(\n\n reader_init,\n", "file_path": "src/parallel.rs", "rank": 96, "score": 26.773570432227185 }, { "content": " #[inline]\n\n pub fn len(&self) -> usize {\n\n self.npos\n\n }\n\n\n\n /// Returns the number of records in the record set.\n\n #[inline]\n\n pub fn is_empty(&self) -> bool {\n\n self.npos == 0\n\n }\n\n }\n\n\n\n impl<S> crate::core::RecordSet<S> for RecordSet<S>\n\n where\n\n S: $RecordStoreTrait,\n\n {\n\n #[inline]\n\n fn clear(&mut self) {\n\n self.npos = 0;\n\n }\n", "file_path": "src/core/record.rs", "rank": 97, "score": 26.344210856722015 }, { "content": " #[doc = $import_rdr]\n\n #[doc = $seq1]\n\n ///\n\n /// let mut reader = Reader::new(&seq[..]);\n\n /// let record = reader.next().unwrap().unwrap();\n\n /// assert_eq!(record.id(), Ok(\"id\"))\n\n /// ```\n\n #[inline]\n\n pub fn new(reader: R) -> Self {\n\n Self::with_capacity(reader, crate::core::BUFSIZE)\n\n }\n\n\n\n /// Creates a new reader with a given buffer capacity. 
The minimum allowed\n\n /// capacity is 3.\n\n #[inline]\n\n pub fn with_capacity(reader: R, capacity: usize) -> Self {\n\n Self::_new(reader, capacity, crate::policy::StdPolicy)\n\n }\n\n}\n\n\n", "file_path": "src/core/reader.rs", "rank": 98, "score": 26.274654489546947 }, { "content": "\n\n #[inline]\n\n fn write<W>(&self, writer: W) -> io::Result<()>\n\n where\n\n W: io::Write,\n\n {\n\n write(writer, self.head(), self.seq(), self.qual())\n\n }\n\n}\n\n\n\nimpl Record for OwnedRecord {\n\n #[inline]\n\n fn qual(&self) -> &[u8] {\n\n &self.qual\n\n }\n\n\n\n #[inline]\n\n fn full_qual(&self) -> Cow<[u8]> {\n\n (&self.qual).into()\n\n }\n", "file_path": "src/fastq/record.rs", "rank": 99, "score": 26.265316254103322 } ]
Rust
examples/hashrocket-bench.rs
hobinjk/tokio-websocket
b54adc78740ce7caf442e54e906b0a41b6daf7a8
extern crate futures; extern crate tokio_core; extern crate tokio_io; extern crate websocket; extern crate serde_json; use serde_json::Value; use std::collections::HashMap; use std::rc::Rc; use std::cell::RefCell; use std::io::{Error, ErrorKind}; use tokio_core::net::TcpListener; use tokio_core::reactor::Core; use tokio_io::AsyncRead; use futures::{Future, Stream, Sink}; use futures::sync::mpsc; use websocket::{Request, WebSocketCodec, new_text_frame, Opcode, Frame}; const NULL_PAYLOAD: &'static Value = &Value::Null; enum Message { Echo(Frame), Broadcast(Frame, Frame), None(), } fn process_frame(frame: Frame) -> Message { if frame.header.opcode == Opcode::Close { return Message::Echo(frame); } if frame.header.opcode != Opcode::Text { return Message::None(); } let payload = frame.payload_string().unwrap(); if let Ok(Value::Object(obj)) = serde_json::from_str::<Value>(&payload) { if let Some(&Value::String(ref s)) = obj.get("type") { if s == "echo" { return Message::Echo(frame); } if s == "broadcast" { let msg = format!(r#"{{"type":"broadcastResult","payload":{}}}"#, obj.get("payload").unwrap_or(NULL_PAYLOAD)); return Message::Broadcast(frame, new_text_frame(&msg, None)); } } } Message::None() } fn main() { let addr = "0.0.0.0:8084".parse().unwrap(); let mut core = Core::new().unwrap(); let handle = core.handle(); let socket = TcpListener::bind(&addr, &handle).unwrap(); let connections = Rc::new(RefCell::new(HashMap::new())); let srv = socket.incoming().for_each(move |(conn, addr)| { let (sink, stream) = conn.framed(WebSocketCodec::new()).split(); let (tx, rx) = mpsc::unbounded(); connections.borrow_mut().insert(addr, tx); let connections_inner = connections.clone(); let reader = stream.for_each(move |req| { let mut conns = connections_inner.borrow_mut(); match req { Request::Frame(frame) => { match process_frame(frame) { Message::None() => {}, Message::Echo(frame) => { if frame.header.opcode == Opcode::Close { conns.remove(&addr); return 
Err(Error::new(ErrorKind::Other, "close requested")) } let tx = conns.get_mut(&addr).unwrap(); let masked_frame = new_text_frame(&frame.payload_string().unwrap(), None); mpsc::UnboundedSender::send(&mut std::borrow::BorrowMut::borrow_mut(tx), masked_frame).unwrap(); }, Message::Broadcast(broadcast_frame, echo_frame) => { let masked_frame = new_text_frame(&broadcast_frame.payload_string().unwrap(), None); for (&t_addr, tx) in conns.iter_mut() { mpsc::UnboundedSender::send(&mut std::borrow::BorrowMut::borrow_mut(tx), masked_frame.clone()).unwrap(); if addr == t_addr { mpsc::UnboundedSender::send(&mut std::borrow::BorrowMut::borrow_mut(tx), echo_frame.clone()).unwrap(); } } }, } }, Request::Open() => { let tx = conns.get_mut(&addr).unwrap(); mpsc::UnboundedSender::send(&mut std::borrow::BorrowMut::borrow_mut(tx), new_text_frame("this message is dropped", None)).unwrap(); } } Ok(()) }); let connections = connections.clone(); let writer = rx.map_err(|_| Error::new(ErrorKind::Other, "receiver error")).fold(sink, |sink, msg| { sink.send(msg) }); let reader = reader.map_err(|_| Error::new(ErrorKind::Other, "transmitter error")); let conn = reader.map(|_| ()).select(writer.map(|_| ())); handle.spawn(conn.then(move |_| { connections.borrow_mut().remove(&addr); Ok(()) })); Ok(()) }); core.run(srv).unwrap(); }
extern crate futures; extern crate tokio_core; extern crate tokio_io; extern crate websocket; extern crate serde_json; use serde_json::Value; use std::collections::HashMap; use std::rc::Rc; use std::cell::RefCell; use std::io::{Error, ErrorKind}; use tokio_core::net::TcpListener; use tokio_core::reactor::Core; use tokio_io::AsyncRead; use futures::{Future, Stream, Sink}; use futures::sync::mpsc; use websocket::{Request, WebSocketCodec, new_text_frame, Opcode, Frame}; const NULL_PAYLOAD: &'static Value = &Value::Null; enum Message { Echo(Frame), Broadcast(Frame, Frame), None(), } fn process_frame(frame: Frame) -> Message { if frame.header.opcode == Opcode::Close { return Message::Echo(frame); } if frame.header.opcode != Opcode::Text { return Message::None(); } let payload = frame.payload_string().unwrap(); if let Ok(Value::Object(obj)) = serde_json::from_str::<Value>(&pa
if frame.header.opcode == Opcode::Close { conns.remove(&addr); return Err(Error::new(ErrorKind::Other, "close requested")) } let tx = conns.get_mut(&addr).unwrap(); let masked_frame = new_text_frame(&frame.payload_string().unwrap(), None); mpsc::UnboundedSender::send(&mut std::borrow::BorrowMut::borrow_mut(tx), masked_frame).unwrap(); }, Message::Broadcast(broadcast_frame, echo_frame) => { let masked_frame = new_text_frame(&broadcast_frame.payload_string().unwrap(), None); for (&t_addr, tx) in conns.iter_mut() { mpsc::UnboundedSender::send(&mut std::borrow::BorrowMut::borrow_mut(tx), masked_frame.clone()).unwrap(); if addr == t_addr { mpsc::UnboundedSender::send(&mut std::borrow::BorrowMut::borrow_mut(tx), echo_frame.clone()).unwrap(); } } }, } }, Request::Open() => { let tx = conns.get_mut(&addr).unwrap(); mpsc::UnboundedSender::send(&mut std::borrow::BorrowMut::borrow_mut(tx), new_text_frame("this message is dropped", None)).unwrap(); } } Ok(()) }); let connections = connections.clone(); let writer = rx.map_err(|_| Error::new(ErrorKind::Other, "receiver error")).fold(sink, |sink, msg| { sink.send(msg) }); let reader = reader.map_err(|_| Error::new(ErrorKind::Other, "transmitter error")); let conn = reader.map(|_| ()).select(writer.map(|_| ())); handle.spawn(conn.then(move |_| { connections.borrow_mut().remove(&addr); Ok(()) })); Ok(()) }); core.run(srv).unwrap(); }
yload) { if let Some(&Value::String(ref s)) = obj.get("type") { if s == "echo" { return Message::Echo(frame); } if s == "broadcast" { let msg = format!(r#"{{"type":"broadcastResult","payload":{}}}"#, obj.get("payload").unwrap_or(NULL_PAYLOAD)); return Message::Broadcast(frame, new_text_frame(&msg, None)); } } } Message::None() } fn main() { let addr = "0.0.0.0:8084".parse().unwrap(); let mut core = Core::new().unwrap(); let handle = core.handle(); let socket = TcpListener::bind(&addr, &handle).unwrap(); let connections = Rc::new(RefCell::new(HashMap::new())); let srv = socket.incoming().for_each(move |(conn, addr)| { let (sink, stream) = conn.framed(WebSocketCodec::new()).split(); let (tx, rx) = mpsc::unbounded(); connections.borrow_mut().insert(addr, tx); let connections_inner = connections.clone(); let reader = stream.for_each(move |req| { let mut conns = connections_inner.borrow_mut(); match req { Request::Frame(frame) => { match process_frame(frame) { Message::None() => {}, Message::Echo(frame) => {
random
[ { "content": "pub fn opcode_to_u8(opcode: Opcode) -> u8 {\n\n match opcode {\n\n Opcode::Continuation => 0,\n\n Opcode::Text => 1,\n\n Opcode::Binary => 2,\n\n Opcode::Close => 8,\n\n Opcode::Ping => 9,\n\n Opcode::Pong => 10,\n\n }\n\n}\n\n\n", "file_path": "src/ws_frame.rs", "rank": 1, "score": 65178.735499985516 }, { "content": "pub fn u8_to_opcode(bits: u8) -> Option<Opcode> {\n\n match bits {\n\n 0 => Some(Opcode::Continuation),\n\n 1 => Some(Opcode::Text),\n\n 2 => Some(Opcode::Binary),\n\n 8 => Some(Opcode::Close),\n\n 9 => Some(Opcode::Ping),\n\n 10 => Some(Opcode::Pong),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/ws_frame.rs", "rank": 2, "score": 59045.01154351231 }, { "content": "pub fn new_text_frame(text: &str, masking_key: Option<u32>) -> Frame {\n\n let masked_text = match masking_key {\n\n Some(masking_key) => mask_bytes(masking_key, text.as_bytes()),\n\n None => text.as_bytes().to_vec(),\n\n };\n\n\n\n Frame {\n\n header: Header {\n\n is_final: true,\n\n opcode: Opcode::Text,\n\n is_masked: masking_key.is_some(),\n\n payload_len: text.len(),\n\n masking_key: masking_key.unwrap_or(0),\n\n },\n\n payload: masked_text,\n\n }\n\n}\n", "file_path": "src/ws_frame.rs", "rank": 4, "score": 52997.97634065099 }, { "content": "#[derive(Debug)]\n\nenum WebSocketState {\n\n Http(),\n\n Upgrade(String),\n\n Connected(),\n\n}\n\n\n\nimpl<T: AsyncRead + AsyncWrite + 'static> ServerProto<T> for WebSocket {\n\n type Request = Request;\n\n type Response = Response;\n\n type Transport = Framed<T, WebSocketCodec>;\n\n type BindTransport = io::Result<Framed<T, WebSocketCodec>>;\n\n\n\n fn bind_transport(&self, io: T) -> io::Result<Framed<T, WebSocketCodec>> {\n\n Ok(io.framed(WebSocketCodec::new()))\n\n }\n\n}\n\n\n\npub struct WebSocketCodec {\n\n state: WebSocketState,\n\n http_codec: HttpCodec,\n", "file_path": "src/lib.rs", "rank": 5, "score": 37311.67231993856 }, { "content": "enum ParseResult<T> {\n\n Complete(T, usize),\n\n Partial,\n\n}\n\n\n", 
"file_path": "src/ws_request.rs", "rank": 6, "score": 34727.853631677266 }, { "content": "fn mask_bytes(masking_key: u32, bytes: &[u8]) -> Vec<u8> {\n\n let mut i = 0;\n\n let masking_keys = [\n\n ((masking_key & 0xff000000) >> 24) as u8,\n\n ((masking_key & 0x00ff0000) >> 16) as u8,\n\n ((masking_key & 0x0000ff00) >> 8) as u8,\n\n (masking_key & 0x000000ff) as u8,\n\n ];\n\n let mut masked = Vec::new();\n\n masked.reserve(bytes.len());\n\n for b in bytes.iter() {\n\n masked.push(b ^ masking_keys[i]);\n\n i = (i + 1) % 4;\n\n }\n\n masked\n\n}\n\n\n", "file_path": "src/ws_frame.rs", "rank": 7, "score": 34341.71237478749 }, { "content": "fn main() {\n\n let addr = \"0.0.0.0:8084\".parse().unwrap();\n\n TcpServer::new(WebSocket, addr)\n\n .serve(|| Ok(HelloWorld));\n\n}\n", "file_path": "examples/hello-world.rs", "rank": 9, "score": 30672.726096317965 }, { "content": "fn response_len(msg: &Response) -> usize {\n\n let mut len = 2 + msg.header.payload_len;\n\n if msg.header.payload_len >= 126 && msg.header.payload_len < 65536 {\n\n len += 2;\n\n } else if msg.header.payload_len >= 65536 {\n\n len += 8;\n\n }\n\n\n\n if msg.header.is_masked {\n\n len += 4;\n\n }\n\n len\n\n}\n", "file_path": "src/ws_response.rs", "rank": 10, "score": 24593.59762512258 }, { "content": "fn hash_key(b64_key: &str) -> String {\n\n let mut input = b64_key.to_string();\n\n input.push_str(\"258EAFA5-E914-47DA-95CA-C5AB0DC85B11\");\n\n let sha_input = hash_sha1(&input);\n\n base64::encode(sha_input.as_ref())\n\n}\n\n\n", "file_path": "src/ws_response.rs", "rank": 11, "score": 23942.210783306575 }, { "content": "#[bench]\n\nfn bench_broadcast_encode(b: &mut test::Bencher) {\n\n b.iter(|| {\n\n let frame = websocket::new_text_frame(\"{\\\"type\\\":\\\"broadcast\\\",\\\"payload\\\":{\\\"foo\\\": \\\"bar\\\"}}\", Some(0x11223344));\n\n let mut buf = bytes::BytesMut::with_capacity(0);\n\n websocket::encode(frame, &mut buf)\n\n });\n\n}\n\n\n", "file_path": "benches/encode.rs", "rank": 12, "score": 
23933.759908673233 }, { "content": "fn hash_sha1(input: &str) -> digest::Digest {\n\n let mut ctx = digest::Context::new(&digest::SHA1);\n\n ctx.update(input.as_bytes());\n\n ctx.finish()\n\n}\n\n\n", "file_path": "src/ws_response.rs", "rank": 13, "score": 23282.373066857224 }, { "content": "pub fn encode(msg: Response, buf: &mut BytesMut) {\n\n buf.reserve(response_len(&msg));\n\n buf.put(0u8);\n\n buf.put(0u8);\n\n if msg.header.is_final {\n\n buf[0] |= 0x80;\n\n }\n\n let op_u8 = opcode_to_u8(msg.header.opcode);\n\n buf[0] |= op_u8;\n\n if msg.header.is_masked {\n\n buf[1] |= 0x80;\n\n }\n\n if msg.header.payload_len < 126 {\n\n buf[1] |= msg.header.payload_len as u8;\n\n } else if msg.header.payload_len < 65536 {\n\n buf[1] |= 0x7e;\n\n buf.put_u16::<BigEndian>(msg.header.payload_len as u16);\n\n } else {\n\n buf[1] |= 0x7f;\n\n buf.put_u64::<BigEndian>(msg.header.payload_len as u64);\n\n }\n\n if msg.header.is_masked {\n\n buf.put_u32::<BigEndian>(msg.header.masking_key);\n\n }\n\n buf.put_slice(msg.payload.as_slice());\n\n}\n\n\n", "file_path": "src/ws_response.rs", "rank": 14, "score": 21060.38612861158 }, { "content": "pub fn make_accept(b64_key: &str) -> tokio_minihttp::Response {\n\n let mut res = tokio_minihttp::Response::new();\n\n // HTTP/1.1 101 Switching Protocols\n\n // Upgrade: websocket\n\n // Connection: Upgrade\n\n // Sec-WebSocket-Accept: key thing\n\n res.status_code(101, \"Switching Protocols\");\n\n res.header(\"Upgrade\", \"websocket\");\n\n res.header(\"Connection\", \"Upgrade\");\n\n res.header(\"Sec-WebSocket-Accept\", &hash_key(&b64_key));\n\n res\n\n}\n", "file_path": "src/ws_response.rs", "rank": 15, "score": 21050.385327308937 }, { "content": "use std::string;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Frame {\n\n pub header: Header,\n\n pub payload: Vec<u8>,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Opcode {\n\n Continuation,\n\n Text,\n\n Binary,\n\n Close,\n\n Ping,\n\n Pong,\n\n}\n\n\n\n#[derive(Debug, 
Clone)]\n\npub struct Header {\n", "file_path": "src/ws_frame.rs", "rank": 16, "score": 20851.161880874082 }, { "content": " pub is_final: bool,\n\n pub opcode: Opcode,\n\n pub is_masked: bool,\n\n pub payload_len: usize,\n\n pub masking_key: u32,\n\n}\n\n\n\nimpl Frame {\n\n pub fn payload_string(&self) -> Result<String, string::FromUtf8Error> {\n\n if !self.header.is_masked {\n\n return String::from_utf8(self.payload.clone())\n\n }\n\n String::from_utf8(mask_bytes(self.header.masking_key, &self.payload))\n\n }\n\n}\n\n\n", "file_path": "src/ws_frame.rs", "rank": 17, "score": 20850.69502581866 }, { "content": "pub fn decode(buf: &mut BytesMut) -> io::Result<Option<Request>> {\n\n // This is after the successful upgrade\n\n // Parse header\n\n let (header, offset) = match try!(parse_header(buf)) {\n\n ParseResult::Complete(h, offset) => (h, offset),\n\n ParseResult::Partial => return Ok(None),\n\n };\n\n if header.payload_len + offset > buf.len() {\n\n return Ok(None);\n\n }\n\n // Discard header data\n\n buf.split_to(offset);\n\n let payload = buf.split_to(header.payload_len).to_vec();\n\n\n\n Ok(Some(Request::Frame(Frame {\n\n header: header,\n\n payload: payload,\n\n })))\n\n}\n", "file_path": "src/ws_request.rs", "rank": 18, "score": 19243.586365113013 }, { "content": "fn parse_header(buf: &mut BytesMut) -> io::Result<ParseResult<Header>> {\n\n if buf.len() < 2 {\n\n return Ok(ParseResult::Partial);\n\n }\n\n let is_final = buf[0] & 0x80 > 0;\n\n let opcode = match u8_to_opcode(buf[0] & 0x0f) {\n\n Some(op) => op,\n\n None => return Err(io::Error::new(io::ErrorKind::Other, \"invalid opcode\")),\n\n };\n\n let is_masked = buf[1] & 0x80 > 0;\n\n let (payload_len, buf_offset) = match buf[1] & 0x7f {\n\n 126 => {\n\n if buf.len() < 4 {\n\n return Err(io::Error::new(io::ErrorKind::Other, \"not enough bytes\"));\n\n }\n\n let len = BigEndian::read_u16(&buf[2..]) as usize;\n\n (len, 4)\n\n }\n\n 127 => {\n\n if buf.len() < 6 {\n", "file_path": "src/ws_request.rs", 
"rank": 19, "score": 19195.653193368515 }, { "content": "tokio-websocket\n\n===============\n\n\n\nAn experimental, mostly unfinished implementation of the WebSocket protocol\n\nusing tokio.\n\n\n\nVery much a work in progress\n\n\n", "file_path": "README.md", "rank": 20, "score": 13762.611971034079 }, { "content": "extern crate futures;\n\nextern crate tokio_proto;\n\nextern crate tokio_service;\n\nextern crate websocket;\n\n\n\nuse std::io;\n\n\n\nuse futures::future;\n\nuse websocket::{Request, Response, WebSocket, new_text_frame};\n\nuse tokio_proto::TcpServer;\n\nuse tokio_service::Service;\n\n\n", "file_path": "examples/hello-world.rs", "rank": 22, "score": 13.395277791609947 }, { "content": "extern crate base64;\n\nextern crate bytes;\n\nextern crate ring;\n\nextern crate tokio_core;\n\nextern crate tokio_io;\n\nextern crate tokio_minihttp;\n\nextern crate tokio_proto;\n\nextern crate tokio_service;\n\n\n\nuse std::io;\n\nuse bytes::BytesMut;\n\nuse tokio_io::codec::{Encoder, Decoder, Framed};\n\nuse tokio_io::{AsyncRead, AsyncWrite};\n\nuse tokio_proto::pipeline::ServerProto;\n\nuse tokio_minihttp::HttpCodec;\n\n\n\nmod ws_frame;\n\nmod ws_request;\n\nmod ws_response;\n\n\n\npub use ws_request::{Request, decode};\n\npub use ws_response::{Response, encode};\n\npub use ws_frame::{new_text_frame, Opcode, Frame};\n\n\n\npub struct WebSocket;\n\n\n\n#[derive(Debug)]\n", "file_path": "src/lib.rs", "rank": 23, "score": 9.965908559297631 }, { "content": "use std::io;\n\n\n\nuse bytes::{BytesMut, BigEndian, ByteOrder};\n\n\n\nuse ws_frame::{Frame, Header, u8_to_opcode};\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n extern crate tokio_core;\n\n\n\n use ws_frame::Opcode;\n\n use super::*;\n\n\n\n\n\n #[test]\n\n fn fin_bin_unmasked_empty() {\n\n let data = vec![\n\n 0x80u8 + 0x02u8, // fin bin\n\n 0x00u8 + 0x00u8, // unmasked empty\n\n ];\n", "file_path": "src/ws_request.rs", "rank": 24, "score": 8.714717095422081 }, { "content": "#![feature(test)]\n\n\n\nextern crate 
bytes;\n\nextern crate test;\n\nextern crate websocket;\n\n\n\n#[bench]\n", "file_path": "benches/encode.rs", "rank": 25, "score": 7.726089775731866 }, { "content": "use base64;\n\nuse bytes::{BytesMut, BufMut, BigEndian};\n\nuse tokio_minihttp;\n\nuse ring::digest;\n\nuse ws_frame::{Frame, opcode_to_u8};\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use ws_frame::{Opcode, Header, new_text_frame};\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_hash_key_rfc_example() {\n\n assert_eq!(hash_key(\"dGhlIHNhbXBsZSBub25jZQ==\"),\n\n \"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=\");\n\n }\n\n\n\n #[test]\n\n fn fin_bin_unmasked_empty() {\n", "file_path": "src/ws_response.rs", "rank": 27, "score": 5.663952709281398 }, { "content": " 5u8];\n\n let mut buf = BytesMut::with_capacity(0);\n\n encode(frame, &mut buf);\n\n\n\n assert_eq!(buf, expected_data);\n\n }\n\n\n\n #[test]\n\n fn con_con_masked_medium_payload() {\n\n let payload = vec![5u8; 256]; // length 0x100\n\n let frame = Frame {\n\n header: Header {\n\n is_final: false,\n\n opcode: Opcode::Continuation,\n\n is_masked: true,\n\n payload_len: 256,\n\n masking_key: 0x11121314,\n\n },\n\n payload: payload.clone(),\n\n };\n", "file_path": "src/ws_response.rs", "rank": 28, "score": 5.60431918748181 }, { "content": "\n\n #[test]\n\n fn fin_bin_unmasked_small_payload() {\n\n let frame = Frame {\n\n header: Header {\n\n is_final: true,\n\n opcode: Opcode::Binary,\n\n is_masked: false,\n\n payload_len: 5,\n\n masking_key: 0,\n\n },\n\n payload: vec![1, 2, 3, 4, 5],\n\n };\n\n\n\n let expected_data = vec![0x80u8 + 0x02u8, // fin bin\n\n 0x00u8 + 0x05u8, // unmasked 5 long\n\n 1u8,\n\n 2u8,\n\n 3u8,\n\n 4u8,\n", "file_path": "src/ws_response.rs", "rank": 29, "score": 5.4056688737605505 }, { "content": " let frame = Frame {\n\n header: Header {\n\n is_final: false,\n\n opcode: Opcode::Continuation,\n\n is_masked: true,\n\n payload_len: 65536,\n\n masking_key: 0x11121314,\n\n },\n\n payload: payload.clone(),\n\n };\n\n\n\n\n\n let mut expected_data 
= vec![0x00u8 + 0x00u8, // continuation continuation\n\n 0x80u8 + 0x7fu8, // masked 64 bit length\n\n 0x00, // payload length = 0x00 00 00 00 00 01 00 00\n\n 0x00,\n\n 0x00,\n\n 0x00,\n\n 0x00,\n\n 0x01,\n", "file_path": "src/ws_response.rs", "rank": 30, "score": 5.381354391228376 }, { "content": " let frame = Frame {\n\n header: Header {\n\n is_final: true,\n\n opcode: Opcode::Binary,\n\n is_masked: false,\n\n payload_len: 0,\n\n masking_key: 0,\n\n },\n\n payload: Vec::new(),\n\n };\n\n\n\n let expected_data = vec![\n\n 0x80u8 + 0x02u8, // fin bin\n\n 0x00u8 + 0x00u8, // unmasked empty\n\n ];\n\n let mut buf = BytesMut::with_capacity(0);\n\n encode(frame, &mut buf);\n\n\n\n assert_eq!(buf, expected_data);\n\n }\n", "file_path": "src/ws_response.rs", "rank": 31, "score": 5.1559151875124005 }, { "content": " 0x12,\n\n 0x13,\n\n 0x14];\n\n let payload = [5u8; 256]; // length 0x100\n\n data.extend(payload.iter());\n\n let mut buf = BytesMut::from(data);\n\n let req = match decode(&mut buf) {\n\n Ok(Some(Request::Frame(req))) => req,\n\n e => panic!(\"decode failed: {:?}\", e),\n\n };\n\n assert!(!req.header.is_final);\n\n assert_eq!(req.header.opcode, Opcode::Continuation);\n\n assert!(req.header.is_masked);\n\n assert_eq!(req.header.payload_len, 256);\n\n assert_eq!(req.payload.len(), 256);\n\n for i in 0..256 {\n\n assert_eq!(payload[i], req.payload[i]);\n\n }\n\n }\n\n\n", "file_path": "src/ws_request.rs", "rank": 33, "score": 4.585984509831325 }, { "content": " Ok(Some(req)) => {\n\n for (header, value) in req.headers() {\n\n if header == \"Sec-WebSocket-Key\" {\n\n let value_str = String::from_utf8(value.to_vec()).unwrap();\n\n self.state = WebSocketState::Upgrade(value_str);\n\n let req = Request::Open();\n\n return Ok(Some(req));\n\n }\n\n }\n\n Ok(None)\n\n }\n\n _ => Ok(None),\n\n }\n\n }\n\n _ => ws_request::decode(buf),\n\n }\n\n }\n\n}\n\n\n\nimpl Encoder for WebSocketCodec {\n", "file_path": "src/lib.rs", "rank": 34, "score": 4.575714706426997 }, { 
"content": " Ok(Some(Request::Frame(req))) => req,\n\n e => panic!(\"decode failed: {:?}\", e),\n\n };\n\n assert!(!req.header.is_final);\n\n assert_eq!(req.header.opcode, Opcode::Continuation);\n\n assert!(req.header.is_masked);\n\n assert_eq!(req.header.payload_len, 65536);\n\n assert_eq!(req.payload.len(), 65536);\n\n for i in 0..65536 {\n\n assert_eq!(payload[i], req.payload[i]);\n\n }\n\n }\n\n\n\n #[test]\n\n fn afl_crash_0() {\n\n let data = vec![0x12, 0xff, 0xff, 0xff, 0x7f, 0x01, 0x06, 0xff, 0x7f, 0x00];\n\n let mut buf = BytesMut::from(data);\n\n let _ = decode(&mut buf);\n\n }\n\n\n", "file_path": "src/ws_request.rs", "rank": 35, "score": 4.305698614503942 }, { "content": " 5u8];\n\n let mut buf = BytesMut::from(data);\n\n let req = match decode(&mut buf) {\n\n Ok(Some(Request::Frame(req))) => req,\n\n _ => panic!(\"decode failed\"),\n\n };\n\n assert!(req.header.is_final);\n\n assert_eq!(req.header.opcode, Opcode::Binary);\n\n assert!(!req.header.is_masked);\n\n assert_eq!(req.header.payload_len, 5);\n\n assert_eq!(req.payload, vec![1, 2, 3, 4, 5]);\n\n }\n\n\n\n #[test]\n\n fn con_con_masked_medium_payload() {\n\n let mut data = vec![0x00u8 + 0x00u8, // continuation continuation\n\n 0x80u8 + 0x7eu8, // masked 16 bit length\n\n 0x01, // payload length = 0x01 00\n\n 0x00,\n\n 0x11, // maskingKey = 0x11121314\n", "file_path": "src/ws_request.rs", "rank": 36, "score": 4.061147651325962 }, { "content": " let mut buf = BytesMut::from(data);\n\n let req = match decode(&mut buf) {\n\n Ok(Some(Request::Frame(req))) => req,\n\n _ => panic!(\"decode failed\"),\n\n };\n\n\n\n assert!(req.header.is_final);\n\n assert_eq!(req.header.opcode, Opcode::Binary);\n\n assert!(!req.header.is_masked);\n\n assert_eq!(req.header.payload_len, 0);\n\n }\n\n\n\n #[test]\n\n fn fin_bin_unmasked_small_payload() {\n\n let data = vec![0x80u8 + 0x02u8, // fin bin\n\n 0x00u8 + 0x05u8, // unmasked 5 long\n\n 1u8,\n\n 2u8,\n\n 3u8,\n\n 4u8,\n", "file_path": "src/ws_request.rs", "rank": 
37, "score": 3.9798020901227407 }, { "content": " ];\n\n expected_data.extend(text.as_bytes());\n\n let ttf = new_text_frame(text, None);\n\n let mut buf = BytesMut::with_capacity(0);\n\n encode(ttf, &mut buf);\n\n assert_eq!(buf, expected_data);\n\n }\n\n\n\n #[test]\n\n fn tiny_text_frame_masked() {\n\n let text = \"blub\";\n\n let expected_start = [\n\n 0x81u8,\n\n 0x84u8\n\n ];\n\n let ttf = new_text_frame(text, Some(0x11121314));\n\n assert_eq!(ttf.clone().payload_string().unwrap(), text);\n\n let mut buf = BytesMut::with_capacity(128);\n\n encode(ttf, &mut buf);\n\n\n\n assert_eq!(buf[0], expected_start[0]);\n\n assert_eq!(buf[1], expected_start[1]);\n\n }\n\n}\n\n\n\npub type Response = Frame;\n\n\n", "file_path": "src/ws_response.rs", "rank": 38, "score": 3.6590851540201292 }, { "content": "\n\n let mut expected_data = vec![0x00u8 + 0x00u8, // continuation continuation\n\n 0x80u8 + 0x7eu8, // masked 16 bit length\n\n 0x01, // payload length = 0x01 00\n\n 0x00,\n\n 0x11, // maskingKey = 0x11121314\n\n 0x12,\n\n 0x13,\n\n 0x14];\n\n expected_data.extend(payload.iter());\n\n\n\n let mut buf = BytesMut::with_capacity(0);\n\n encode(frame, &mut buf);\n\n\n\n assert_eq!(buf, expected_data);\n\n }\n\n\n\n #[test]\n\n fn con_con_masked_large_payload() {\n\n let payload = vec![5u8; 65536]; // length 0x10000\n", "file_path": "src/ws_response.rs", "rank": 39, "score": 2.9360053605136054 }, { "content": " return Err(io::Error::new(io::ErrorKind::Other, \"not enough bytes\"));\n\n }\n\n let len = BigEndian::read_u64(&buf[2..]) as usize;\n\n (len, 10)\n\n }\n\n x => (x as usize, 2),\n\n };\n\n\n\n let (masking_key, buf_offset) = if is_masked {\n\n if buf.len() < buf_offset + 4 {\n\n return Err(io::Error::new(io::ErrorKind::Other, \"not enough bytes\"));\n\n }\n\n (BigEndian::read_u32(&buf[buf_offset..]), buf_offset + 4)\n\n } else {\n\n (0, buf_offset)\n\n };\n\n\n\n Ok(ParseResult::Complete(Header {\n\n is_final: is_final,\n\n opcode: opcode,\n\n is_masked: 
is_masked,\n\n payload_len: payload_len,\n\n masking_key: masking_key,\n\n },\n\n buf_offset))\n\n}\n\n\n", "file_path": "src/ws_request.rs", "rank": 40, "score": 2.8364996268271634 }, { "content": " 0x00,\n\n 0x00,\n\n 0x11, // maskingKey = 0x11121314\n\n 0x12,\n\n 0x13,\n\n 0x14];\n\n expected_data.extend(payload.iter());\n\n\n\n let mut buf = BytesMut::with_capacity(0);\n\n encode(frame, &mut buf);\n\n\n\n assert_eq!(buf, expected_data);\n\n }\n\n\n\n #[test]\n\n fn tiny_text_frame() {\n\n let text = \"blub\";\n\n let mut expected_data = vec![\n\n 0x81u8,\n\n 0x04u8\n", "file_path": "src/ws_response.rs", "rank": 41, "score": 2.765808217281369 }, { "content": " #[test]\n\n fn con_con_masked_large_payload() {\n\n let mut data = vec![0x00u8 + 0x00u8, // continuation continuation\n\n 0x80u8 + 0x7fu8, // masked 64 bit length\n\n 0x00, // payload length = 0x00 00 00 00 00 01 00 00\n\n 0x00,\n\n 0x00,\n\n 0x00,\n\n 0x00,\n\n 0x01,\n\n 0x00,\n\n 0x00,\n\n 0x11, // maskingKey = 0x11121314\n\n 0x12,\n\n 0x13,\n\n 0x14];\n\n let payload = [5u8; 65536]; // length 0x10000\n\n data.extend(payload.iter());\n\n let mut buf = BytesMut::from(data);\n\n let req = match decode(&mut buf) {\n", "file_path": "src/ws_request.rs", "rank": 42, "score": 1.9963306512812786 }, { "content": " }\n\n\n\n #[test]\n\n fn afl_crash_7() {\n\n let data = vec![0x8a, 0x7e, 0x62];\n\n let mut buf = BytesMut::from(data);\n\n let _ = decode(&mut buf);\n\n }\n\n\n\n #[test]\n\n fn afl_crash_8() {\n\n let data = vec![0xf1, 0xfe, 0xd5, 0xd5, 0xfe, 0x81];\n\n let mut buf = BytesMut::from(data);\n\n let _ = decode(&mut buf);\n\n }\n\n\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum Request {\n\n Open(),\n\n Frame(Frame),\n\n}\n\n\n", "file_path": "src/ws_request.rs", "rank": 43, "score": 1.521708167478268 } ]
Rust
src/args.rs
JoshMcguigan/waysay
7d2744d8de03d5a261ea4905efe67584ec8f07d3
#[derive(Clone)] pub struct Args { pub message: String, pub buttons: Vec<ArgButton>, pub message_type: String, pub detailed_message: bool, pub detailed_message_contents: String, } #[derive(Clone)] pub struct ArgButton { pub text: String, pub action: String, } pub fn parse(args: impl Iterator<Item = String>) -> Result<Args, String> { let mut message = None; let mut message_type = None; let mut buttons = vec![]; let mut detailed_message = false; let mut args = args.skip(1); loop { match args.next().as_deref() { Some("-m") | Some("--message") => { let message_arg = args.next(); if message_arg.is_some() { message = message_arg; } else { return Err("missing required arg message (-m/--message)".into()); } } Some("-t") | Some("--type") => { let message_type_arg = args.next(); if message_type_arg.is_some() { message_type = message_type_arg; } else { return Err("missing required arg type (-t/--type)".into()); } } Some("-l") | Some("--detailed-message") => { detailed_message = true; } Some("-b") | Some("--button") | Some("-B") | Some("--button-no-terminal") => { let text = args.next(); let action = args.next(); match (text, action) { (Some(text), Some(action)) => buttons.push(ArgButton { text, action }), (None, _) => return Err("button missing text".into()), (Some(_), None) => return Err("button missing action".into()), } } Some(arg) => return Err(format!("invalid arg '{}'", arg)), None => break, } } if let Some(message) = message { Ok(Args { message, buttons, message_type: message_type.unwrap_or_else(|| "error".into()), detailed_message, detailed_message_contents: String::new(), }) } else { Err("missing required arg message (-m/--message)".into()) } } #[cfg(test)] mod tests { use super::parse; #[test] fn no_args() { let input = vec!["waysay".into()]; assert_eq!( "missing required arg message (-m/--message)", parse(input.into_iter()).err().unwrap(), ); } #[test] fn unsupported_arg() { let input = vec!["waysay".into(), "--not-a-real-thing".into()]; assert_eq!( "invalid arg 
'--not-a-real-thing'", parse(input.into_iter()).err().unwrap(), ); } #[test] fn message_short_flag() { let input = vec!["waysay".into(), "-m".into(), "hello from waysay".into()]; let args = parse(input.into_iter()).unwrap(); assert_eq!("hello from waysay", args.message,); } #[test] fn message_long_flag() { let input = vec![ "waysay".into(), "--message".into(), "hello from waysay".into(), ]; let args = parse(input.into_iter()).unwrap(); assert_eq!("hello from waysay", args.message,); } }
#[derive(Clone)] pub struct Args { pub message: String, pub buttons: Vec<ArgButton>, pub message_type: String, pub detailed_message: bool, pub detailed_message_contents: String, } #[derive(Clone)] pub struct ArgButton { pub text: String, pub action: String, } pub fn parse(args: impl Iterator<Item = String>) -> Result<Args, String> { let mut message = None; let mut message_type = None; let mut buttons = vec![]; let mut detailed_message = false; let mut args = args.skip(1); loop { match args.next().as_deref() { Some("-m") | Some("--message") => { let message_arg = args.next(); if message_arg.is_some() { message = message_arg; } else { return Err("missing required arg message (-m/--message)".into()); } } Some("-t") | Some("--type") => { let message_type_arg = args.next(); if message_type_arg.is_some() { message_type = message_type_arg; } else { return Err("missing required arg type (-t/--type)".into()); } } Some("-l") | Some("--detailed-message") => { detailed_message = true; } Some("-b") | Some("--button") | Some("-B") | Some("--button-no-terminal") => { let text = args.next(); let action = args.next(); match (text, action) { (Some(text), Some(action)) => buttons.push(ArgButton { text, action }), (None, _) => return Err("button missing text".into()), (Some(_), None) => return Err("button missing action".into()), } } Some(arg) => return Err(format!("invalid arg '{}'", arg)), None => break, } } if let Some(message) = message {
} else { Err("missing required arg message (-m/--message)".into()) } } #[cfg(test)] mod tests { use super::parse; #[test] fn no_args() { let input = vec!["waysay".into()]; assert_eq!( "missing required arg message (-m/--message)", parse(input.into_iter()).err().unwrap(), ); } #[test] fn unsupported_arg() { let input = vec!["waysay".into(), "--not-a-real-thing".into()]; assert_eq!( "invalid arg '--not-a-real-thing'", parse(input.into_iter()).err().unwrap(), ); } #[test] fn message_short_flag() { let input = vec!["waysay".into(), "-m".into(), "hello from waysay".into()]; let args = parse(input.into_iter()).unwrap(); assert_eq!("hello from waysay", args.message,); } #[test] fn message_long_flag() { let input = vec![ "waysay".into(), "--message".into(), "hello from waysay".into(), ]; let args = parse(input.into_iter()).unwrap(); assert_eq!("hello from waysay", args.message,); } }
Ok(Args { message, buttons, message_type: message_type.unwrap_or_else(|| "error".into()), detailed_message, detailed_message_contents: String::new(), })
call_expression
[ { "content": "struct Surface {\n\n args: Args,\n\n surface: wl_surface::WlSurface,\n\n layer_surface: Main<zwlr_layer_surface_v1::ZwlrLayerSurfaceV1>,\n\n next_render_event: Rc<Cell<Option<RenderEvent>>>,\n\n pools: DoubleMemPool,\n\n dimensions: (u32, u32),\n\n /// X, Y coordinates of current cursor position\n\n pointer_location: Option<(f64, f64)>,\n\n /// User requested exit\n\n should_exit: bool,\n\n click_targets: Vec<ClickTarget>,\n\n font_data: Vec<u8>,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 1, "score": 25637.321509196212 }, { "content": "fn main() {\n\n let mut args = match args::parse(env::args()) {\n\n Ok(args) => args,\n\n Err(message) => {\n\n eprintln!(\"{}\", message);\n\n\n\n process::exit(1);\n\n }\n\n };\n\n\n\n if args.detailed_message {\n\n let result = io::stdin().read_to_string(&mut args.detailed_message_contents);\n\n\n\n // Don't fail if we can't read this into a string, just print a message\n\n // for debugging purposes.\n\n if let Err(e) = result {\n\n eprintln!(\"WARN: failed to read detailed message from stdin {}\", e);\n\n }\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 2, "score": 25637.321509196212 }, { "content": "struct ClickTarget {\n\n position: (usize, usize),\n\n size: (usize, usize),\n\n handler: ClickHandler,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 24436.21117076028 }, { "content": " let (position, size) = draw_button(\"x\".into(), &self.font_data, &mut canvas);\n\n let click_target = ClickTarget {\n\n position,\n\n size,\n\n handler: ClickHandler::Exit,\n\n };\n\n self.click_targets.push(click_target);\n\n\n\n for button in self.args.buttons.iter().cloned() {\n\n let (position, size) = draw_button(button.text, &self.font_data, &mut canvas);\n\n let click_target = ClickTarget {\n\n position,\n\n size,\n\n handler: ClickHandler::RunCommand(button.action),\n\n };\n\n self.click_targets.push(click_target);\n\n }\n\n\n\n // Draw message\n\n let text = text::Text::new(\n", "file_path": 
"src/main.rs", "rank": 9, "score": 8.407642066932118 }, { "content": " font_data,\n\n }\n\n }\n\n\n\n /// Handles any events that have occurred since the last call, redrawing if needed.\n\n /// Returns true if the surface should be dropped.\n\n fn handle_events(&mut self) -> bool {\n\n match self.next_render_event.take() {\n\n Some(RenderEvent::Closed) => true,\n\n Some(RenderEvent::Configure { width, height }) => {\n\n self.dimensions = (width, height);\n\n self.draw();\n\n false\n\n }\n\n None => self.should_exit,\n\n }\n\n }\n\n\n\n fn handle_pointer_event(&mut self, event: &wl_pointer::Event) {\n\n match event {\n", "file_path": "src/main.rs", "rank": 10, "score": 8.241355315996069 }, { "content": " }\n\n }\n\n\n\n match matching_click_handler {\n\n Some(ClickHandler::Exit) => self.should_exit = true,\n\n Some(ClickHandler::RunCommand(cmd)) => {\n\n match Command::new(\"/bin/sh\").arg(\"-c\").arg(cmd).spawn() {\n\n Ok(_) => (),\n\n Err(e) => eprintln!(\"{:?}\", e),\n\n }\n\n }\n\n None => {}\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n fn draw(&mut self) {\n\n let pool = match self.pools.pool() {\n", "file_path": "src/main.rs", "rank": 11, "score": 8.080628894200702 }, { "content": " height as usize,\n\n 4 * width as usize,\n\n andrew::Endian::native(),\n\n );\n\n\n\n // Draw background\n\n let block = rectangle::Rectangle::new(\n\n (0, 0),\n\n (width as usize, height as usize),\n\n None,\n\n Some([255, 200, 0, 0]),\n\n );\n\n canvas.draw(&block);\n\n\n\n // Draw buttons\n\n let mut right_most_pixel = width as usize;\n\n\n\n let mut draw_button = move |text: String, font_data: &[u8], canvas: &mut Canvas| {\n\n let mut text = text::Text::new((0, 0), FONT_COLOR, font_data, text_h, 1.0, text);\n\n let text_width = text.get_width();\n", "file_path": "src/main.rs", "rank": 12, "score": 7.880791467762263 }, { "content": " }\n\n }\n\n\n\n // Return early here if all surface are gone, otherwise the event loop\n\n // dispatch will panic with an error about not handling 
an event.\n\n if surfaces.borrow().is_empty() {\n\n return;\n\n }\n\n\n\n display.flush().unwrap();\n\n event_loop.dispatch(None, &mut ()).unwrap();\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 13, "score": 6.5269520508949785 }, { "content": " Some(pool) => pool,\n\n None => return,\n\n };\n\n\n\n let stride = 4 * self.dimensions.0 as i32;\n\n let width = self.dimensions.0 as i32;\n\n let height = self.dimensions.1 as i32;\n\n\n\n let vertical_padding = 2;\n\n let horizontal_padding = 10;\n\n let text_h = height as f32 / 2.;\n\n let text_hh = text_h / 2.;\n\n\n\n // First make sure the pool is the right size\n\n pool.resize((stride * height) as usize).unwrap();\n\n\n\n let mut buf: Vec<u8> = vec![255; (4 * width * height) as usize];\n\n let mut canvas = andrew::Canvas::new(\n\n &mut buf,\n\n width as usize,\n", "file_path": "src/main.rs", "rank": 14, "score": 6.293556092083796 }, { "content": " wl_pointer::Event::Enter {\n\n surface_x,\n\n surface_y,\n\n ..\n\n }\n\n | wl_pointer::Event::Motion {\n\n surface_x,\n\n surface_y,\n\n ..\n\n } => self.pointer_location = Some((*surface_x, *surface_y)),\n\n wl_pointer::Event::Button {\n\n state: ButtonState::Pressed,\n\n ..\n\n } => {\n\n let mut matching_click_handler = None;\n\n for click_target in &self.click_targets {\n\n if let Some(click_position) = self.pointer_location {\n\n if let Some(handler) = click_target.process_click(click_position) {\n\n matching_click_handler = Some(handler);\n\n }\n", "file_path": "src/main.rs", "rank": 15, "score": 5.341237335810407 }, { "content": " let button_width = text_width + 2 * horizontal_padding;\n\n let block_height = height as usize - vertical_padding * 2;\n\n let block_pos = (\n\n right_most_pixel as usize - button_width - horizontal_padding,\n\n vertical_padding,\n\n );\n\n let text_pos = (\n\n block_pos.0 + horizontal_padding,\n\n ((block_height as f32 - text_h) / 2.) 
as usize,\n\n );\n\n text.pos = text_pos;\n\n let size = (button_width as usize, block_height as usize);\n\n let block = rectangle::Rectangle::new(block_pos, size, None, Some([255, 100, 0, 0]));\n\n canvas.draw(&block);\n\n canvas.draw(&text);\n\n\n\n right_most_pixel = block_pos.0;\n\n (block_pos, size)\n\n };\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 4.808742526363574 }, { "content": " // .get_regular_family_fonts(\"monospace\")\n\n .get_fonts()\n\n .unwrap()\n\n .pop()\n\n .expect(\"should find at least one font\"),\n\n )\n\n .unwrap()\n\n .read_to_end(&mut font_data)\n\n .unwrap();\n\n\n\n Self {\n\n args,\n\n surface,\n\n layer_surface,\n\n next_render_event,\n\n pools,\n\n dimensions: (0, 0),\n\n pointer_location: None,\n\n should_exit: false,\n\n click_targets: vec![],\n", "file_path": "src/main.rs", "rank": 17, "score": 4.683837376561671 }, { "content": " env.listen_for_outputs(move |output, info, _| output_handler(output, info));\n\n\n\n let mut event_loop = calloop::EventLoop::<()>::new().unwrap();\n\n\n\n WaylandSource::new(queue)\n\n .quick_insert(event_loop.handle())\n\n .unwrap();\n\n\n\n loop {\n\n // This is ugly, let's hope that some version of drain_filter() gets stabilized soon\n\n // https://github.com/rust-lang/rust/issues/43244\n\n {\n\n let mut surfaces = surfaces.borrow_mut();\n\n let mut i = 0;\n\n while i != surfaces.len() {\n\n if surfaces[i].1.handle_events() {\n\n surfaces.remove(i);\n\n } else {\n\n i += 1;\n\n }\n", "file_path": "src/main.rs", "rank": 18, "score": 4.0504003518070855 }, { "content": " (horizontal_padding, height as usize / 2 - text_hh as usize),\n\n FONT_COLOR,\n\n &self.font_data,\n\n text_h,\n\n 1.0,\n\n &self.args.message,\n\n );\n\n canvas.draw(&text);\n\n\n\n pool.seek(SeekFrom::Start(0)).unwrap();\n\n pool.write_all(canvas.buffer).unwrap();\n\n pool.flush().unwrap();\n\n\n\n // Create a new buffer from the pool\n\n let buffer = pool.buffer(0, width, height, stride, 
wl_shm::Format::Argb8888);\n\n\n\n // Attach the buffer to the surface and mark the entire surface as damaged\n\n self.surface.attach(Some(&buffer), 0, 0);\n\n self.surface\n\n .damage_buffer(0, 0, width as i32, height as i32);\n", "file_path": "src/main.rs", "rank": 19, "score": 3.6713001447484888 }, { "content": "# waysay\n\n\n\n![image](https://user-images.githubusercontent.com/22216761/85361418-b6b90f00-b4d0-11ea-9beb-6ffc34f26253.png)\n\n\n\nwaysay is a native wayland client for providing desktop notifications. It aims to be a drop in replacement for swaynag.\n\n\n\n## Usage\n\n\n\n```bash\n\nwaysay --message \"Hello, world!\"\n\n\n\n# add buttons to allow the user to trigger an action\n\nwaysay --message \"Do it?\" \\\n\n --button \"Yes\" \"echo 'I did it'\" \\\n\n --button \"No\" \"echo 'I did not do it'\"\n\n```\n\n\n\nTo use waysay as a swaynag replacement, add the following line to your sway config:\n\n\n\n```\n\nswaynag_command waysay\n\n```\n\n\n\n## waysay vs swaynag\n\n\n\nMost users will be better off using swaynag. 
Use waysay if you are interested in using Rust to write native wayland clients, and/or you want to support the Rust wayland/GUI ecosystem.\n\n\n\n#### Missing features\n\n\n\nSeveral swaynag features have not yet been implemented:\n\n\n\n* Display of detailed message\n\n* configuration and theming\n\n\n\nOverall waysay is quite rough around the edges at this point, although it is usable.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 20, "score": 3.4185761173030245 }, { "content": "\n\n // Finally, commit the surface\n\n self.surface.commit();\n\n }\n\n}\n\n\n\nimpl Drop for Surface {\n\n fn drop(&mut self) {\n\n self.layer_surface.destroy();\n\n self.surface.destroy();\n\n }\n\n}\n\n\n\nimpl ClickTarget {\n\n fn process_click(&self, click_position: (f64, f64)) -> Option<ClickHandler> {\n\n let (click_x, click_y) = click_position;\n\n let (position_x, position_y) = (self.position.0 as f64, self.position.1 as f64);\n\n let (size_x, size_y) = (self.size.0 as f64, self.size.1 as f64);\n\n\n\n if click_x >= position_x\n", "file_path": "src/main.rs", "rank": 21, "score": 2.853936987259141 }, { "content": " // TODO\n\n // handle type warn vs error\n\n\n\n let (env, display, queue) =\n\n init_default_environment!(Env, fields = [layer_shell: SimpleGlobal::new(),])\n\n .expect(\"Initial roundtrip failed!\");\n\n\n\n let surfaces = Rc::new(RefCell::new(Vec::new()));\n\n\n\n let layer_shell = 
env.require_global::<zwlr_layer_shell_v1::ZwlrLayerShellV1>();\n\n\n\n let env_handle = env.clone();\n\n let surfaces_handle = Rc::clone(&surfaces);\n\n let output_handler = move |output: wl_output::WlOutput, info: &OutputInfo| {\n\n if info.obsolete {\n\n // an output has been removed, release it\n\n surfaces_handle.borrow_mut().retain(|(i, _)| *i != info.id);\n\n output.release();\n\n } else {\n\n // an output has been created, construct a surface for it\n", "file_path": "src/main.rs", "rank": 22, "score": 2.3865353210145916 }, { "content": "use andrew::{\n\n shapes::rectangle,\n\n text::{self, fontconfig},\n\n Canvas,\n\n};\n\n\n\nuse smithay_client_toolkit::{\n\n default_environment,\n\n environment::SimpleGlobal,\n\n init_default_environment,\n\n output::{with_output_info, OutputInfo},\n\n reexports::{\n\n calloop,\n\n client::protocol::{\n\n wl_output,\n\n wl_pointer::{self, ButtonState},\n\n wl_shm, wl_surface,\n\n },\n\n client::{Attached, Main},\n\n protocols::wlr::unstable::layer_shell::v1::client::{\n", "file_path": "src/main.rs", "rank": 23, "score": 2.1919780429864124 }, { "content": " serial,\n\n width,\n\n height,\n\n },\n\n next,\n\n ) if next != Some(RenderEvent::Closed) => {\n\n layer_surface.ack_configure(serial);\n\n next_render_event_handle.set(Some(RenderEvent::Configure { width, height }));\n\n }\n\n (_, _) => {}\n\n }\n\n });\n\n\n\n // Commit so that the server will send a configure event\n\n surface.commit();\n\n\n\n let mut font_data = Vec::new();\n\n std::fs::File::open(\n\n &fontconfig::FontConfig::new()\n\n .expect(\"failed to find font config file\")\n", "file_path": "src/main.rs", "rank": 24, "score": 2.0694089489447096 }, { "content": " zwlr_layer_shell_v1, zwlr_layer_surface_v1,\n\n },\n\n },\n\n seat,\n\n shm::DoubleMemPool,\n\n WaylandSource,\n\n};\n\n\n\nuse std::{\n\n cell::{Cell, RefCell},\n\n env,\n\n io::{self, Read, Seek, SeekFrom, Write},\n\n process::{self, Command},\n\n rc::Rc,\n\n};\n\n\n\nmod args;\n\nuse 
args::Args;\n\n\n\nconst FONT_COLOR: [u8; 4] = [255, 255, 255, 255];\n", "file_path": "src/main.rs", "rank": 25, "score": 1.9750895801856463 }, { "content": " && click_x < position_x + size_x\n\n && click_y >= position_y\n\n && click_y < position_y + size_y\n\n {\n\n Some(self.handler.clone())\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 26, "score": 1.7819260652736533 }, { "content": " let surface = env_handle.create_surface().detach();\n\n let pools = env_handle\n\n .create_double_pool(|_| {})\n\n .expect(\"Failed to create a memory pool!\");\n\n (*surfaces_handle.borrow_mut()).push((\n\n info.id,\n\n Surface::new(args.clone(), &output, surface, &layer_shell.clone(), pools),\n\n ));\n\n }\n\n };\n\n\n\n for seat in env.get_all_seats() {\n\n if let Some(has_ptr) = seat::with_seat_data(&seat, |seat_data| {\n\n seat_data.has_pointer && !seat_data.defunct\n\n }) {\n\n if has_ptr {\n\n let pointer = seat.get_pointer();\n\n // let surface = window.surface().clone();\n\n let surfaces_handle = surfaces.clone();\n\n pointer.quick_assign(move |_, event, _| {\n", "file_path": "src/main.rs", "rank": 27, "score": 1.658152471061224 }, { "content": " );\n\n\n\n let height = 32;\n\n layer_surface.set_size(0, height);\n\n layer_surface.set_anchor(\n\n zwlr_layer_surface_v1::Anchor::Top\n\n | zwlr_layer_surface_v1::Anchor::Left\n\n | zwlr_layer_surface_v1::Anchor::Right,\n\n );\n\n layer_surface.set_exclusive_zone(height as i32);\n\n\n\n let next_render_event = Rc::new(Cell::new(None::<RenderEvent>));\n\n let next_render_event_handle = Rc::clone(&next_render_event);\n\n layer_surface.quick_assign(move |layer_surface, event, _| {\n\n match (event, next_render_event_handle.get()) {\n\n (zwlr_layer_surface_v1::Event::Closed, _) => {\n\n next_render_event_handle.set(Some(RenderEvent::Closed));\n\n }\n\n (\n\n zwlr_layer_surface_v1::Event::Configure {\n", "file_path": "src/main.rs", "rank": 28, "score": 1.4920824997861386 }, { "content": " for 
surface in (*surfaces_handle).borrow_mut().iter_mut() {\n\n // We should be filtering this down so we only pass\n\n // the event on to the appropriate surface. TODO\n\n surface.1.handle_pointer_event(&event);\n\n }\n\n });\n\n }\n\n }\n\n }\n\n\n\n // Process currently existing outputs\n\n for output in env.get_all_outputs() {\n\n if let Some(info) = with_output_info(&output, Clone::clone) {\n\n output_handler(output, &info);\n\n }\n\n }\n\n\n\n // Setup a listener for changes\n\n // The listener will live for as long as we keep this handle alive\n\n let _listner_handle =\n", "file_path": "src/main.rs", "rank": 29, "score": 1.463349515974941 } ]
Rust
src/application/engine.rs
hoangpq/crayon
f37a5e8c23b7c1d12583e585012960ab0ee922fb
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use crate::sched::prelude::LatchProbe; use crate::window::prelude::{Event, EventListener, EventListenerHandle, WindowEvent}; use super::lifecycle::LifecycleListener; use super::Params; type Result<T> = ::std::result::Result<T, ::failure::Error>; pub struct EngineSystem { events: EventListenerHandle, state: Arc<EngineState>, headless: bool, } struct EngineState { alive: AtomicBool, } impl EventListener for Arc<EngineState> { fn on(&mut self, v: &Event) -> Result<()> { if let Event::Window(WindowEvent::Closed) = *v { self.alive.store(false, Ordering::Relaxed); } Ok(()) } } impl Drop for EngineSystem { fn drop(&mut self) { crate::window::detach(self.events); unsafe { crate::res::discard(); crate::input::discard(); crate::video::discard(); crate::window::discard(); crate::sched::discard(); } } } impl EngineSystem { pub unsafe fn new(params: Params) -> Result<Self> { #[cfg(not(target_arch = "wasm32"))] crate::sched::setup(4, None, None); #[cfg(target_arch = "wasm32")] crate::sched::setup(0, None, None); crate::window::setup(params.window)?; crate::video::setup()?; crate::input::setup(params.input); crate::res::setup(params.res)?; let state = Arc::new(EngineState { alive: AtomicBool::new(true), }); let sys = EngineSystem { events: crate::window::attach(state.clone()), state, headless: false, }; Ok(sys) } pub unsafe fn new_headless(params: Params) -> Result<Self> { #[cfg(not(target_arch = "wasm32"))] crate::sched::setup(4, None, None); #[cfg(target_arch = "wasm32")] crate::sched::setup(0, None, None); crate::window::headless(); crate::video::headless(); crate::input::setup(params.input); crate::res::setup(params.res)?; let state = Arc::new(EngineState { alive: AtomicBool::new(false), }); let sys = EngineSystem { events: crate::window::attach(state.clone()), state, headless: true, }; Ok(sys) } #[inline] pub fn shutdown(&self) { self.state.alive.store(false, Ordering::Relaxed); } #[inline] pub fn 
headless(&self) -> bool { self.headless } pub fn run_oneshot(&self) -> Result<()> { super::foreach(|v| v.on_pre_update())?; super::foreach(|v| v.on_update())?; super::foreach(|v| v.on_render())?; super::foreach_rev(|v| v.on_post_update())?; Ok(()) } pub fn run<L, T, T2>(&self, latch: L, closure: T) -> Result<()> where L: LatchProbe + 'static, T: FnOnce() -> Result<T2> + 'static, T2: LifecycleListener + Send + 'static, { let state = self.state.clone(); let mut closure = Some(closure); super::sys::run_forever( move || { super::foreach(|v| v.on_pre_update())?; super::foreach_rev(|v| v.on_post_update())?; Ok(!latch.is_set()) }, move || { let mut v = None; std::mem::swap(&mut closure, &mut v); let application = crate::application::attach(v.unwrap()()?); let state = state.clone(); super::sys::run_forever( move || { super::foreach(|v| v.on_pre_update())?; super::foreach(|v| v.on_update())?; super::foreach(|v| v.on_render())?; super::foreach_rev(|v| v.on_post_update())?; Ok(state.alive.load(Ordering::Relaxed)) }, move || { unsafe { crate::sched::terminate() }; crate::application::detach(application); unsafe { super::late_discard() }; Ok(()) }, )?; Ok(()) }, )?; Ok(()) } }
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use crate::sched::prelude::LatchProbe; use crate::window::prelude::{Event, EventListener, EventListenerHandle, WindowEvent}; use super::lifecycle::LifecycleListener; use super::Params; type Result<T> = ::std::result::Result<T, ::failure::Error>; pub struct EngineSystem { events: EventListenerHandle, state: Arc<EngineState>, headless: bool, } struct EngineState { alive: AtomicBool, } impl EventListener for Arc<EngineState> { fn on(&mut self, v: &Event) -> Result<()> { if let Event::Window(WindowEvent::Closed) = *v { self.alive.store(false, Ordering::Relaxed); } Ok(()) } } impl Drop for EngineSystem { fn drop(&mut self) { crate::window::detach(self.events); unsafe { crate::res::discard(); crate::input::discard(); crate::video::discard(); crate::window::discard(); crate::sched::discard(); } } } impl EngineSystem { pub unsafe fn new(params: Params) -> Result<Self> { #[cfg(not(target_arch = "wasm32"))] crate::sched::setup(4, None, None); #[cfg(target_arch = "wasm32")] crate::sched::setup(0, None, None); crate::window::setup(params.window)?; crate::video::setup()?; crate::input::setup(params.input); crate::res::setup(params.res)?; let state = Arc::new(EngineState { alive: AtomicBool::new(true), }); let sys = EngineSystem { events: crate::window::attach(state.clone()), state, headless: false, }; Ok(sys) }
#[inline] pub fn shutdown(&self) { self.state.alive.store(false, Ordering::Relaxed); } #[inline] pub fn headless(&self) -> bool { self.headless } pub fn run_oneshot(&self) -> Result<()> { super::foreach(|v| v.on_pre_update())?; super::foreach(|v| v.on_update())?; super::foreach(|v| v.on_render())?; super::foreach_rev(|v| v.on_post_update())?; Ok(()) } pub fn run<L, T, T2>(&self, latch: L, closure: T) -> Result<()> where L: LatchProbe + 'static, T: FnOnce() -> Result<T2> + 'static, T2: LifecycleListener + Send + 'static, { let state = self.state.clone(); let mut closure = Some(closure); super::sys::run_forever( move || { super::foreach(|v| v.on_pre_update())?; super::foreach_rev(|v| v.on_post_update())?; Ok(!latch.is_set()) }, move || { let mut v = None; std::mem::swap(&mut closure, &mut v); let application = crate::application::attach(v.unwrap()()?); let state = state.clone(); super::sys::run_forever( move || { super::foreach(|v| v.on_pre_update())?; super::foreach(|v| v.on_update())?; super::foreach(|v| v.on_render())?; super::foreach_rev(|v| v.on_post_update())?; Ok(state.alive.load(Ordering::Relaxed)) }, move || { unsafe { crate::sched::terminate() }; crate::application::detach(application); unsafe { super::late_discard() }; Ok(()) }, )?; Ok(()) }, )?; Ok(()) } }
pub unsafe fn new_headless(params: Params) -> Result<Self> { #[cfg(not(target_arch = "wasm32"))] crate::sched::setup(4, None, None); #[cfg(target_arch = "wasm32")] crate::sched::setup(0, None, None); crate::window::headless(); crate::video::headless(); crate::input::setup(params.input); crate::res::setup(params.res)?; let state = Arc::new(EngineState { alive: AtomicBool::new(false), }); let sys = EngineSystem { events: crate::window::attach(state.clone()), state, headless: true, }; Ok(sys) }
function_block-full_function
[ { "content": "#[inline]\n\npub fn headless() -> bool {\n\n ctx().headless()\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 0, "score": 275338.7219091743 }, { "content": "#[inline]\n\npub fn create_surface(params: SurfaceParams) -> Result<SurfaceHandle> {\n\n ctx().create_surface(params)\n\n}\n\n\n\n/// Gets the `SurfaceParams` if available.\n", "file_path": "src/video/mod.rs", "rank": 1, "score": 216953.8458249544 }, { "content": "pub fn from_event(source: glutin::Event, dimensions: Vector2<u32>) -> Option<Event> {\n\n match source {\n\n glutin::Event::WindowEvent { event, .. } => from_window_event(&event, dimensions),\n\n\n\n glutin::Event::Awakened => Some(Event::Window(WindowEvent::Awakened)),\n\n\n\n glutin::Event::Suspended(v) => if v {\n\n Some(Event::Window(WindowEvent::Suspended))\n\n } else {\n\n Some(Event::Window(WindowEvent::Resumed))\n\n },\n\n\n\n glutin::Event::DeviceEvent { .. } => None,\n\n }\n\n}\n\n\n", "file_path": "src/window/backends/glutin/types.rs", "rank": 2, "score": 210493.80809522507 }, { "content": "pub fn new(params: WindowParams) -> Result<Box<Visitor>> {\n\n let visitor = self::visitor::GlutinVisitor::from(params)?;\n\n Ok(Box::new(visitor))\n\n}\n", "file_path": "src/window/backends/glutin/mod.rs", "rank": 3, "score": 209791.77468534422 }, { "content": "pub fn new(params: WindowParams) -> Result<Box<Visitor>> {\n\n let visitor = visitor::WebVisitor::new(params)?;\n\n Ok(Box::new(visitor))\n\n}\n", "file_path": "src/window/backends/web/mod.rs", "rank": 4, "score": 209791.7746853442 }, { "content": "#[inline]\n\npub fn create_render_texture(params: RenderTextureParams) -> Result<RenderTextureHandle> {\n\n ctx().create_render_texture(params)\n\n}\n\n\n\n/// Gets the `RenderTextureParams` if available.\n", "file_path": "src/video/mod.rs", "rank": 5, "score": 207224.59379792953 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n// /// Blocks current thread until latch is set. 
Try to keep busy by popping and stealing jobs\n\n// /// as necessary.\n\n// #[inline]\n\n// pub fn wait_until<T>(latch: &T)\n\n// where\n\n// T: LatchWaitProbe,\n\n// {\n\n// ctx().wait_until(latch);\n\n// }\n\n\n", "file_path": "src/sched/mod.rs", "rank": 6, "score": 203788.79520726227 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !LIFECYCLE_CTX.is_null() }\n\n}\n\n\n\n/// Checks if the engine is running in headless mode.\n", "file_path": "src/application/mod.rs", "rank": 7, "score": 203788.79520726227 }, { "content": "#[inline]\n\npub fn is_current() -> bool {\n\n ctx().is_current()\n\n}\n\n\n\n/// Returns the position of the lower-left hand corner of the window relative to the lower-left\n\n/// hand corner of the desktop. Note that the lower-left hand corner of the desktop is not\n\n/// necessarily the same as the screen. If the user uses a desktop with multiple monitors,\n\n/// the lower-left hand corner of the desktop is the lower-left hand corner of the monitor at\n\n/// the lower-left of the desktop.\n\n///\n\n/// The coordinates can be negative if the lower-left hand corner of the window is outside of\n\n/// the visible screen region.\n", "file_path": "src/window/mod.rs", "rank": 8, "score": 203788.79520726227 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n/// Resolve shortcuts in the provided string recursively and return None if not exists.\n", "file_path": "src/res/mod.rs", "rank": 9, "score": 203788.79520726227 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n/// Reset input to initial states.\n", "file_path": "src/input/mod.rs", "rank": 10, "score": 203788.79520726227 }, { "content": "#[doc(hidden)]\n\npub fn oneshot() -> Result<()> {\n\n unsafe {\n\n debug_assert!(LIFECYCLE_CTX.is_null(), \"duplicated setup of crayon.\");\n\n\n\n let params = Params::default();\n\n\n\n sys::init();\n\n LIFECYCLE_CTX = 
Box::into_raw(Box::new(LifecycleSystem::new()));\n\n TIME_CTX = Box::into_raw(Box::new(TimeSystem::new(&params)));\n\n CTX = Box::into_raw(Box::new(EngineSystem::new_headless(params)?));\n\n\n\n ctx().run_oneshot()\n\n }\n\n}\n\n\n\n/// Discard the core system.\n", "file_path": "src/application/mod.rs", "rank": 11, "score": 203441.07449231576 }, { "content": "/// Setup the core system.\n\npub fn setup<T, T2>(mut params: Params, closure: T) -> Result<()>\n\nwhere\n\n T: FnOnce() -> Result<T2> + 'static,\n\n T2: LifecycleListener + Send + 'static,\n\n{\n\n unsafe {\n\n debug_assert!(LIFECYCLE_CTX.is_null(), \"duplicated setup of crayon.\");\n\n\n\n sys::init();\n\n params.validate();\n\n\n\n let dirs = params.res.dirs.clone();\n\n LIFECYCLE_CTX = Box::into_raw(Box::new(LifecycleSystem::new()));\n\n TIME_CTX = Box::into_raw(Box::new(TimeSystem::new(&params)));\n\n\n\n if std::env::args().any(|v| v == \"headless\") {\n\n CTX = Box::into_raw(Box::new(EngineSystem::new_headless(params)?));\n\n } else {\n\n CTX = Box::into_raw(Box::new(EngineSystem::new(params)?));\n\n };\n\n\n\n let latch = crate::res::load_manifests(dirs)?;\n\n ctx().run(latch, closure)\n\n }\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 12, "score": 200700.171174575 }, { "content": "#[inline]\n\npub fn has_touchpad_attached() -> bool {\n\n ctx().has_touchpad_attached()\n\n}\n\n\n\n/// Checks if the `n`th finger is touched during last frame.\n", "file_path": "src/input/mod.rs", "rank": 13, "score": 200174.7836475894 }, { "content": "#[inline]\n\npub fn has_mouse_attached() -> bool {\n\n ctx().has_mouse_attached()\n\n}\n\n\n\n/// Checks if a mouse buttoAn is held down.\n", "file_path": "src/input/mod.rs", "rank": 14, "score": 200174.7836475894 }, { "content": "#[inline]\n\npub fn has_keyboard_attached() -> bool {\n\n ctx().has_keyboard_attached()\n\n}\n\n\n\n/// Checks if a key is currently held down.\n", "file_path": "src/input/mod.rs", "rank": 15, "score": 200174.7836475894 }, { 
"content": "#[inline]\n\npub fn make_current() -> Result<()> {\n\n ctx().make_current()\n\n}\n\n\n\n/// Returns true if this context is the current one in this thread.\n", "file_path": "src/window/mod.rs", "rank": 16, "score": 199829.86169429403 }, { "content": "#[inline]\n\npub fn play<T>(params: T) -> Result<AudioSourceHandle>\n\nwhere\n\n T: Into<AudioSource>,\n\n{\n\n ctx().play(params)\n\n}\n\n\n\n/// Stops a played audio source.\n", "file_path": "modules/audio/src/lib.rs", "rank": 17, "score": 197788.6864621225 }, { "content": "#[inline]\n\npub fn create_shader(params: ShaderParams, vs: String, fs: String) -> Result<ShaderHandle> {\n\n ctx().create_shader(params, vs, fs)\n\n}\n\n\n\n/// Gets the `ShaderParams` if available.\n", "file_path": "src/video/mod.rs", "rank": 18, "score": 191399.14319804293 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n info!(\"Create headless audio mixer.\",);\n\n\n\n Builder::new()\n\n .name(\"Audio\".into())\n\n .spawn(move || {\n\n //\n\n loop {\n\n {\n\n let mut rx = rx.write().unwrap();\n\n rx.clear();\n\n }\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n }\n\n }).expect(\"Failed to create thread for `AudioSystem`.\");\n\n\n\n Ok(())\n\n}\n", "file_path": "modules/audio/src/mixer/headless.rs", "rank": 19, "score": 191109.64342947022 }, { "content": "#[inline]\n\npub fn exists(uuid: Uuid) -> bool {\n\n ctx().exists(uuid)\n\n}\n\n\n\n/// Loads file asynchronously with response callback.\n", "file_path": "src/res/mod.rs", "rank": 20, "score": 186308.25271101377 }, { "content": "#[inline]\n\npub fn is_finger_touched(n: usize) -> bool {\n\n ctx().is_finger_touched(n)\n\n}\n\n\n\n/// Gets the position of the `n`th touched finger.\n", "file_path": "src/input/mod.rs", "rank": 21, "score": 186308.25271101377 }, { "content": "#[inline]\n\npub fn is_key_down(key: Key) -> bool {\n\n ctx().is_key_down(key)\n\n}\n\n\n\n/// Checks if a key has been pressed down during the last frame.\n", 
"file_path": "src/input/mod.rs", "rank": 22, "score": 186308.25271101377 }, { "content": "#[inline]\n\npub fn is_key_repeat(key: Key) -> bool {\n\n ctx().is_key_repeat(key)\n\n}\n\n\n\n/// Gets captured text during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 23, "score": 183088.91803145641 }, { "content": "#[inline]\n\npub fn is_key_press(key: Key) -> bool {\n\n ctx().is_key_press(key)\n\n}\n\n\n\n/// Checks if a key has been released during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 24, "score": 183088.91803145641 }, { "content": "#[inline]\n\npub fn is_key_release(key: Key) -> bool {\n\n ctx().is_key_release(key)\n\n}\n\n\n\n/// Checks if a key has been repeated during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 25, "score": 183088.91803145641 }, { "content": "#[inline]\n\npub fn is_mouse_down(button: MouseButton) -> bool {\n\n ctx().is_mouse_down(button)\n\n}\n\n\n\n/// Checks if a mouse button has been pressed during last frame.\n", "file_path": "src/input/mod.rs", "rank": 26, "score": 183088.91803145641 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn new() -> Result<Box<Visitor>> {\n\n let visitor = unsafe { webgl::visitor::WebGLVisitor::new()? 
};\n\n Ok(Box::new(visitor))\n\n}\n\n\n", "file_path": "src/video/backends/mod.rs", "rank": 27, "score": 182787.04860803107 }, { "content": "pub fn white() -> Result<TextureHandle> {\n\n let mut params = TextureParams::default();\n\n params.dimensions = (2, 2).into();\n\n\n\n let bytes = vec![255; 16];\n\n let data = TextureData {\n\n bytes: vec![bytes.into_boxed_slice()],\n\n };\n\n\n\n let texture = video::create_texture(params, data)?;\n\n Ok(texture)\n\n}\n", "file_path": "modules/world/src/assets/texture_builder.rs", "rank": 28, "score": 181705.54146875226 }, { "content": "pub fn quad() -> Result<MeshHandle> {\n\n let verts: [Vertex; 4] = [\n\n Vertex::new([-0.5, -0.5, 0.0], [0.0, 0.0, -1.0], [0.0, 0.0]),\n\n Vertex::new([0.5, -0.5, 0.0], [0.0, 0.0, -1.0], [1.0, 0.0]),\n\n Vertex::new([0.5, 0.5, 0.0], [0.0, 0.0, -1.0], [1.0, 1.0]),\n\n Vertex::new([-0.5, 0.5, 0.0], [0.0, 0.0, -1.0], [0.0, 1.0]),\n\n ];\n\n\n\n let idxes: [u16; 6] = [0, 1, 2, 0, 2, 3];\n\n\n\n let mut params = MeshParams::default();\n\n params.num_verts = verts.len();\n\n params.num_idxes = idxes.len();\n\n params.layout = Vertex::layout();\n\n\n\n let data = MeshData {\n\n vptr: Vertex::encode(&verts[..]).into(),\n\n iptr: IndexFormat::encode(&idxes).into(),\n\n };\n\n\n\n let mesh = video::create_mesh(params, Some(data))?;\n\n Ok(mesh)\n\n}\n\n\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 29, "score": 181705.54146875226 }, { "content": "pub fn cube() -> Result<MeshHandle> {\n\n let texcoords = [[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]];\n\n\n\n let points = [\n\n [-0.5, -0.5, 0.5],\n\n [0.5, -0.5, 0.5],\n\n [0.5, 0.5, 0.5],\n\n [-0.5, 0.5, 0.5],\n\n [-0.5, -0.5, -0.5],\n\n [0.5, -0.5, -0.5],\n\n [0.5, 0.5, -0.5],\n\n [-0.5, 0.5, -0.5],\n\n ];\n\n\n\n let normals = [\n\n [0.0, 0.0, 1.0],\n\n [1.0, 0.0, 0.0],\n\n [0.0, 0.0, -1.0],\n\n [-1.0, 0.0, 0.0],\n\n [0.0, 1.0, 0.0],\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 30, "score": 
181705.54146875226 }, { "content": "#[inline]\n\npub fn is_mouse_press(button: MouseButton) -> bool {\n\n ctx().is_mouse_press(button)\n\n}\n\n\n\n/// Checks if a mouse button has been released during last frame.\n", "file_path": "src/input/mod.rs", "rank": 31, "score": 180043.10491287604 }, { "content": "#[inline]\n\npub fn is_mouse_release(button: MouseButton) -> bool {\n\n ctx().is_mouse_release(button)\n\n}\n\n\n\n/// Checks if a mouse button has been clicked during last frame.\n", "file_path": "src/input/mod.rs", "rank": 32, "score": 180043.10491287604 }, { "content": "#[inline]\n\npub fn is_mouse_click(button: MouseButton) -> bool {\n\n ctx().is_mouse_click(button)\n\n}\n\n\n\n/// Checks if a mouse button has been double clicked during last frame.\n", "file_path": "src/input/mod.rs", "rank": 33, "score": 180043.10491287604 }, { "content": "#[inline]\n\npub fn is_mouse_double_click(button: MouseButton) -> bool {\n\n ctx().is_mouse_double_click(button)\n\n}\n\n\n\n/// Gets the mouse position relative to the lower-left hand corner of the window.\n", "file_path": "src/input/mod.rs", "rank": 34, "score": 177157.15234677127 }, { "content": "#[inline]\n\npub fn create_mesh<T>(params: MeshParams, data: T) -> CrResult<MeshHandle>\n\nwhere\n\n T: Into<Option<MeshData>>,\n\n{\n\n ctx().create_mesh(params, data)\n\n}\n\n\n\n/// Creates a mesh object from file asynchronously.\n", "file_path": "src/video/mod.rs", "rank": 35, "score": 173848.1815970789 }, { "content": "#[inline]\n\npub fn create_texture<T>(params: TextureParams, data: T) -> CrResult<TextureHandle>\n\nwhere\n\n T: Into<Option<TextureData>>,\n\n{\n\n ctx().create_texture(params, data)\n\n}\n\n\n\n/// Creates a texture object from file asynchronously.\n", "file_path": "src/video/mod.rs", "rank": 36, "score": 173848.1815970789 }, { "content": "#[inline]\n\npub fn create_prefab(prefab: Prefab) -> Result<PrefabHandle> {\n\n ctx().create_prefab(prefab)\n\n}\n\n\n\n/// Create a prefab object from file 
asynchronously.\n\n///\n\n/// A prefab asset acts as a template from which you can create new entity instances\n\n/// in the world. It stores a entity and its children complete with components and\n\n/// properties internally.\n", "file_path": "modules/world/src/lib.rs", "rank": 37, "score": 169859.2240391117 }, { "content": "fn from_touch_state(state: glutin::TouchPhase) -> TouchState {\n\n match state {\n\n glutin::TouchPhase::Started => TouchState::Start,\n\n glutin::TouchPhase::Moved => TouchState::Move,\n\n glutin::TouchPhase::Ended => TouchState::End,\n\n glutin::TouchPhase::Cancelled => TouchState::Cancel,\n\n }\n\n}\n\n\n\nimpl From<glutin::MouseButton> for MouseButton {\n\n fn from(mouse: glutin::MouseButton) -> Self {\n\n match mouse {\n\n glutin::MouseButton::Left => MouseButton::Left,\n\n glutin::MouseButton::Right => MouseButton::Right,\n\n glutin::MouseButton::Middle => MouseButton::Middle,\n\n glutin::MouseButton::Other(id) => MouseButton::Other(id),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/window/backends/glutin/types.rs", "rank": 38, "score": 168099.65981558556 }, { "content": "pub fn sphere(iteration: usize) -> Result<MeshHandle> {\n\n use std::f32::consts::FRAC_1_PI;\n\n\n\n fn normalize(v: [f32; 3]) -> Vertex {\n\n let l = (v[0] * v[0] + v[1] * v[1] + v[2] * v[2]).sqrt();\n\n let v = [v[0] / l, v[1] / l, v[2] / l];\n\n let uv = [v[0].asin() * FRAC_1_PI + 0.5, v[1].asin() * FRAC_1_PI + 0.5];\n\n\n\n Vertex::new(v, v, uv)\n\n }\n\n\n\n let t = (1.0f32 + 5.0f32.sqrt()) / 2.0f32;\n\n let mut verts = vec![\n\n normalize([-1.0, t, 0.0]),\n\n normalize([1.0, t, 0.0]),\n\n normalize([-1.0, -t, 0.0]),\n\n normalize([1.0, -t, 0.0]),\n\n normalize([0.0, -1.0, t]),\n\n normalize([0.0, 1.0, t]),\n\n normalize([0.0, -1.0, -t]),\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 39, "score": 167259.43957386923 }, { "content": "/// Loads file asynchronously. 
This method will returns a `Request` object immediatedly,\n\n/// its user's responsibility to store the object and frequently check it for completion.\n\npub fn load(uuid: Uuid) -> Result<Request, failure::Error> {\n\n ctx().load(uuid)\n\n}\n\n\n", "file_path": "src/res/mod.rs", "rank": 40, "score": 165465.829400603 }, { "content": "#[inline]\n\npub fn create_clip_from_uuid(uuid: Uuid) -> Result<AudioClipHandle> {\n\n ctx().create_clip_from_uuid(uuid)\n\n}\n\n\n", "file_path": "modules/audio/src/lib.rs", "rank": 41, "score": 164786.1472201527 }, { "content": "#[inline]\n\npub fn surface_state(handle: SurfaceHandle) -> ResourceState {\n\n ctx().surface_state(handle)\n\n}\n\n\n\n/// Deletes surface object.\n", "file_path": "src/video/mod.rs", "rank": 42, "score": 163199.2443483853 }, { "content": "#[inline]\n\npub fn texture_state(handle: TextureHandle) -> ResourceState {\n\n ctx().texture_state(handle)\n\n}\n\n\n\n/// Update a contiguous subregion of an existing two-dimensional texture object.\n", "file_path": "src/video/mod.rs", "rank": 43, "score": 163199.2443483853 }, { "content": "#[inline]\n\npub fn shader_state(handle: ShaderHandle) -> ResourceState {\n\n ctx().shader_state(handle)\n\n}\n\n\n\n/// Delete shader state object.\n", "file_path": "src/video/mod.rs", "rank": 44, "score": 163199.2443483853 }, { "content": "#[inline]\n\npub fn mesh_state(handle: MeshHandle) -> ResourceState {\n\n ctx().mesh_state(handle)\n\n}\n\n\n\n/// Update a subset of dynamic vertex buffer. 
Use `offset` specifies the offset\n\n/// into the buffer object's data store where data replacement will begin, measured\n\n/// in bytes.\n", "file_path": "src/video/mod.rs", "rank": 45, "score": 163199.2443483853 }, { "content": "#[inline]\n\npub fn prefab_state(handle: PrefabHandle) -> ResourceState {\n\n ctx().prefab_state(handle)\n\n}\n\n\n\n/// Delete a prefab object from this world.\n", "file_path": "modules/world/src/lib.rs", "rank": 46, "score": 160563.90115228546 }, { "content": "/// Removes a event listener from window.\n\npub fn detach(handle: EventListenerHandle) {\n\n ctx().remove_event_listener(handle)\n\n}\n\n\n\n/// Shows the window if it was hidden.\n\n///\n\n/// # Platform-specific\n\n///\n\n/// Has no effect on mobile platform.\n", "file_path": "src/window/mod.rs", "rank": 47, "score": 160461.73531841795 }, { "content": "pub fn new_headless() -> Box<Visitor> {\n\n Box::new(self::headless::HeadlessVisitor {})\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\nmod glutin;\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\npub use self::glutin::new;\n\n\n\n#[cfg(target_arch = \"wasm32\")]\n\nmod web;\n\n#[cfg(target_arch = \"wasm32\")]\n\npub use self::web::new;\n", "file_path": "src/window/backends/mod.rs", "rank": 48, "score": 160430.64435050846 }, { "content": "pub fn new_headless() -> Box<Visitor> {\n\n Box::new(self::headless::HeadlessVisitor::new())\n\n}\n", "file_path": "src/video/backends/mod.rs", "rank": 49, "score": 160430.64435050846 }, { "content": "#[inline]\n\npub fn render_texture_state(handle: RenderTextureHandle) -> ResourceState {\n\n ctx().render_texture_state(handle)\n\n}\n\n\n\n/// Delete the render texture object.\n", "file_path": "src/video/mod.rs", "rank": 50, "score": 158046.69681366274 }, { "content": "#[inline]\n\npub fn clip_state(handle: AudioClipHandle) -> ResourceState {\n\n ctx().clip_state(handle)\n\n}\n\n\n\n/// Deletes a `AudioClip` resource from `AudioSystem`.\n", "file_path": "modules/audio/src/lib.rs", "rank": 51, 
"score": 158046.6968136627 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n info!(\"Create web audio mixer.\",);\n\n\n\n let ctx = AudioContext::new().unwrap();\n\n\n\n let closure = Rc::new(RefCell::new(None));\n\n let clone = closure.clone();\n\n let mut sampler = Sampler::new(CHANNELS, ctx.sample_rate() as u32);\n\n\n\n let mut bufs = Vec::new();\n\n for _ in 0..CHANNELS {\n\n bufs.push(Vec::new());\n\n }\n\n\n\n *closure.borrow_mut() = Some(Closure::wrap(Box::new(move |e: AudioProcessingEvent| {\n\n if clone.borrow().is_some() {}\n\n\n\n {\n\n let mut rx = rx.write().unwrap();\n\n sampler.update(rx.drain(..));\n", "file_path": "modules/audio/src/mixer/webaudio.rs", "rank": 52, "score": 157656.5175316969 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n let device = cpal::default_output_device()\n\n .ok_or_else(|| format_err!(\"No avaiable audio output device\"))?;\n\n\n\n let format = device\n\n .default_output_format()\n\n .expect(\"The device doesn't support any format.\");\n\n\n\n let events = EventLoop::new();\n\n let stream = events.build_output_stream(&device, &format).unwrap();\n\n\n\n info!(\n\n \"Create audio mixer based on CPAL. [{:?}] {:?}.\",\n\n device.name(),\n\n format\n\n );\n\n\n\n let mut sampler = Sampler::new(format.channels as u8, format.sample_rate.0 as u32);\n\n Builder::new()\n\n .name(\"Audio\".into())\n", "file_path": "modules/audio/src/mixer/cpal.rs", "rank": 53, "score": 157656.5175316969 }, { "content": "/// Executes `f` and captures any panic, translating that panic into a\n\n/// `Err` result. 
The assumption is that any panic will be propagated\n\n/// later with `resume_unwinding`, and hence `f` can be treated as\n\n/// exception safe.\n\npub fn halt_unwinding<F, R>(func: F) -> thread::Result<R>\n\nwhere\n\n F: FnOnce() -> R,\n\n{\n\n panic::catch_unwind(AssertUnwindSafe(func))\n\n}\n\n\n", "file_path": "src/sched/unwind.rs", "rank": 54, "score": 157120.52600333933 }, { "content": "#[inline]\n\npub fn create_prefab_from<T: AsRef<str>>(url: T) -> Result<PrefabHandle> {\n\n ctx().create_prefab_from(url)\n\n}\n\n\n\n/// Return the prefab obejct if exists.\n", "file_path": "modules/world/src/lib.rs", "rank": 55, "score": 152287.15004076154 }, { "content": "#[inline]\n\npub fn load_with_callback<T>(uuid: Uuid, func: T) -> Result<(), failure::Error>\n\nwhere\n\n T: FnOnce(Response) + Send + 'static,\n\n{\n\n ctx().load_with_callback(uuid, func)\n\n}\n\n\n\n/// Loads file asynchronously with response callback.\n", "file_path": "src/res/mod.rs", "rank": 56, "score": 151924.85444882067 }, { "content": "fn from_window_event(source: &glutin::WindowEvent, dimensions: Vector2<u32>) -> Option<Event> {\n\n match *source {\n\n glutin::WindowEvent::CloseRequested => Some(Event::Window(WindowEvent::Closed)),\n\n\n\n glutin::WindowEvent::Focused(v) => if v {\n\n Some(Event::Window(WindowEvent::GainFocus))\n\n } else {\n\n Some(Event::Window(WindowEvent::LostFocus))\n\n },\n\n\n\n glutin::WindowEvent::Resized(glutin::dpi::LogicalSize { width, height }) => Some(\n\n Event::Window(WindowEvent::Resized(width as u32, height as u32)),\n\n ),\n\n\n\n glutin::WindowEvent::CursorMoved { position, .. 
} => {\n\n Some(Event::InputDevice(InputEvent::MouseMoved {\n\n position: (position.x as f32, dimensions.y as f32 - position.y as f32),\n\n }))\n\n }\n\n\n", "file_path": "src/window/backends/glutin/types.rs", "rank": 57, "score": 151635.98780122862 }, { "content": "#[inline]\n\npub fn create_clip_from<T: AsRef<str>>(url: T) -> Result<AudioClipHandle> {\n\n ctx().create_clip_from(url)\n\n}\n\n\n\n/// Creates a clip object from file asynchronously.\n", "file_path": "modules/audio/src/lib.rs", "rank": 58, "score": 150040.64092077475 }, { "content": "/// Loads file asynchronously. This method will returns a `Request` object immediatedly,\n\n/// its user's responsibility to store the object and frequently check it for completion.\n\npub fn load_from<T: AsRef<str>>(filename: T) -> Result<Request, failure::Error> {\n\n ctx().load_from(filename)\n\n}\n\n\n\nmod ins {\n\n use super::system::ResourceSystem;\n\n\n\n pub static mut CTX: *const ResourceSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static ResourceSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"resource system has not been initialized properly.\"\n\n );\n\n\n\n &*CTX\n\n }\n\n }\n\n}\n", "file_path": "src/res/mod.rs", "rank": 59, "score": 149460.7749926309 }, { "content": "#[inline]\n\npub fn surface(handle: SurfaceHandle) -> Option<SurfaceParams> {\n\n ctx().surface(handle)\n\n}\n\n\n\n/// Get the resource state of specified surface.\n", "file_path": "src/video/mod.rs", "rank": 60, "score": 148730.30397214624 }, { "content": "#[inline]\n\npub fn shader(handle: ShaderHandle) -> Option<ShaderParams> {\n\n ctx().shader(handle)\n\n}\n\n\n\n/// Get the resource state of specified shader.\n", "file_path": "src/video/mod.rs", "rank": 61, "score": 148730.30397214624 }, { "content": "#[inline]\n\npub fn mesh(handle: MeshHandle) -> Option<MeshParams> {\n\n ctx().mesh(handle)\n\n}\n\n\n\n/// Get the resource state of specified mesh.\n", "file_path": "src/video/mod.rs", "rank": 62, 
"score": 148730.30397214624 }, { "content": "/// Adds a event listener.\n\npub fn attach<T: EventListener + 'static>(lis: T) -> EventListenerHandle {\n\n ctx().add_event_listener(lis)\n\n}\n\n\n", "file_path": "src/window/mod.rs", "rank": 63, "score": 148481.22010832507 }, { "content": "#[inline]\n\npub fn load_from_with_callback<T1, T2>(filename: T1, func: T2) -> Result<(), failure::Error>\n\nwhere\n\n T1: AsRef<str>,\n\n T2: FnOnce(Response) + Send + 'static,\n\n{\n\n ctx().load_from_with_callback(filename, func)\n\n}\n\n\n", "file_path": "src/res/mod.rs", "rank": 65, "score": 147104.9623184655 }, { "content": "#[inline]\n\npub fn create_texture_from_uuid(uuid: Uuid) -> CrResult<TextureHandle> {\n\n ctx().create_texture_from_uuid(uuid)\n\n}\n\n\n\n/// Get the resource state of specified texture.\n", "file_path": "src/video/mod.rs", "rank": 66, "score": 143282.61302379824 }, { "content": "#[inline]\n\npub fn create_mesh_from_uuid(uuid: Uuid) -> CrResult<MeshHandle> {\n\n ctx().create_mesh_from_uuid(uuid)\n\n}\n\n\n\n/// Gets the `MeshParams` if available.\n", "file_path": "src/video/mod.rs", "rank": 67, "score": 143282.61302379824 }, { "content": "pub fn from_virtual_key_code(key: &str) -> Option<Key> {\n\n match key {\n\n \"1\" => Some(Key::Key1),\n\n \"2\" => Some(Key::Key2),\n\n \"3\" => Some(Key::Key3),\n\n \"4\" => Some(Key::Key4),\n\n \"5\" => Some(Key::Key5),\n\n \"6\" => Some(Key::Key6),\n\n \"7\" => Some(Key::Key7),\n\n \"8\" => Some(Key::Key8),\n\n \"9\" => Some(Key::Key9),\n\n \"0\" => Some(Key::Key0),\n\n \"A\" | \"a\" => Some(Key::A),\n\n \"B\" | \"b\" => Some(Key::B),\n\n \"C\" | \"c\" => Some(Key::C),\n\n \"D\" | \"d\" => Some(Key::D),\n\n \"E\" | \"e\" => Some(Key::E),\n\n \"F\" | \"f\" => Some(Key::F),\n\n \"G\" | \"g\" => Some(Key::G),\n\n \"H\" | \"h\" => Some(Key::H),\n", "file_path": "src/window/backends/web/types.rs", "rank": 68, "score": 143258.72930613166 }, { "content": "#[inline]\n\npub fn render_texture(handle: RenderTextureHandle) -> 
Option<RenderTextureParams> {\n\n ctx().render_texture(handle)\n\n}\n\n\n\n/// Get the resource state of specified render texture.\n", "file_path": "src/video/mod.rs", "rank": 69, "score": 140912.84206874718 }, { "content": "#[inline]\n\npub fn show() {\n\n ctx().show();\n\n}\n\n\n\n/// Hides the window if it was visible.\n\n///\n\n/// # Platform-specific\n\n///\n\n/// Has no effect on mobile platform.\n", "file_path": "src/window/mod.rs", "rank": 70, "score": 139327.09653263702 }, { "content": "#[inline]\n\npub fn reset() {\n\n ctx().reset();\n\n}\n\n\n\n/// Returns true if a keyboard is attached\n", "file_path": "src/input/mod.rs", "rank": 71, "score": 139327.09653263702 }, { "content": "#[inline]\n\npub fn hide() {\n\n ctx().hide();\n\n}\n\n\n\n/// Set the context as the active context in this thread.\n", "file_path": "src/window/mod.rs", "rank": 72, "score": 139327.09653263702 }, { "content": "#[inline]\n\npub fn discard() {\n\n ctx().shutdown()\n\n}\n\n\n\npub(crate) unsafe fn late_discard() {\n\n drop(Box::from_raw(CTX as *mut EngineSystem));\n\n CTX = std::ptr::null();\n\n\n\n drop(Box::from_raw(TIME_CTX as *mut TimeSystem));\n\n TIME_CTX = std::ptr::null();\n\n\n\n drop(Box::from_raw(LIFECYCLE_CTX as *mut LifecycleSystem));\n\n LIFECYCLE_CTX = std::ptr::null();\n\n}\n\n\n\n/// Checks if the engine is enabled.\n", "file_path": "src/application/mod.rs", "rank": 73, "score": 139327.09653263702 }, { "content": "#[test]\n\npub fn hierachy() {\n\n let mut scene = Scene::new(HeadlessRenderer::new());\n\n let e1 = scene.create(\"e1\");\n\n let e2 = scene.create(\"e2\");\n\n let e3 = scene.create(\"e3\");\n\n let e4 = scene.create(\"e4\");\n\n\n\n scene.set_parent(e4, e3, false).unwrap();\n\n scene.set_parent(e3, e1, false).unwrap();\n\n scene.set_parent(e2, e1, false).unwrap();\n\n // e1 <- (e2, e3 <- (e4))\n\n\n\n assert!(scene.is_ancestor(e2, e1));\n\n assert!(scene.is_ancestor(e3, e1));\n\n assert!(scene.is_ancestor(e4, e1));\n\n assert!(scene.is_ancestor(e4, 
e3));\n\n\n\n assert!(!scene.is_ancestor(e1, e1));\n\n assert!(!scene.is_ancestor(e1, e2));\n\n assert!(!scene.is_ancestor(e1, e3));\n", "file_path": "modules/world/tests/graph.rs", "rank": 74, "score": 136781.1167329041 }, { "content": "type Wrapper<T1, T2> = Box<dyn for<'r> FnMut(&'r T1) -> Result<T2> + Send>;\n\n\n\nimpl<T1: LatchProbe, T2: LifecycleListener + 'static> Launcher<T1, T2> {\n\n pub fn new<F: for<'r> FnOnce(&'r T1) -> Result<T2> + Send + 'static>(\n\n resources: T1,\n\n closure: F,\n\n ) -> Self {\n\n let mut v = Some(closure);\n\n let wrapper: Wrapper<T1, T2> = Box::new(move |r| {\n\n let mut w = None;\n\n std::mem::swap(&mut v, &mut w);\n\n w.unwrap()(r)\n\n });\n\n\n\n Launcher {\n\n resources,\n\n state: LaunchState::NotReady(wrapper),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/application/launcher.rs", "rank": 75, "score": 133510.65138982594 }, { "content": "#[inline]\n\npub fn text() -> String {\n\n ctx().text()\n\n}\n\n\n\n/// Returns true if a mouse is attached\n", "file_path": "src/input/mod.rs", "rank": 76, "score": 132673.63856744283 }, { "content": "#[inline]\n\npub fn fps() -> u32 {\n\n time_ctx().fps()\n\n}\n\n\n\n/// Gets the duration duraing last frame.\n", "file_path": "src/application/mod.rs", "rank": 77, "score": 132673.63856744283 }, { "content": "#[inline]\n\npub fn create_texture_from<T: AsRef<str>>(url: T) -> CrResult<TextureHandle> {\n\n ctx().create_texture_from(url)\n\n}\n\n\n\n/// Creates a texture object from file asynchronously.\n", "file_path": "src/video/mod.rs", "rank": 78, "score": 132018.0801736912 }, { "content": "#[inline]\n\npub fn create_mesh_from<T: AsRef<str>>(url: T) -> CrResult<MeshHandle> {\n\n ctx().create_mesh_from(url)\n\n}\n\n\n\n/// Creates a mesh object from file asynchronously.\n", "file_path": "src/video/mod.rs", "rank": 79, "score": 132018.08017369124 }, { "content": "#[inline]\n\npub fn hash<T: Hash + ?Sized>(v: &T) -> usize {\n\n let mut state = hasher::FxHasher::default();\n\n v.hash(&mut 
state);\n\n state.finish() as usize\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn basic() {\n\n let mut v: HashMap<&'static str, i32> = Default::default();\n\n v.insert(\"hahah\", 123);\n\n }\n\n}\n\n\n\nmod hasher {\n\n use std::hash::Hasher;\n\n use std::ops::BitXor;\n\n\n\n const ROTATE: u32 = 5;\n\n const SEED64: u64 = 0x517c_c1b7_2722_0a95;\n\n const SEED32: u32 = (SEED64 & 0xFFFF_FFFF) as u32;\n\n\n\n #[cfg(target_pointer_width = \"32\")]\n\n const SEED: usize = SEED32 as usize;\n\n #[cfg(target_pointer_width = \"64\")]\n\n const SEED: usize = SEED64 as usize;\n\n\n", "file_path": "src/utils/hash.rs", "rank": 80, "score": 131845.58884265262 }, { "content": "#[inline]\n\npub fn hash64<T: Hash + ?Sized>(v: &T) -> u64 {\n\n let mut state = hasher::FxHasher64::default();\n\n v.hash(&mut state);\n\n state.finish()\n\n}\n\n\n\n/// A convenience function for when you need a quick 32-bit hash.\n", "file_path": "src/utils/hash.rs", "rank": 81, "score": 131845.58884265262 }, { "content": "#[inline]\n\npub fn hash32<T: Hash + ?Sized>(v: &T) -> u32 {\n\n let mut state = hasher::FxHasher32::default();\n\n v.hash(&mut state);\n\n state.finish() as u32\n\n}\n\n\n\n/// A convenience function for when you need a quick usize hash.\n", "file_path": "src/utils/hash.rs", "rank": 82, "score": 131845.58884265262 }, { "content": "pub fn timestamp() -> Timestamp {\n\n let duration = std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .unwrap();\n\n\n\n let ms = u64::from(duration.subsec_millis()) + duration.as_secs() * 1000;\n\n Timestamp::from_millis(ms)\n\n}\n\n\n\npub(crate) fn init() {}\n\n\n\npub(crate) fn run_forever<F, F2>(mut advance: F, mut finished: F2) -> Result<(), failure::Error>\n\nwhere\n\n F: FnMut() -> Result<bool, failure::Error> + 'static,\n\n F2: FnMut() -> Result<(), failure::Error> + 'static,\n\n{\n\n while advance()? 
{}\n\n finished()\n\n}\n", "file_path": "src/application/sys/glutin.rs", "rank": 83, "score": 130276.9573736786 }, { "content": "pub fn timestamp() -> Timestamp {\n\n let ms = web_sys::window()\n\n .expect(\"should have a window in this context\")\n\n .performance()\n\n .expect(\"performance should be available\")\n\n .now();\n\n\n\n Timestamp::from_millis(ms as u64)\n\n}\n\n\n\npub(crate) fn init() {\n\n std::panic::set_hook(Box::new(console_error_panic_hook::hook));\n\n log::set_boxed_logger(Box::new(WebBrowserLogger {})).unwrap();\n\n log::set_max_level(log::LevelFilter::Info);\n\n}\n\n\n\npub(crate) fn run_forever<F, F2>(mut advance: F, mut finished: F2) -> Result<(), failure::Error>\n\nwhere\n\n F: FnMut() -> Result<bool, failure::Error> + 'static,\n\n F2: FnMut() -> Result<(), failure::Error> + 'static,\n", "file_path": "src/application/sys/web.rs", "rank": 84, "score": 130276.9573736786 }, { "content": "#[inline]\n\npub fn update_vertex_buffer(handle: MeshHandle, offset: usize, data: &[u8]) -> CrResult<()> {\n\n ctx().update_vertex_buffer(handle, offset, data)\n\n}\n\n\n\n/// Update a subset of dynamic index buffer. 
Use `offset` specifies the offset\n\n/// into the buffer object's data store where data replacement will begin, measured\n\n/// in bytes.\n", "file_path": "src/video/mod.rs", "rank": 85, "score": 129771.57105370448 }, { "content": "#[inline]\n\npub fn update_index_buffer(handle: MeshHandle, offset: usize, data: &[u8]) -> CrResult<()> {\n\n ctx().update_index_buffer(handle, offset, data)\n\n}\n\n\n\n/// Delete mesh object.\n", "file_path": "src/video/mod.rs", "rank": 86, "score": 129771.57105370448 }, { "content": "#[inline]\n\npub fn update_texture(handle: TextureHandle, area: Aabb2<u32>, data: &[u8]) -> CrResult<()> {\n\n ctx().update_texture(handle, area, data)\n\n}\n\n\n\n/// Delete the texture object.\n", "file_path": "src/video/mod.rs", "rank": 87, "score": 128047.04464382716 }, { "content": "#[inline]\n\npub fn finger_tap() -> GestureTap {\n\n ctx().finger_tap()\n\n}\n\n\n\n/// Gets the double tap gesture.\n", "file_path": "src/input/mod.rs", "rank": 88, "score": 128016.8163567785 }, { "content": "#[inline]\n\npub fn device_pixel_ratio() -> f32 {\n\n ctx().device_pixel_ratio()\n\n}\n\n\n\nmod ins {\n\n use super::system::WindowSystem;\n\n\n\n pub static mut CTX: *const WindowSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static WindowSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"window system has not been initialized properly.\"\n\n );\n\n\n\n &*CTX\n\n }\n\n }\n\n}\n", "file_path": "src/window/mod.rs", "rank": 89, "score": 128016.8163567785 }, { "content": "#[inline]\n\npub fn finger_pan() -> GesturePan {\n\n ctx().finger_pan()\n\n}\n\n\n\nmod ins {\n\n use super::system::InputSystem;\n\n\n\n pub static mut CTX: *const InputSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static InputSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"input system has not been initialized properly.\"\n\n );\n\n\n\n &*CTX\n\n }\n\n }\n\n}\n", "file_path": "src/input/mod.rs", "rank": 90, "score": 
128016.8163567785 }, { "content": "#[inline]\n\npub fn dimensions() -> Vector2<u32> {\n\n ctx().dimensions()\n\n}\n\n\n\n/// Returns the ratio between the backing framebuffer resolution and the window size in\n\n/// screen pixels. This is typically one for a normal display and two for a retina display.\n", "file_path": "src/window/mod.rs", "rank": 91, "score": 126739.96228675361 }, { "content": "#[inline]\n\npub fn position() -> Vector2<i32> {\n\n ctx().position()\n\n}\n\n\n\n/// Returns the size in *points* of the client area of the window.\n\n///\n\n/// The client area is the content of the window, excluding the title bar and borders. These are\n\n/// the size of the frame buffer.\n", "file_path": "src/window/mod.rs", "rank": 92, "score": 126739.96228675361 }, { "content": "#[inline]\n\npub fn finger_double_tap() -> GestureTap {\n\n ctx().finger_double_tap()\n\n}\n\n\n\n/// Gets the panning gesture.\n", "file_path": "src/input/mod.rs", "rank": 93, "score": 125881.8705339637 }, { "content": "#[inline]\n\npub fn default() -> WorldDefaultResources {\n\n ctx().default\n\n}\n\n\n\nmod inside {\n\n use super::system::WorldSystem;\n\n\n\n static mut CTX: *const WorldSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static WorldSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"world system has not been initialized properly.\"\n\n );\n\n\n\n &*CTX\n\n }\n\n }\n", "file_path": "modules/world/src/lib.rs", "rank": 94, "score": 125881.8705339637 }, { "content": "#[inline]\n\nfn foreach<T>(func: T) -> Result<()>\n\nwhere\n\n T: Fn(&mut dyn LifecycleListener) -> Result<()>,\n\n{\n\n lifecycle_ctx().foreach(func)\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 95, "score": 124847.86103174095 }, { "content": "#[inline]\n\npub fn mouse_movement() -> Vector2<f32> {\n\n ctx().mouse_movement()\n\n}\n\n\n\n/// Gets the scroll movement of mouse, usually provided by mouse wheel.\n", "file_path": "src/input/mod.rs", "rank": 96, "score": 
124479.8212698535 }, { "content": "#[inline]\n\npub fn mouse_position() -> Vector2<f32> {\n\n ctx().mouse_position()\n\n}\n\n\n\n/// Gets mouse movement since last frame.\n", "file_path": "src/input/mod.rs", "rank": 97, "score": 124479.8212698535 }, { "content": "#[inline]\n\npub fn mouse_scroll() -> Vector2<f32> {\n\n ctx().mouse_scroll()\n\n}\n\n\n\n/// Returns true if a touchpad is attached\n", "file_path": "src/input/mod.rs", "rank": 98, "score": 124479.8212698535 } ]
Rust
src/tests.rs
korken89/lbfgs-rs
a7a58926622da66c1893cc3cff22999226983528
use crate::*; #[test] #[should_panic] fn lbfgs_panic_zero_n() { let mut _e = Lbfgs::new(0, 1); } #[test] #[should_panic] fn lbfgs_panic_zero_mem() { let mut _e = Lbfgs::new(1, 0); } #[test] #[should_panic] fn lbfgs_panic_apply_size_grad() { let mut e = Lbfgs::new(5, 5); e.update_hessian(&[0.0; 4], &[0.0; 5]); } #[test] #[should_panic] fn lbfgs_panic_apply_state() { let mut e = Lbfgs::new(5, 5); e.update_hessian(&[0.0; 5], &[0.0; 4]); } #[test] #[should_panic] fn lbfgs_panic_cbfgs_alpha() { let mut _e = Lbfgs::new(5, 5).with_cbfgs_alpha(-1.0); } #[test] #[should_panic] fn lbfgs_panic_cbfgs_epsilon() { let mut _e = Lbfgs::new(5, 5).with_cbfgs_epsilon(-1.0); } #[test] fn lbfgs_buffer_storage() { let mut e = Lbfgs::new(2, 3); e.update_hessian(&[1.0, 1.0], &[1.5, 1.5]); assert_eq!(e.active_size, 0); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[2.0, 2.0], &[2.5, 2.5]) ); assert_eq!(e.active_size, 1); assert_eq!(&e.s[0], &[1.0, 1.0]); assert_eq!(&e.y[0], &[1.0, 1.0]); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-3.0, -3.0], &[-3.5, -3.5]) ); assert_eq!(e.active_size, 2); assert_eq!(&e.s[0], &[-6.0, -6.0]); assert_eq!(&e.s[1], &[1.0, 1.0]); assert_eq!(&e.y[0], &[-5.0, -5.0]); assert_eq!(&e.y[1], &[1.0, 1.0]); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-4.0, -4.0], &[-4.5, -4.5]) ); assert_eq!(e.active_size, 3); assert_eq!(&e.s[0], &[-1.0, -1.0]); assert_eq!(&e.s[1], &[-6.0, -6.0]); assert_eq!(&e.s[2], &[1.0, 1.0]); assert_eq!(&e.y[0], &[-1.0, -1.0]); assert_eq!(&e.y[1], &[-5.0, -5.0]); assert_eq!(&e.y[2], &[1.0, 1.0]); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[5.0, 5.0], &[5.5, 5.5]) ); assert_eq!(e.active_size, 3); assert_eq!(&e.s[0], &[10.0, 10.0]); assert_eq!(&e.s[1], &[-1.0, -1.0]); assert_eq!(&e.s[2], &[-6.0, -6.0]); assert_eq!(&e.y[0], &[9.0, 9.0]); assert_eq!(&e.y[1], &[-1.0, -1.0]); assert_eq!(&e.y[2], &[-5.0, -5.0]); } #[test] fn lbfgs_apply_finite() { let mut e = Lbfgs::new(2, 3); e.update_hessian(&[1.0, 1.0], &[1.5, 
1.5]); let mut g = [1.0, 1.0]; e.apply_hessian(&mut g); unit_test_utils::assert_is_finite_array(&g, "g"); } #[test] fn correctneess_buff_empty() { let mut e = Lbfgs::new(3, 3); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); e.apply_hessian(&mut g); let correct_dir = [-3.1, 1.5, 2.1]; unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); } #[test] fn correctneess_buff_1() { let mut e = Lbfgs::new(3, 3); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); e.apply_hessian(&mut g); let correct_dir = [-1.100601247872944, -0.086568349404424, 0.948633011911515]; let alpha_correct = -1.488372093023256; let rho_correct = 2.325581395348837; unit_test_utils::assert_nearly_equal(alpha_correct, e.alpha[0], 1e-8, 1e-10, "alpha"); unit_test_utils::assert_nearly_equal(rho_correct, e.rho[0], 1e-8, 1e-10, "rho"); unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); } #[test] fn correctneess_buff_2() { let mut e = Lbfgs::new(3, 3); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.75, 0.9, -1.9], &[0.19, 0.19, -0.44]) ); e.apply_hessian(&mut g); let correct_dir = [-1.814749861477524, 0.895232314736337, 1.871795942557546]; unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); } #[test] fn correctneess_buff_overfull() { let mut e = Lbfgs::new(3, 3); let mut g = [-2.0, 0.2, -0.3]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::Rejection, e.update_hessian( 
&[-0.5, 0.6, -1.2], &[0.419058177461747, 0.869843029576958, 0.260313940846084] ) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.75, 0.9, -1.9], &[0.19, 0.19, -0.44]) ); for _i in 1..10 { assert_eq!( UpdateStatus::Rejection, e.update_hessian( &[1., 2., 3.], &[-0.534522483824849, 0.774541920588438, -0.338187119117343] ) ); } assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-2.25, 3.5, -3.1], &[0.39, 0.39, -0.84]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-3.75, 6.3, -4.3], &[0.49, 0.59, -1.24]) ); e.apply_hessian(&mut g); println!("{:#.3?}", e); let gamma_correct = 0.077189939288812; let alpha_correct = [-0.044943820224719, -0.295345104333868, -1.899418829910887]; let rho_correct = [1.123595505617978, 1.428571428571429, 13.793103448275861]; let dir_correct = [-0.933604237447365, -0.078865807539102, 1.016318412551302]; unit_test_utils::assert_nearly_equal(gamma_correct, e.gamma, 1e-8, 1e-10, "gamma"); unit_test_utils::assert_nearly_equal_array(&alpha_correct, &e.alpha, 1e-8, 1e-10, "alpha"); unit_test_utils::assert_nearly_equal_array(&rho_correct, &e.rho[0..3], 1e-8, 1e-10, "rho"); unit_test_utils::assert_nearly_equal_array(&dir_correct, &g, 1e-8, 1e-10, "direction"); } #[test] fn correctneess_reset() { let mut e = Lbfgs::new(3, 3); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); e.apply_hessian(&mut g); let correct_dir = [-1.100601247872944, -0.086568349404424, 0.948633011911515]; let alpha_correct = -1.488372093023256; let rho_correct = 2.325581395348837; unit_test_utils::assert_nearly_equal(alpha_correct, e.alpha[0], 1e-8, 1e-10, "alpha"); unit_test_utils::assert_nearly_equal(rho_correct, e.rho[0], 1e-8, 1e-10, "rho"); 
unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); e.reset(); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); e.apply_hessian(&mut g); unit_test_utils::assert_nearly_equal(alpha_correct, e.alpha[0], 1e-8, 1e-10, "alpha"); unit_test_utils::assert_nearly_equal(rho_correct, e.rho[0], 1e-8, 1e-10, "rho"); unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); } #[test] fn reject_perpendicular_sy() { let n = 3; let mem = 5; let mut lbfgs = Lbfgs::new(n, mem).with_sy_epsilon(1e-8); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!(0, lbfgs.active_size); assert_eq!( UpdateStatus::Rejection, lbfgs.update_hessian( &[-0.5, 0.6, -1.2], &[0.419058177461747, 0.869843029576958, 0.260313940846084] ) ); assert_eq!(0, lbfgs.active_size); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); assert_eq!(1, lbfgs.active_size); assert_eq!( UpdateStatus::Rejection, lbfgs.update_hessian(&[1.1, 2., 3.], &[-0.5, 0.7, -0.3]) ); assert_eq!(1, lbfgs.active_size); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[-0.75, 0.9, -1.9], &[0.19, 0.19, -0.44]) ); assert_eq!(2, lbfgs.active_size); } #[test] fn reject_norm_s_zero() { let n = 3; let mem = 5; let mut lbfgs = Lbfgs::new(n, mem); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[1.0, 2.0, -1.0], &[5.0, 5.0, 5.0]) ); assert_eq!( UpdateStatus::Rejection, lbfgs.update_hessian( &[ 1.0 + std::f64::MIN_POSITIVE, 2.0 + std::f64::MIN_POSITIVE, -1.0 + std::f64::MIN_POSITIVE ], &[5.0, 5.0, 5.0] ) ); } #[test] fn reject_cfbs_condition() { let n = 3; let mem = 5; let mut lbfgs = Lbfgs::new(n, mem) .with_sy_epsilon(1e-8) .with_cbfgs_alpha(1.0) .with_cbfgs_epsilon(1e-4); assert_eq!( 
UpdateStatus::UpdateOk, lbfgs.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::Rejection, lbfgs.update_hessian(&[-0.838, 0.260, 0.479], &[-0.5, 0.6, -1.2]) ); }
use crate::*; #[test] #[should_panic] fn lbfgs_panic_zero_n() { let mut _e = Lbfgs::new(0, 1); } #[test] #[should_panic] fn lbfgs_panic_zero_mem() { let mut _e = Lbfgs::new(1, 0); } #[test] #[should_panic] fn lbfgs_panic_apply_size_grad() { let mut e = Lbfgs::new(5, 5); e.update_hessian(&[0.0; 4], &[0.0; 5]); } #[test] #[should_panic] fn lbfgs_panic_apply_state() { let mut e = Lbfgs::new(5, 5); e.update_hessian(&[0.0; 5], &[0.0; 4]); } #[test] #[should_panic] fn lbfgs_panic_cbfgs_alpha() { let mut _e = Lbfgs::new(5, 5).with_cbfgs_alpha(-1.0); } #[test] #[should_panic] fn lbfgs_panic_cbfgs_epsilon() { let mut _e = Lbfgs::new(5, 5).with_cbfgs_epsilon(-1.0); } #[test] fn lbfgs_buffer_storage() { let mut e = Lbfgs::new(2, 3); e.update_hessian(&[1.0, 1.0], &[1.5, 1.5]); assert_eq!(e.active_size, 0); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[2.0, 2.0], &[2.5, 2.5]) ); assert_eq!(e.active_size, 1); assert_eq!(&e.s[0], &[1.0, 1.0]); assert_eq!(&e.y[0], &[1.0, 1.0]); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-3.0, -3.0], &[-3.5, -3.5]) ); assert_eq!(e.active_size, 2); assert_eq!(&e.s[0], &[-6.0, -6.0]); assert_eq!(&e.s[1], &[1.0, 1.0]); assert_eq!(&e.y[0], &[-5.0, -5.0]); assert_eq!(&e.y[1], &[1.0, 1.0]); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-4.0, -4.0], &[-4.5, -4.5]) ); assert_eq!(e.active_size, 3); assert_eq!(&e.s[0], &[-1.0, -1.0]); assert_eq!(&e.s[1], &[-6.0, -6.0]); assert_eq!(&e.s[2], &[1.0, 1.0]); assert_eq!(&e.y[0], &[-1.0, -1.0]); assert_eq!(&e.y[1], &[-5.0, -5.0]); assert_eq!(&e.y[2], &[1.0, 1.0]); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[5.0, 5.0], &[5.5, 5.5]) ); assert_eq!(e.active_size, 3); assert_eq!(&e.s[0], &[10.0, 10.0]); assert_eq!(&e.s[1], &[-1.0, -1.0]); assert_eq!(&e.s[2], &[-6.0, -6.0]); assert_eq!(&e.y[0], &[9.0, 9.0]); assert_eq!(&e.y[1], &[-1.0, -1.0]); assert_eq!(&e.y[2], &[-5.0, -5.0]); } #[test] fn lbfgs_apply_finite() { let mut e = Lbfgs::new(2, 3); e.update_hessian(&[1.0, 1.0], &[1.5, 
1.5]); let mut g = [1.0, 1.0]; e.apply_hessian(&mut g); unit_test_utils::assert_is_finite_array(&g, "g"); } #[test] fn correctneess_buff_empty() { let mut e = Lbfgs::new(3, 3); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); e.apply_hessian(&mut g); let correct_dir = [-3.1, 1.5, 2.1]; unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); } #[test] fn correctneess_buff_1() { let mut e = Lbfgs::new(3, 3); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); e.apply_hessian(&mut g); let correct_dir = [-1.100601247872944, -0.086568349404424, 0.948633011911515]; let alpha_correct = -1.488372093023256; let rho_correct = 2.325581395348837; unit_test_utils::assert_nearly_equal(alpha_correct, e.alpha[0], 1e-8, 1e-10, "alpha"); unit_test_utils::assert_nearly_equal(rho_correct, e.rho[0], 1e-8, 1e-10, "rho"); unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); } #[test] fn correctneess_buff_2() { let mut e = Lbfgs::new(3, 3); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.75, 0.9, -1.9], &[0.19, 0.19, -0.44]) ); e.apply_hessian(&mut g); let correct_dir = [-1.814749861477524, 0.895232314736337, 1.871795942557546]; unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); } #[test] fn correctneess_buff_overfull() { let mut e = Lbfgs::new(3, 3); let mut g = [-2.0, 0.2, -0.3]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::Rejection, e.update_hessian( 
&[-0.5, 0.6, -1.2], &[0.419058177461747, 0.869843029576958, 0.260313940846084] ) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.75, 0.9, -1.9], &[0.19, 0.19, -0.44]) ); for _i in 1..10 { assert_eq!( UpdateStatus::Rejection, e.update_hessian( &[1., 2., 3.], &[-0.534522483824849, 0.774541920588438, -0.338187119117343] ) ); } assert_
#[test] fn correctneess_reset() { let mut e = Lbfgs::new(3, 3); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); e.apply_hessian(&mut g); let correct_dir = [-1.100601247872944, -0.086568349404424, 0.948633011911515]; let alpha_correct = -1.488372093023256; let rho_correct = 2.325581395348837; unit_test_utils::assert_nearly_equal(alpha_correct, e.alpha[0], 1e-8, 1e-10, "alpha"); unit_test_utils::assert_nearly_equal(rho_correct, e.rho[0], 1e-8, 1e-10, "rho"); unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); e.reset(); let mut g = [-3.1, 1.5, 2.1]; assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); e.apply_hessian(&mut g); unit_test_utils::assert_nearly_equal(alpha_correct, e.alpha[0], 1e-8, 1e-10, "alpha"); unit_test_utils::assert_nearly_equal(rho_correct, e.rho[0], 1e-8, 1e-10, "rho"); unit_test_utils::assert_nearly_equal_array(&correct_dir, &g, 1e-8, 1e-10, "direction"); } #[test] fn reject_perpendicular_sy() { let n = 3; let mem = 5; let mut lbfgs = Lbfgs::new(n, mem).with_sy_epsilon(1e-8); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!(0, lbfgs.active_size); assert_eq!( UpdateStatus::Rejection, lbfgs.update_hessian( &[-0.5, 0.6, -1.2], &[0.419058177461747, 0.869843029576958, 0.260313940846084] ) ); assert_eq!(0, lbfgs.active_size); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]) ); assert_eq!(1, lbfgs.active_size); assert_eq!( UpdateStatus::Rejection, lbfgs.update_hessian(&[1.1, 2., 3.], &[-0.5, 0.7, -0.3]) ); assert_eq!(1, lbfgs.active_size); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[-0.75, 0.9, -1.9], 
&[0.19, 0.19, -0.44]) ); assert_eq!(2, lbfgs.active_size); } #[test] fn reject_norm_s_zero() { let n = 3; let mem = 5; let mut lbfgs = Lbfgs::new(n, mem); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[1.0, 2.0, -1.0], &[5.0, 5.0, 5.0]) ); assert_eq!( UpdateStatus::Rejection, lbfgs.update_hessian( &[ 1.0 + std::f64::MIN_POSITIVE, 2.0 + std::f64::MIN_POSITIVE, -1.0 + std::f64::MIN_POSITIVE ], &[5.0, 5.0, 5.0] ) ); } #[test] fn reject_cfbs_condition() { let n = 3; let mem = 5; let mut lbfgs = Lbfgs::new(n, mem) .with_sy_epsilon(1e-8) .with_cbfgs_alpha(1.0) .with_cbfgs_epsilon(1e-4); assert_eq!( UpdateStatus::UpdateOk, lbfgs.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]) ); assert_eq!( UpdateStatus::Rejection, lbfgs.update_hessian(&[-0.838, 0.260, 0.479], &[-0.5, 0.6, -1.2]) ); }
eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-2.25, 3.5, -3.1], &[0.39, 0.39, -0.84]) ); assert_eq!( UpdateStatus::UpdateOk, e.update_hessian(&[-3.75, 6.3, -4.3], &[0.49, 0.59, -1.24]) ); e.apply_hessian(&mut g); println!("{:#.3?}", e); let gamma_correct = 0.077189939288812; let alpha_correct = [-0.044943820224719, -0.295345104333868, -1.899418829910887]; let rho_correct = [1.123595505617978, 1.428571428571429, 13.793103448275861]; let dir_correct = [-0.933604237447365, -0.078865807539102, 1.016318412551302]; unit_test_utils::assert_nearly_equal(gamma_correct, e.gamma, 1e-8, 1e-10, "gamma"); unit_test_utils::assert_nearly_equal_array(&alpha_correct, &e.alpha, 1e-8, 1e-10, "alpha"); unit_test_utils::assert_nearly_equal_array(&rho_correct, &e.rho[0..3], 1e-8, 1e-10, "rho"); unit_test_utils::assert_nearly_equal_array(&dir_correct, &g, 1e-8, 1e-10, "direction"); }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn scalar_mult<T>(a: &mut [T], s: T)\n\nwhere\n\n T: Float,\n\n{\n\n a.iter_mut().for_each(|out| *out = s * (*out));\n\n}\n\n\n\n/// Calculates out = out + s * a\n", "file_path": "src/vec_ops.rs", "rank": 8, "score": 39979.04309943656 }, { "content": "#[inline]\n\npub fn difference_and_save<T>(out: &mut [T], a: &[T], b: &[T])\n\nwhere\n\n T: Float,\n\n{\n\n debug_assert!(a.len() == b.len());\n\n debug_assert!(out.len() == a.len());\n\n\n\n out.iter_mut()\n\n .zip(a.iter().zip(b.iter()))\n\n .for_each(|(out, (a, b))| *out = (*a) - (*b));\n\n}\n\n\n\n/// Calculates a scalar times slice: out = s * out\n", "file_path": "src/vec_ops.rs", "rank": 17, "score": 37355.41033062912 }, { "content": "#[inline]\n\npub fn inplace_vec_add<T>(out: &mut [T], a: &[T], s: T)\n\nwhere\n\n T: Float,\n\n{\n\n debug_assert!(out.len() == a.len());\n\n\n\n out.iter_mut()\n\n .zip(a.iter())\n\n .for_each(|(out, a)| *out = (*out) + s * (*a));\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::*;\n\n\n\n #[test]\n\n fn inner_product_test() {\n\n assert_eq!(\n\n vec_ops::inner_product(&vec![1.0, 2.0, 3.0], &vec![1.0, 2.0, 3.0]),\n\n 14.0\n", "file_path": "src/vec_ops.rs", "rank": 18, "score": 36295.010664812304 }, { "content": "#[inline]\n\npub fn norm1<T>(a: &[T]) -> T\n\nwhere\n\n T: Float + Sum<T>,\n\n{\n\n a.iter().map(|x| x.abs()).sum()\n\n}\n\n\n\n/// Calculate the squared 2-norm of a slice\n", "file_path": "src/vec_ops.rs", "rank": 26, "score": 17934.1216202262 }, { "content": "#[inline]\n\npub fn norm2<T>(a: &[T]) -> T\n\nwhere\n\n T: Float + Sum<T>,\n\n{\n\n norm2_sq(a).sqrt()\n\n}\n\n\n\n/// Calculate the infinity-norm of a slice\n", "file_path": "src/vec_ops.rs", "rank": 27, "score": 17934.1216202262 }, { "content": "#[inline]\n\npub fn norm_inf<T>(a: &[T]) -> T\n\nwhere\n\n T: Float + Zero,\n\n{\n\n a.iter()\n\n .fold(T::zero(), |current_max, x| x.abs().max(current_max))\n\n}\n\n\n\n/// Calculates the difference of two slices and saves it in the 
third: out = a - b\n", "file_path": "src/vec_ops.rs", "rank": 28, "score": 17294.62557850924 }, { "content": "#[inline]\n\npub fn norm2_sq<T>(a: &[T]) -> T\n\nwhere\n\n T: Float + Sum<T>,\n\n{\n\n inner_product(a, a)\n\n}\n\n\n\n/// Calculate the 2-norm of a slice\n", "file_path": "src/vec_ops.rs", "rank": 29, "score": 17294.62557850924 }, { "content": "#[inline]\n\npub fn inner_product<T>(a: &[T], b: &[T]) -> T\n\nwhere\n\n T: Float + Sum<T> + Mul<T, Output = T>,\n\n{\n\n debug_assert!(a.len() == b.len());\n\n\n\n a.iter().zip(b.iter()).map(|(x, y)| (*x) * (*y)).sum()\n\n}\n\n\n\n/// Calculate the 1-norm of a slice\n", "file_path": "src/vec_ops.rs", "rank": 30, "score": 16203.413686059192 }, { "content": " let active_y = &self.y[0..self.active_size];\n\n let rho = &self.rho[0..self.active_size];\n\n let alpha = &mut self.alpha;\n\n\n\n let q = g;\n\n\n\n // Perform the forward L-BFGS algorithm\n\n for (s_k, (y_k, (rho_k, alpha_k))) in active_s\n\n .iter()\n\n .zip(active_y.iter().zip(rho.iter().zip(alpha.iter_mut())))\n\n {\n\n let a = rho_k * vec_ops::inner_product(s_k, q);\n\n\n\n *alpha_k = a;\n\n\n\n vec_ops::inplace_vec_add(q, y_k, -a);\n\n }\n\n\n\n // Apply the initial Hessian estimate and form r = H_0 * q, where H_0 = gamma * I\n\n vec_ops::scalar_mult(q, self.gamma);\n", "file_path": "src/lib.rs", "rank": 31, "score": 6.749319434058401 }, { "content": " );\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn inner_product_test_panic() {\n\n vec_ops::inner_product(&vec![2.0, 3.0], &vec![1.0, 2.0, 3.0]);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn diff_and_save_test_panic1() {\n\n let mut out = vec![0.0];\n\n vec_ops::difference_and_save(&mut out, &vec![3.0, 4.0], &vec![1.0, 1.0]);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn diff_and_save_test_panic2() {\n\n let mut out = vec![0.0, 0.0];\n", "file_path": "src/vec_ops.rs", "rank": 32, "score": 4.992067313826618 }, { "content": " vec_ops::difference_and_save(&mut out, &vec![4.0], &vec![1.0, 1.0]);\n\n 
}\n\n\n\n #[test]\n\n #[should_panic]\n\n fn diff_and_save_test_panic3() {\n\n let mut out = vec![0.0, 0.0];\n\n vec_ops::difference_and_save(&mut out, &vec![3.0, 4.0], &vec![1.0]);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n fn inplace_vec_add_panic() {\n\n let mut out = vec![0.0, 0.0];\n\n vec_ops::inplace_vec_add(&mut out, &vec![1.0], 1.0);\n\n }\n\n\n\n #[test]\n\n fn norm1_test() {\n\n assert_eq!(vec_ops::norm1(&vec![1.0, -2.0, -3.0]), 6.0);\n", "file_path": "src/vec_ops.rs", "rank": 33, "score": 4.942127079731008 }, { "content": " fn new_s_and_y_valid(&mut self, g: &[f64]) -> bool {\n\n let s = self.s.last().unwrap();\n\n let y = self.y.last().unwrap();\n\n let rho = self.rho.last_mut().unwrap();\n\n let ys = vec_ops::inner_product(s, y);\n\n let norm_s_squared = vec_ops::inner_product(s, s);\n\n\n\n *rho = 1.0 / ys;\n\n\n\n if norm_s_squared <= std::f64::MIN_POSITIVE\n\n || (self.sy_epsilon > 0.0 && ys <= self.sy_epsilon)\n\n {\n\n // In classic L-BFGS, the buffer should be updated only if\n\n // y'*s is strictly positive and |s| is nonzero\n\n false\n\n } else if self.cbfgs_epsilon > 0.0 && self.cbfgs_alpha > 0.0 {\n\n // Check the CBFGS condition of Li and Fukushima\n\n // Condition: (y^T * s) / ||s||^2 > epsilon * ||grad(x)||^alpha\n\n let lhs_cbfgs = ys / norm_s_squared;\n\n let rhs_cbfgs = self.cbfgs_epsilon * vec_ops::norm2(g).powf(self.cbfgs_alpha);\n", "file_path": "src/lib.rs", "rank": 34, "score": 4.859425297690767 }, { "content": "\n\npub mod vec_ops;\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\n/// The default `sy_epsilon`\n\npub const DEFAULT_SY_EPSILON: f64 = 1e-10;\n\n\n\n/// LBFGS Buffer\n\n///\n\n/// The Limited-memory BFGS algorithm is used to estimate curvature information for the\n\n/// gradient of a function as well as other operators and is often used in numerical\n\n/// optimization and numerical methods in general.\n\n///\n\n/// `Lbfgs` maintains a buffer of pairs `(s,y)` and values `rho` (inverse of inner products\n\n/// of `s` and 
`y`)\n\n///\n\n///\n\n#[derive(Debug)]\n", "file_path": "src/lib.rs", "rank": 35, "score": 4.513782659722968 }, { "content": "\n\n #[test]\n\n fn inplace_vec_add_test() {\n\n let mut out = vec![1.0, 1.0];\n\n let input = vec![1.0, 1.0];\n\n let out_result = vec![3.0, 3.0];\n\n vec_ops::inplace_vec_add(&mut out, &input, 2.0);\n\n\n\n assert_eq!(out, out_result);\n\n }\n\n}\n", "file_path": "src/vec_ops.rs", "rank": 36, "score": 4.492194908099469 }, { "content": " let mut out = vec![0.0, 0.0];\n\n let out_result = vec![2.0, 3.0];\n\n\n\n vec_ops::difference_and_save(&mut out, &vec![3.0, 4.0], &vec![1.0, 1.0]);\n\n\n\n assert_eq!(&out, &out_result);\n\n }\n\n\n\n #[test]\n\n fn scalar_vector_test() {\n\n let mut out = vec![1.0, 1.0];\n\n let out_result = vec![2.0, 2.0];\n\n let out_result2 = vec![4.0, 4.0];\n\n\n\n vec_ops::scalar_mult(&mut out, 2.0);\n\n assert_eq!(out, out_result);\n\n\n\n vec_ops::scalar_mult(&mut out, 2.0);\n\n assert_eq!(out, out_result2);\n\n }\n", "file_path": "src/vec_ops.rs", "rank": 37, "score": 4.379381146136093 }, { "content": " assert!(alpha >= 0.0, \"Negative alpha\");\n\n\n\n self.cbfgs_alpha = alpha;\n\n self\n\n }\n\n\n\n /// Update the default C-BFGS epsilon\n\n pub fn with_cbfgs_epsilon(mut self, epsilon: f64) -> Self {\n\n assert!(epsilon >= 0.0);\n\n\n\n self.cbfgs_epsilon = epsilon;\n\n self\n\n }\n\n\n\n /// Update the default sy_epsilon\n\n pub fn with_sy_epsilon(mut self, sy_epsilon: f64) -> Self {\n\n assert!(sy_epsilon >= 0.0);\n\n\n\n self.sy_epsilon = sy_epsilon;\n\n self\n", "file_path": "src/lib.rs", "rank": 38, "score": 4.292046030360094 }, { "content": " assert!(buffer_size > 0);\n\n\n\n Lbfgs {\n\n active_size: 0,\n\n gamma: 1.0,\n\n s: vec![vec![0.0; problem_size]; buffer_size + 1], // +1 for the temporary checking area\n\n y: vec![vec![0.0; problem_size]; buffer_size + 1], // +1 for the temporary checking area\n\n alpha: vec![0.0; buffer_size],\n\n rho: vec![0.0; buffer_size + 1],\n\n cbfgs_alpha: 0.0,\n\n 
cbfgs_epsilon: 0.0,\n\n sy_epsilon: DEFAULT_SY_EPSILON,\n\n old_state: vec![0.0; problem_size],\n\n old_g: vec![0.0; problem_size],\n\n first_old: true,\n\n }\n\n }\n\n\n\n /// Update the default C-BFGS alpha\n\n pub fn with_cbfgs_alpha(mut self, alpha: f64) -> Self {\n", "file_path": "src/lib.rs", "rank": 39, "score": 4.195147676801748 }, { "content": " let r = q;\n\n\n\n // Perform the backward L-BFGS algorithm\n\n for (s_k, (y_k, (rho_k, alpha_k))) in active_s\n\n .iter()\n\n .zip(active_y.iter().zip(rho.iter().zip(alpha.iter())))\n\n .rev()\n\n {\n\n let beta = rho_k * vec_ops::inner_product(y_k, r);\n\n\n\n vec_ops::inplace_vec_add(r, s_k, alpha_k - beta);\n\n }\n\n\n\n // The g with the Hessian applied is available in the input g\n\n // r = H_k * grad f\n\n }\n\n\n\n /// Check the validity of the newly added s and y vectors. Based on the condition in:\n\n /// D.-H. Li and M. Fukushima, \"On the global convergence of the BFGS method for nonconvex\n\n /// unconstrained optimization problems,\" vol. 11, no. 4, pp. 
1054–1064, jan 2001.\n", "file_path": "src/lib.rs", "rank": 40, "score": 3.901877363458804 }, { "content": "\n\n return UpdateStatus::UpdateOk;\n\n }\n\n\n\n // Form the new s_k in the temporary area\n\n vec_ops::difference_and_save(self.s.last_mut().unwrap(), &state, &self.old_state);\n\n\n\n // Form the new y_k in the temporary area\n\n vec_ops::difference_and_save(self.y.last_mut().unwrap(), &g, &self.old_g);\n\n\n\n // Check that the s and y are valid to use\n\n if !self.new_s_and_y_valid(g) {\n\n return UpdateStatus::Rejection;\n\n }\n\n\n\n self.old_state.copy_from_slice(state);\n\n self.old_g.copy_from_slice(g);\n\n\n\n // Move the new s_0, y_0 and rho_0 to the front\n\n self.s.rotate_right(1);\n", "file_path": "src/lib.rs", "rank": 41, "score": 3.830657957650292 }, { "content": "pub struct Lbfgs {\n\n /// The number of vectors in s and y that are currently in use\n\n active_size: usize,\n\n /// Used to warm-start the Hessian estimation with H_0 = gamma * I\n\n gamma: f64,\n\n /// s holds the vectors of state difference s_k = x_{k+1} - x_k, s_0 holds the most recent s\n\n s: Vec<Vec<f64>>,\n\n /// y holds the vectors of the function g (usually cost function gradient) difference:\n\n /// y_k = g_{k+1} - g_k, y_0 holds the most recent y\n\n y: Vec<Vec<f64>>,\n\n /// Intermediary storage for the forward L-BFGS pass\n\n alpha: Vec<f64>,\n\n /// Intermediary storage for the forward L-BFGS pass\n\n rho: Vec<f64>,\n\n /// The alpha parameter of the C-BFGS criterion\n\n cbfgs_alpha: f64,\n\n /// The epsilon parameter of the C-BFGS criterion\n\n cbfgs_epsilon: f64,\n\n /// Limit on the inner product s'*y for acceptance in the buffer\n\n sy_epsilon: f64,\n", "file_path": "src/lib.rs", "rank": 42, "score": 3.818305770576802 }, { "content": " self.y.rotate_right(1);\n\n self.rho.rotate_right(1);\n\n\n\n // Update the Hessian estimate\n\n self.gamma = (1.0 / self.rho[0]) / vec_ops::inner_product(&self.y[0], &self.y[0]);\n\n\n\n // Update the indexes and number of active, 
-1 comes from the temporary area used in\n\n // the end of s and y to check if they are valid\n\n self.active_size = (self.s.len() - 1).min(self.active_size + 1);\n\n\n\n UpdateStatus::UpdateOk\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 43, "score": 3.4520467527019703 }, { "content": "//! # vec_ops\n\n//!\n\n//! Matrix operations used by the L-BFGS algorithm.\n\n//!\n\n\n\nuse num_traits::{float::Float, identities::Zero};\n\nuse std::iter::Sum;\n\nuse std::ops::Mul;\n\n\n\n/// Calculate the inner product of two slices\n\n#[inline]\n", "file_path": "src/vec_ops.rs", "rank": 44, "score": 2.7813018586381326 }, { "content": " }\n\n\n\n #[test]\n\n fn norm2_sq_test() {\n\n assert_eq!(vec_ops::norm2_sq(&vec![3.0, 4.0]), 25.0);\n\n }\n\n\n\n #[test]\n\n fn norm2_test() {\n\n assert_eq!(vec_ops::norm2(&vec![3.0, 4.0]), 5.0);\n\n }\n\n\n\n #[test]\n\n fn norm_inf_test() {\n\n assert_eq!(vec_ops::norm_inf(&vec![1.0, -2.0, -3.0]), 3.0);\n\n assert_eq!(vec_ops::norm_inf(&vec![1.0, -8.0, -3.0, 0.0]), 8.0);\n\n }\n\n\n\n #[test]\n\n fn diff_and_save_test() {\n", "file_path": "src/vec_ops.rs", "rank": 45, "score": 2.747282253477815 }, { "content": "//! let problem_size = 3;\n\n//! let lbfgs_memory_size = 5;\n\n//!\n\n//! // Create the L-BFGS instance with curvature and C-BFGS checks enabled\n\n//! let mut lbfgs = Lbfgs::new(problem_size, lbfgs_memory_size)\n\n//! .with_sy_epsilon(1e-8) // L-BFGS acceptance condition on s'*y > sy_espsilon\n\n//! .with_cbfgs_alpha(1.0) // C-BFGS condition:\n\n//! .with_cbfgs_epsilon(1e-4); // y'*s/||s||^2 > epsilon * ||grad(x)||^alpha\n\n//!\n\n//! // Starting value is always accepted (no s or y vectors yet)\n\n//! assert_eq!(\n\n//! lbfgs.update_hessian(&[0.0, 0.0, 0.0], &[0.0, 0.0, 0.0]),\n\n//! UpdateStatus::UpdateOk\n\n//! );\n\n//!\n\n//! // Rejected because of CBFGS condition\n\n//! assert_eq!(\n\n//! lbfgs.update_hessian(&[-0.838, 0.260, 0.479], &[-0.5, 0.6, -1.2]),\n\n//! UpdateStatus::Rejection\n\n//! 
);\n", "file_path": "src/lib.rs", "rank": 46, "score": 2.376347869342882 }, { "content": " }\n\n\n\n /// \"Empties\" the buffer\n\n ///\n\n /// This is a cheap operation as it amount to setting certain internal flags\n\n pub fn reset(&mut self) {\n\n self.active_size = 0;\n\n self.first_old = true;\n\n }\n\n\n\n /// Apply the current Hessian estimate to an input vector\n\n pub fn apply_hessian(&mut self, g: &mut [f64]) {\n\n assert!(g.len() == self.old_g.len());\n\n\n\n if self.active_size == 0 {\n\n // No Hessian available, the g is the best we can do for now\n\n return;\n\n }\n\n\n\n let active_s = &self.s[0..self.active_size];\n", "file_path": "src/lib.rs", "rank": 47, "score": 2.1693163990134496 }, { "content": "//! lbfgs.apply_hessian(&mut g);\n\n//!\n\n//! assert!((g[0] - correct_dir[0]).abs() < 1e-12);\n\n//! assert!((g[1] - correct_dir[1]).abs() < 1e-12);\n\n//! assert!((g[2] - correct_dir[2]).abs() < 1e-12);\n\n//! }\n\n//! ```\n\n//!\n\n//! # Errors\n\n//!\n\n//! `update_hessian` will give errors if the C-BFGS or L-BFGS curvature conditions are not met.\n\n//!\n\n//! # Panics\n\n//!\n\n//! `with_sy_epsilon`, `with_cbfgs_alpha`, and `with_cbfgs_epsilon` will panic if given negative\n\n//! values.\n\n//!\n\n//! `update_hessian` and `apply_hessian` will panic if given slices which are not the same length\n\n//! 
as the `problem_size`.\n\n//!\n", "file_path": "src/lib.rs", "rank": 48, "score": 2.1425335721526997 }, { "content": "\n\n lhs_cbfgs > rhs_cbfgs\n\n } else {\n\n // The standard L-BFGS conditions are satisfied and C-BFGS is\n\n // not active (either cbfgs_epsilon <= 0.0 or cbfgs_alpha <= 0.0)\n\n true\n\n }\n\n }\n\n\n\n /// Saves vectors to update the Hessian estimate\n\n pub fn update_hessian(&mut self, g: &[f64], state: &[f64]) -> UpdateStatus {\n\n assert!(g.len() == self.old_state.len());\n\n assert!(state.len() == self.old_state.len());\n\n\n\n // First iteration, only save\n\n if self.first_old {\n\n self.first_old = false;\n\n\n\n self.old_state.copy_from_slice(state);\n\n self.old_g.copy_from_slice(g);\n", "file_path": "src/lib.rs", "rank": 49, "score": 1.9748421522717274 }, { "content": "//! # lbfgs\n\n//!\n\n//! The `L-BFGS` is an Hessian approximation algorithm commonly used by optimization algorithms in\n\n//! the family of quasi-Newton methods that approximates the Broyden–Fletcher–Goldfarb–Shanno (BFGS)\n\n//! algorithm using a limited amount of computer memory. In this implementation extra condition are\n\n//! added to have convergence properties for non-convex problems, based on the [C-BFGS conditions],\n\n//! together with basic checks on the local curvature.\n\n//!\n\n//! [C-BFGS conditions]: https://pdfs.semanticscholar.org/5b90/45b7d27a53b1e3c3b3f0dc6aab908cc3e0b2.pdf\n\n//!\n\n//! # Example\n\n//!\n\n//! Create a fully featured instance of the L-BFGS algorithm with curvature and C-BFGS checks\n\n//! enabled.\n\n//!\n\n//! ```\n\n//! use lbfgs::*;\n\n//!\n\n//! fn main() {\n\n//! // Problem size and the number of stored vectors in L-BFGS cannot be zero\n", "file_path": "src/lib.rs", "rank": 50, "score": 1.7422056075865835 }, { "content": "//!\n\n//! // This will fail because y'*s == 0 (curvature condition)\n\n//! assert_eq!(\n\n//! lbfgs.update_hessian(\n\n//! &[-0.5, 0.6, -1.2],\n\n//! 
&[0.419058177461747, 0.869843029576958, 0.260313940846084]\n\n//! ),\n\n//! UpdateStatus::Rejection\n\n//! );\n\n//!\n\n//! // A proper update that will be accepted\n\n//! assert_eq!(\n\n//! lbfgs.update_hessian(&[-0.5, 0.6, -1.2], &[0.1, 0.2, -0.3]),\n\n//! UpdateStatus::UpdateOk\n\n//! );\n\n//!\n\n//! // Apply Hessian approximation on a gradient\n\n//! let mut g = [-3.1, 1.5, 2.1];\n\n//! let correct_dir = [-1.100601247872944, -0.086568349404424, 0.948633011911515];\n\n//!\n", "file_path": "src/lib.rs", "rank": 51, "score": 1.3928075672323548 }, { "content": " /// Holds the state of the last `update_hessian`, used to calculate the `s_k` vectors\n\n old_state: Vec<f64>,\n\n /// Holds the g of the last `update_hessian`, used to calculate the `y_k` vectors\n\n old_g: Vec<f64>,\n\n /// Check to see if the `old_*` variables have valid data\n\n first_old: bool,\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum UpdateStatus {\n\n /// The g and state was accepted to update the Hessian estimate\n\n UpdateOk,\n\n /// The g and state was rejected by the C-BFGS criteria\n\n Rejection,\n\n}\n\n\n\nimpl Lbfgs {\n\n /// Create a new L-BFGS instance with a specific problem and L-BFGS buffer size\n\n pub fn new(problem_size: usize, buffer_size: usize) -> Lbfgs {\n\n assert!(problem_size > 0);\n", "file_path": "src/lib.rs", "rank": 52, "score": 1.367714773619709 }, { "content": "# `L-BFGS` [![Build Status](https://travis-ci.org/korken89/lbfgs-rs.svg?branch=master)](https://travis-ci.org/korken89/lbfgs-rs)\n\n\n\n`L-BFGS` ([Low-memory Broyden–Fletcher–Goldfarb–Shanno](https://en.wikipedia.org/wiki/Limited-memory_BFGS)) is a library for doing\n\nestimation and application of Hessians in numerical optimization while using\n\nlimited memory and never explicitly creating the Hessian. 
Only simple vector\n\noperation are used, as specified by the L-BFGS algorithm.\n\n\n\nThe specific L-BFGS algorithm implemented here can be found in\n\n[Algorithm 9.1 (L-BFGS two-loop recursion)](http://www.bioinfo.org.cn/~wangchao/maa/Numerical_Optimization.pdf).\n\n\n\nMoreover, the condition for the Cautious-BFGS (C-BFGS) algorithm, specified in\n\n[D.-H. Li and M. Fukushima, \"On the global convergence of the BFGS method for\n\nnonconvex unconstrained optimization problems\"](https://pdfs.semanticscholar.org/5b90/45b7d27a53b1e3c3b3f0dc6aab908cc3e0b2.pdf),\n\nis used to check the updates of the L-BFGS.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the\n\nwork by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any\n\nadditional terms or conditions.\n", "file_path": "README.md", "rank": 53, "score": 1.1305525691185307 }, { "content": "# Change Log\n\n\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](http://keepachangelog.com/)\n\nand this project adheres to [Semantic Versioning](http://semver.org/).\n\n\n\n\n\n## [Unreleased]\n\n\n\n### Fixed\n\n\n\n### Added\n\n\n\n### Changes\n\n\n\n## [v0.2.1] - 2019-06-03\n\n\n\n### Added\n\n\n\n* Added Pantelis to authors\n\n\n\n## [v0.2.0] - 2019-06-03\n\n\n\nThis is a breaking API change.\n\n\n\n### Added\n\n\n\n* `CHANGELOG.md` added\n\n\n\n### Changes\n\n\n\n* Stoped using `NonZeroUsize`, was too much cluttering.\n\n\n\n## [v0.1.1]\n\n\n\n\n\nFirst release.\n\n\n\n\n\n[Unreleased]: https://github.com/korken89/lbfgs-rs/compare/v0.2.1...master\n\n[v0.2.0]: 
https://github.com/korken89/lbfgs-rs/compare/v0.2.0...v0.2.1\n\n[v0.2.0]: https://github.com/korken89/lbfgs-rs/compare/v0.1.1...v0.2.0\n\n[v0.1.1]: https://github.com/korken89/lbfgs-rs/compare/v0.1.0...v0.1.1\n", "file_path": "CHANGELOG.md", "rank": 54, "score": 0.7486604788844886 } ]
Rust
src/iam.rs
NathanHowell/google-cloud-storage-rs
f599aa4009c9475225ba1a6fbcd2961836992e69
use crate::google::iam::v1::{Policy, TestIamPermissionsResponse}; use crate::google::storage::v1::{ GetIamPolicyRequest, SetIamPolicyRequest, TestIamPermissionsRequest, }; use crate::query::{PushIf, Query}; use crate::request::Request; use crate::urls::Urls; use crate::{Client, Result}; use reqwest::Method; use std::fmt::Debug; use url::Url; fn iam_url<'a, R, F>(base_url: Url, iam_request: Option<&'a R>, resource: F) -> Result<Url> where F: FnOnce(&'a R) -> &'a str, { let request = iam_request.ok_or(crate::Error::Other { source: "Expected iam_request field".into(), #[cfg(feature = "backtrace")] backtrace: std::backtrace::Backtrace::capture(), })?; let resource = resource(request); base_url.bucket(resource)?.join_segment("iam") } impl Query for GetIamPolicyRequest { fn request_query(&mut self) -> Vec<(&'static str, String)> { let mut query = self.common_request_params.take().request_query(); let mut requested_policy_version = self .iam_request .take() .and_then(|r| r.options) .map(|o| o.requested_policy_version); query.push_if_opt( "optionsRequestedPolicyVersion", &mut requested_policy_version, ); query } } impl Request for GetIamPolicyRequest { const REQUEST_METHOD: Method = Method::GET; type Response = Policy; fn request_path(&self, base_url: Url) -> Result<Url> { iam_url(base_url, self.iam_request.as_ref(), |r| &r.resource) } } impl Query for SetIamPolicyRequest { fn request_query(&mut self) -> Vec<(&'static str, String)> { self.common_request_params.take().request_query() } } impl Request for SetIamPolicyRequest { const REQUEST_METHOD: Method = Method::PUT; type Response = Policy; fn scope(&self) -> &'static str { crate::request::Scope::FULL_CONTROL } fn request_path(&self, base_url: Url) -> Result<Url> { iam_url(base_url, self.iam_request.as_ref(), |r| &r.resource) } } impl Query for TestIamPermissionsRequest { fn request_query(&mut self) -> Vec<(&'static str, String)> { let mut query = self.common_request_params.take().request_query(); query.extend( 
self.iam_request .take() .into_iter() .flat_map(|request| request.permissions) .map(|v| ("permissions", v)) .collect::<Vec<_>>(), ); query } } impl Request for TestIamPermissionsRequest { const REQUEST_METHOD: Method = Method::GET; type Response = TestIamPermissionsResponse; fn request_path(&self, base_url: Url) -> Result<Url> { Ok( iam_url(base_url, self.iam_request.as_ref(), |r| &r.resource)? .join("testPermissions")?, ) } } impl Client { #[doc = " Gets the IAM policy for the specified bucket."] #[tracing::instrument] pub async fn get_bucket_iam_policy( &self, request: impl Into<GetIamPolicyRequest> + Debug, ) -> crate::Result<Policy> { let request = request.into(); self.invoke(request).await } #[doc = " Updates an IAM policy for the specified bucket."] #[tracing::instrument] pub async fn set_bucket_iam_policy( &self, request: impl Into<SetIamPolicyRequest> + Debug, ) -> crate::Result<Policy> { let mut request = request.into(); let policy = request.iam_request.take().and_then(|r| r.policy); self.invoke_json(request, policy).await } #[doc = " Tests a set of permissions on the given bucket to see which, if"] #[doc = " any, are held by the caller."] #[tracing::instrument] pub async fn test_bucket_iam_permissions( &self, request: impl Into<TestIamPermissionsRequest> + Debug, ) -> crate::Result<TestIamPermissionsResponse> { let request = request.into(); self.invoke(request).await } }
use crate::google::iam::v1::{Policy, TestIamPermissionsResponse}; use crate::google::storage::v1::{ GetIamPolicyRequest, SetIamPolicyRequest, TestIamPermissionsRequest, }; use crate::query::{PushIf, Query}; use crate::request::Request; use crate::urls::Urls; use crate::{Client, Result}; use reqwest::Method; use std::fmt::Debug; use url::Url; fn iam_url<'a, R, F>(base_url: Url, iam_request: Option<&'a R>, resource: F) -> Result<Url> where F: FnOnce(&'a R) -> &'a str, { let request = iam_request.ok_or(crate::Error::Other { source: "Expected iam_request field".into(), #[cfg(feature = "backtrace")] backtrace: std::backtrace::Backtrace::capture(), })?; let resource = resource(request); base_url.bucket(resource)?.join_segment("iam") } impl Query for GetIamPolicyRequest { fn request_query(&mut self) -> Vec<(&'static str, String)> { let mut query = self.common_request_params.tak
} impl Request for GetIamPolicyRequest { const REQUEST_METHOD: Method = Method::GET; type Response = Policy; fn request_path(&self, base_url: Url) -> Result<Url> { iam_url(base_url, self.iam_request.as_ref(), |r| &r.resource) } } impl Query for SetIamPolicyRequest { fn request_query(&mut self) -> Vec<(&'static str, String)> { self.common_request_params.take().request_query() } } impl Request for SetIamPolicyRequest { const REQUEST_METHOD: Method = Method::PUT; type Response = Policy; fn scope(&self) -> &'static str { crate::request::Scope::FULL_CONTROL } fn request_path(&self, base_url: Url) -> Result<Url> { iam_url(base_url, self.iam_request.as_ref(), |r| &r.resource) } } impl Query for TestIamPermissionsRequest { fn request_query(&mut self) -> Vec<(&'static str, String)> { let mut query = self.common_request_params.take().request_query(); query.extend( self.iam_request .take() .into_iter() .flat_map(|request| request.permissions) .map(|v| ("permissions", v)) .collect::<Vec<_>>(), ); query } } impl Request for TestIamPermissionsRequest { const REQUEST_METHOD: Method = Method::GET; type Response = TestIamPermissionsResponse; fn request_path(&self, base_url: Url) -> Result<Url> { Ok( iam_url(base_url, self.iam_request.as_ref(), |r| &r.resource)? 
.join("testPermissions")?, ) } } impl Client { #[doc = " Gets the IAM policy for the specified bucket."] #[tracing::instrument] pub async fn get_bucket_iam_policy( &self, request: impl Into<GetIamPolicyRequest> + Debug, ) -> crate::Result<Policy> { let request = request.into(); self.invoke(request).await } #[doc = " Updates an IAM policy for the specified bucket."] #[tracing::instrument] pub async fn set_bucket_iam_policy( &self, request: impl Into<SetIamPolicyRequest> + Debug, ) -> crate::Result<Policy> { let mut request = request.into(); let policy = request.iam_request.take().and_then(|r| r.policy); self.invoke_json(request, policy).await } #[doc = " Tests a set of permissions on the given bucket to see which, if"] #[doc = " any, are held by the caller."] #[tracing::instrument] pub async fn test_bucket_iam_permissions( &self, request: impl Into<TestIamPermissionsRequest> + Debug, ) -> crate::Result<TestIamPermissionsResponse> { let request = request.into(); self.invoke(request).await } }
e().request_query(); let mut requested_policy_version = self .iam_request .take() .and_then(|r| r.options) .map(|o| o.requested_policy_version); query.push_if_opt( "optionsRequestedPolicyVersion", &mut requested_policy_version, ); query }
function_block-function_prefixed
[ { "content": "fn acl_url(base_url: Url, bucket: &str, object: &str) -> Result<Url> {\n\n base_url.bucket(bucket)?.object(object)?.join_segment(\"acl\")\n\n}\n\n\n\nimpl Query for InsertObjectAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if!(self, query, generation);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for InsertObjectAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::POST;\n\n\n\n type Response = ObjectAccessControl;\n\n\n\n fn scope(&self) -> &'static str {\n", "file_path": "src/object_access_control.rs", "rank": 1, "score": 152860.15065887698 }, { "content": "fn notification_configs_url(base_url: Url, bucket: &str) -> Result<Url> {\n\n Ok(base_url\n\n .bucket(bucket)?\n\n .join_segment(\"notificationConfigs\")?)\n\n}\n\n\n\nimpl Query for DeleteNotificationRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.request_query()\n\n }\n\n}\n\n\n\nimpl Request for DeleteNotificationRequest {\n\n const REQUEST_METHOD: Method = Method::DELETE;\n\n\n\n type Response = ();\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(notification_configs_url(base_url, &self.bucket)?.join(&self.notification)?)\n\n }\n", "file_path": "src/notifications.rs", "rank": 2, "score": 150491.51352673885 }, { "content": "fn acl_url(base_url: Url, bucket: &str) -> Result<Url> {\n\n base_url.bucket(bucket)?.join_segment(\"acl/\")\n\n}\n\n\n\nimpl Query for InsertBucketAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for InsertBucketAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::POST;\n\n\n\n type Response = BucketAccessControl;\n\n\n\n fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn 
request_path(&self, base_url: Url) -> Result<Url> {\n", "file_path": "src/bucket_access_control.rs", "rank": 3, "score": 147950.93448316812 }, { "content": "fn hmac_keys_url(base_url: Url, project_id: &str) -> Result<Url> {\n\n Ok(base_url\n\n .join(\"projects/\")?\n\n .join(project_id)?\n\n .join(\"hmacKeys/\")?)\n\n}\n\n\n\nimpl Query for CreateHmacKeyRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if!(self, query, service_account_email);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for CreateHmacKeyRequest {\n\n const REQUEST_METHOD: Method = Method::POST;\n\n\n", "file_path": "src/hmac_key.rs", "rank": 4, "score": 145540.4542167228 }, { "content": "fn default_object_acl_url(base_url: Url, bucket: &str) -> Result<Url> {\n\n base_url.bucket(bucket)?.join_segment(\"defaultObjectAcl\")\n\n}\n\n\n\nimpl Query for InsertDefaultObjectAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for InsertDefaultObjectAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::POST;\n\n\n\n type Response = ObjectAccessControl;\n\n\n\n fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n", "file_path": "src/default_object_access_control.rs", "rank": 5, "score": 141069.582829759 }, { "content": "#[test]\n\nfn rewrite_object_url() {\n\n let request = RewriteObjectRequest {\n\n source_bucket: \"bucket1\".to_string(),\n\n source_object: \"foo/bar/baz\".to_string(),\n\n destination_bucket: \"bucket1\".to_string(),\n\n destination_object: \"foo/bar/baz\".to_string(),\n\n ..Default::default()\n\n };\n\n\n\n let url = request\n\n .request_path(\n\n \"https://storage.googleapis.com/storage/v1/\"\n\n .parse()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n 
url.as_str(),\n\n \"https://storage.googleapis.com/storage/v1/b/bucket1/o/foo%2Fbar%2Fbaz/rewriteTo/b/bucket1/o/foo/bar/baz\"\n\n );\n\n}\n\n\n", "file_path": "src/tests/object_tests.rs", "rank": 6, "score": 60321.63695054365 }, { "content": "#[test]\n\nfn delete_bucket_url() {\n\n let bucket = \"gs://bucket\".parse::<DeleteBucketRequest>().unwrap();\n\n\n\n let url = bucket\n\n .request_path(\n\n \"https://storage.googleapis.com/storage/v1/\"\n\n .parse()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n url.as_str(),\n\n \"https://storage.googleapis.com/storage/v1/b/bucket\"\n\n );\n\n}\n\n\n", "file_path": "src/tests/bucket_tests.rs", "rank": 7, "score": 60321.63695054365 }, { "content": "#[test]\n\nfn list_buckets_url() {\n\n let bucket = ListBucketsRequest::default();\n\n\n\n let url = bucket\n\n .request_path(\n\n \"https://storage.googleapis.com/storage/v1/\"\n\n .parse()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(url.as_str(), \"https://storage.googleapis.com/storage/v1/b\");\n\n}\n\n\n", "file_path": "src/tests/bucket_tests.rs", "rank": 8, "score": 60321.63695054365 }, { "content": "#[test]\n\nfn copy_object_url() {\n\n let request = CopyObjectRequest {\n\n source_bucket: \"bucket1\".to_string(),\n\n source_object: \"foo/bar/baz\".to_string(),\n\n destination_bucket: \"bucket1\".to_string(),\n\n destination_object: \"foo/bar/baz\".to_string(),\n\n ..Default::default()\n\n };\n\n\n\n let url = request\n\n .request_path(\n\n \"https://storage.googleapis.com/storage/v1/\"\n\n .parse()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n url.as_str(),\n\n \"https://storage.googleapis.com/storage/v1/b/bucket1/o/foo%2Fbar%2Fbaz/copyTo/b/bucket1/o/foo/bar/baz\"\n\n );\n\n}\n\n\n", "file_path": "src/tests/object_tests.rs", "rank": 9, "score": 60321.63695054365 }, { "content": "#[test]\n\nfn list_objects_url() {\n\n let bucket = \"gs://bucket/object\".parse::<ListObjectsRequest>().unwrap();\n\n\n\n let url = bucket\n\n 
.request_path(\n\n \"https://storage.googleapis.com/storage/v1/\"\n\n .parse()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n url.as_str(),\n\n \"https://storage.googleapis.com/storage/v1/b/bucket/o\"\n\n );\n\n}\n\n\n\n// example from https://cloud.google.com/storage/docs/request-endpoints\n", "file_path": "src/tests/object_tests.rs", "rank": 10, "score": 60321.63695054365 }, { "content": "#[test]\n\nfn get_object_url() {\n\n let bucket = \"gs://bucket/object\".parse::<GetObjectRequest>().unwrap();\n\n\n\n let url = bucket\n\n .request_path(\n\n \"https://storage.googleapis.com/storage/v1/\"\n\n .parse()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n url.as_str(),\n\n \"https://storage.googleapis.com/storage/v1/b/bucket/o/object\"\n\n );\n\n}\n\n\n", "file_path": "src/tests/object_tests.rs", "rank": 11, "score": 60321.63695054365 }, { "content": "#[test]\n\nfn get_object_url_q() {\n\n let request = GetObjectRequest {\n\n bucket: \"example-bucket\".to_string(),\n\n object: \"foo??bar\".to_string(),\n\n ..Default::default()\n\n };\n\n\n\n let url = request\n\n .request_path(\n\n \"https://storage.googleapis.com/storage/v1/\"\n\n .parse()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n url.as_str(),\n\n \"https://storage.googleapis.com/storage/v1/b/example-bucket/o/foo%3F%3Fbar\"\n\n );\n\n}\n\n\n", "file_path": "src/tests/object_tests.rs", "rank": 12, "score": 60321.63695054365 }, { "content": "#[test]\n\nfn get_bucket_url() {\n\n let bucket = \"gs://bucket\".parse::<GetBucketRequest>().unwrap();\n\n\n\n let url = bucket\n\n .request_path(\n\n \"https://storage.googleapis.com/storage/v1/\"\n\n .parse()\n\n .unwrap(),\n\n )\n\n .unwrap();\n\n\n\n assert_eq!(\n\n url.as_str(),\n\n \"https://storage.googleapis.com/storage/v1/b/bucket\"\n\n );\n\n}\n\n\n", "file_path": "src/tests/bucket_tests.rs", "rank": 13, "score": 60321.63695054365 }, { "content": "fn main() {\n\n let mut config = prost_build::Config::new();\n\n\n\n 
config.type_attribute(\".\", r#\"#[derive(serde::Deserialize)]\"#);\n\n config.type_attribute(\".\", r#\"#[derive(serde::Serialize)]\"#);\n\n config.type_attribute(\".\", r#\"#[serde(rename_all = \"camelCase\")]\"#);\n\n\n\n config.field_attribute(\"in\", r#\"#[serde(rename = \"in\")]\"#);\n\n config.field_attribute(\"type\", r#\"#[serde(rename = \"type\")]\"#);\n\n\n\n config.field_attribute(\n\n \"crc32c\",\n\n r#\"#[serde(with = \"crate::serde::optional_crc32c\")]\"#,\n\n );\n\n\n\n for field in &[\"generation\", \"metageneration\", \"project_number\", \"size\"] {\n\n config.field_attribute(field, r#\"#[serde(with = \"crate::serde::into_string\")]\"#);\n\n }\n\n\n\n for path in &[\n", "file_path": "build.rs", "rank": 14, "score": 39419.302889738 }, { "content": "pub fn init() {\n\n INIT.call_once(|| {\n\n tracing_subscriber::fmt()\n\n .with_env_filter(EnvFilter::new(\"debug\"))\n\n .pretty()\n\n .with_span_events(FmtSpan::FULL)\n\n .init();\n\n });\n\n}\n", "file_path": "tests/util/mod.rs", "rank": 15, "score": 35017.346454513594 }, { "content": "#[test]\n\nfn valid_bucket() {\n\n let res = serde_json::from_str::<Bucket>(include_str!(\"valid_bucket.json\")).unwrap();\n\n\n\n assert_eq!(res.metageneration, 1);\n\n assert_eq!(res.name, \"new-bucket\");\n\n assert_eq!(res.project_number, 115258717311);\n\n}\n\n\n", "file_path": "src/tests/bucket_tests.rs", "rank": 16, "score": 34546.87158584791 }, { "content": "#[test]\n\nfn valid_crc32c() {\n\n let res = serde_json::from_str::<ListObjectsResponse>(include_str!(\"valid_objects_list.json\"))\n\n .unwrap();\n\n assert_eq!(res.items.len(), 2);\n\n\n\n let object = res.items.get(0).unwrap();\n\n assert_eq!(object.name, \"BingSiteAuth.xml\");\n\n assert_eq!(\n\n object.crc32c,\n\n Some(crc32c::crc32c(include_bytes!(\"BingSiteAuth.xml\")))\n\n );\n\n}\n", "file_path": "src/tests/object_tests.rs", "rank": 17, "score": 34546.87158584791 }, { "content": "#[test]\n\nfn valid_objects_list() {\n\n let res = 
serde_json::from_str::<ListObjectsResponse>(include_str!(\"valid_objects_list.json\"))\n\n .unwrap();\n\n assert_eq!(res.items.len(), 2);\n\n\n\n let object = res.items.get(0).unwrap();\n\n assert_eq!(object.name, \"BingSiteAuth.xml\");\n\n assert_eq!(object.bucket, \"old-website\");\n\n assert_eq!(object.crc32c, Some(1714892481));\n\n assert_eq!(object.id, \"old-website/BingSiteAuth.xml/1500357863879418\");\n\n assert_eq!(object.name, \"BingSiteAuth.xml\");\n\n assert_eq!(object.generation, 1500357863879418);\n\n assert_eq!(object.metageneration, 3);\n\n assert_eq!(object.content_type, \"text/xml\");\n\n assert_eq!(object.storage_class, \"MULTI_REGIONAL\");\n\n assert_eq!(object.size, 85);\n\n assert_eq!(object.md5_hash, \"7EST5TcVullac1DmfdqZGA==\");\n\n assert_eq!(object.cache_control, \"public, max-age=3600\");\n\n assert_eq!(object.etag, \"CPrljMyUktUCEAM=\");\n\n\n", "file_path": "src/tests/object_tests.rs", "rank": 18, "score": 33548.446260774384 }, { "content": "#[test]\n\nfn valid_bucket_list() {\n\n let res = serde_json::from_str::<ListBucketsResponse>(include_str!(\"valid_bucket_list.json\"))\n\n .unwrap();\n\n assert_eq!(res.items.len(), 2);\n\n let bucket = res.items.get(0).unwrap();\n\n assert_eq!(bucket.name, \"new-bucket\");\n\n\n\n let bucket = res.items.get(1).unwrap();\n\n assert_eq!(bucket.name, \"old-website\");\n\n}\n", "file_path": "src/tests/bucket_tests.rs", "rank": 19, "score": 33548.446260774384 }, { "content": "pub(crate) trait Request: Query {\n\n const REQUEST_METHOD: Method;\n\n\n\n type Response: DeserializeOwned;\n\n\n\n fn scope(&self) -> &'static str {\n\n if Self::REQUEST_METHOD == Method::GET {\n\n Scope::READ_ONLY\n\n } else {\n\n Scope::READ_WRITE\n\n }\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url>;\n\n\n\n fn request_headers(&self) -> HeaderMap {\n\n HeaderMap::new()\n\n }\n\n}\n", "file_path": "src/request.rs", "rank": 20, "score": 33178.49875564751 }, { "content": "use crate::query::Query;\n\nuse 
crate::Result;\n\nuse reqwest::header::HeaderMap;\n\nuse reqwest::Method;\n\nuse serde::de::DeserializeOwned;\n\nuse url::Url;\n\n\n\npub(crate) struct Scope;\n\n\n\nimpl Scope {\n\n pub(crate) const READ_ONLY: &'static str =\n\n \"https://www.googleapis.com/auth/devstorage.read_only\";\n\n\n\n pub(crate) const READ_WRITE: &'static str =\n\n \"https://www.googleapis.com/auth/devstorage.read_write\";\n\n\n\n pub(crate) const FULL_CONTROL: &'static str =\n\n \"https://www.googleapis.com/auth/devstorage.full_control\";\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 21, "score": 33178.158018424474 }, { "content": "use crate::google::storage::v1::common_enums::{\n\n PredefinedBucketAcl, PredefinedObjectAcl, Projection,\n\n};\n\nuse crate::google::storage::v1::CommonRequestParams;\n\nuse crate::push_if;\n\nuse std::mem;\n\n\n\npub(crate) trait Query {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)>;\n\n}\n\n\n\nimpl<T: Query> Query for Option<T> {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.take()\n\n .as_mut()\n\n .map(|q| q.request_query())\n\n .unwrap_or_default()\n\n }\n\n}\n\n\n", "file_path": "src/query.rs", "rank": 22, "score": 33079.00502442095 }, { "content": "impl Query for CommonRequestParams {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = Vec::new();\n\n\n\n push_if!(self, query, quota_user);\n\n\n\n if let Some(ref fields) = self.fields.take() {\n\n query.push((\"fields\", fields.paths.join(\",\")));\n\n }\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Query for PredefinedBucketAcl {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n use PredefinedBucketAcl::*;\n\n\n\n if let Some(predefined_acl) = match mem::take(self) {\n\n Unspecified => None,\n", "file_path": "src/query.rs", "rank": 23, "score": 33078.550848261264 }, { "content": " ObjectAclBucketOwnerRead => Some(\"bucketOwnerRead\"),\n\n ObjectAclPrivate => Some(\"private\"),\n\n ObjectAclProjectPrivate => 
Some(\"projectPrivate\"),\n\n ObjectAclPublicRead => Some(\"publicRead\"),\n\n } {\n\n *self = Unspecified;\n\n vec![(\n\n \"predefinedDefaultObjectAcl\",\n\n predefined_default_object_acl.to_string(),\n\n )]\n\n } else {\n\n vec![]\n\n }\n\n }\n\n}\n\n\n\nimpl Query for Projection {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n use Projection::*;\n\n if let Some(projection) = match self {\n", "file_path": "src/query.rs", "rank": 24, "score": 33074.41975414814 }, { "content": " BucketAclAuthenticatedRead => Some(\"authenticatedRead\"),\n\n BucketAclPrivate => Some(\"private\"),\n\n BucketAclProjectPrivate => Some(\"projectPrivate\"),\n\n BucketAclPublicRead => Some(\"publicRead\"),\n\n BucketAclPublicReadWrite => Some(\"publicReadWrite\"),\n\n } {\n\n vec![(\"predefinedAcl\", predefined_acl.to_string())]\n\n } else {\n\n vec![]\n\n }\n\n }\n\n}\n\n\n\nimpl Query for PredefinedObjectAcl {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n use PredefinedObjectAcl::*;\n\n if let Some(predefined_default_object_acl) = match self {\n\n Unspecified => None,\n\n ObjectAclAuthenticatedRead => Some(\"authenticatedRead\"),\n\n ObjectAclBucketOwnerFullControl => Some(\"bucketOwnerFullControl\"),\n", "file_path": "src/query.rs", "rank": 25, "score": 33072.026244889676 }, { "content": " Unspecified => None,\n\n NoAcl => Some(\"noAcl\"),\n\n Full => Some(\"full\"),\n\n } {\n\n *self = Unspecified;\n\n vec![(\"projection\", projection.to_string())]\n\n } else {\n\n vec![]\n\n }\n\n }\n\n}\n\n\n\npub(crate) trait PushIf<T> {\n\n fn push_if(&mut self, key: &'static str, value: &mut T);\n\n fn push_if_opt(&mut self, key: &'static str, value: &mut Option<T>);\n\n}\n\n\n\nimpl<T: Default + PartialEq + ToString> PushIf<T> for Vec<(&'static str, String)> {\n\n fn push_if(&mut self, key: &'static str, value: &mut T) {\n\n if value != &Default::default() {\n", "file_path": "src/query.rs", "rank": 26, "score": 33069.66235657782 }, { "content": " 
self.push((key, value.to_string()));\n\n }\n\n }\n\n\n\n fn push_if_opt(&mut self, key: &'static str, value: &mut Option<T>) {\n\n match value.take().as_mut() {\n\n Some(value) => self.push_if(key, value),\n\n None => {}\n\n }\n\n }\n\n}\n", "file_path": "src/query.rs", "rank": 27, "score": 33066.24453295678 }, { "content": "use crate::encode;\n\nuse crate::Result;\n\nuse url::Url;\n\n\n\npub(crate) trait Urls: Sized {\n\n fn join_segment(self, segment: impl AsRef<str>) -> crate::Result<Self>;\n\n fn bucket(self, bucket: impl AsRef<str>) -> Result<Self>;\n\n fn object(self, object: impl AsRef<str>) -> Result<Self>;\n\n fn slash_object(self, object: impl AsRef<str>) -> Result<Self>;\n\n}\n\n\n\nimpl Urls for Url {\n\n fn join_segment(mut self, segment: impl AsRef<str>) -> crate::Result<Self> {\n\n if !self.path().ends_with('/') {\n\n let path_segments = self.path_segments_mut();\n\n\n\n match path_segments {\n\n Err(_) => {\n\n drop(path_segments);\n\n return Err(crate::Error::InvalidRequestUrl {\n", "file_path": "src/urls.rs", "rank": 28, "score": 32690.56070025148 }, { "content": " url: self.clone(),\n\n #[cfg(feature = \"backtrace\")]\n\n backtrace: std::backtrace::Backtrace::capture(),\n\n });\n\n }\n\n Ok(mut path_segments) => {\n\n path_segments.push(\"/\");\n\n }\n\n };\n\n }\n\n\n\n Ok(self.join(segment.as_ref())?)\n\n }\n\n\n\n fn bucket(self, bucket: impl AsRef<str>) -> Result<Self> {\n\n Ok(self\n\n .join_segment(\"b\")?\n\n .join_segment(&encode::normal(bucket))?)\n\n }\n\n\n", "file_path": "src/urls.rs", "rank": 29, "score": 32683.74239943934 }, { "content": " fn object(self, object: impl AsRef<str>) -> Result<Self> {\n\n Ok(self\n\n .join_segment(\"o\")?\n\n .join_segment(&encode::normal(object))?)\n\n }\n\n\n\n fn slash_object(self, object: impl AsRef<str>) -> Result<Self> {\n\n Ok(self\n\n .join_segment(\"o\")?\n\n .join_segment(&encode::slash(object))?)\n\n }\n\n}\n", "file_path": "src/urls.rs", "rank": 30, "score": 32677.735529434143 }, { 
"content": "\n\n fn try_from(value: Url) -> Result<Self> {\n\n let object: Object = value.try_into()?;\n\n Ok(object.into())\n\n }\n\n}\n\n\n\nimpl FromStr for GetObjectRequest {\n\n type Err = crate::Error;\n\n\n\n fn from_str(value: &str) -> Result<Self> {\n\n Ok(value.parse::<Object>()?.into())\n\n }\n\n}\n\n\n\nimpl Query for ComposeObjectRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if_opt!(self, query, if_metageneration_match);\n", "file_path": "src/object.rs", "rank": 31, "score": 28.287665856691522 }, { "content": " fn try_from(value: Url) -> Result<DeleteObjectRequest> {\n\n let object: Object = value.try_into()?;\n\n Ok(object.into())\n\n }\n\n}\n\n\n\nimpl FromStr for DeleteObjectRequest {\n\n type Err = crate::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n Ok(s.parse::<Object>()?.into())\n\n }\n\n}\n\n\n\nimpl Query for UpdateObjectRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n unimplemented!()\n\n }\n\n}\n\n\n", "file_path": "src/object.rs", "rank": 32, "score": 27.302109489002408 }, { "content": "impl Request for CopyObjectRequest {\n\n const REQUEST_METHOD: Method = Method::POST;\n\n\n\n type Response = Object;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url\n\n .bucket(&self.source_bucket)?\n\n .slash_object(&self.source_object)?\n\n .join_segment(\"copyTo\")?\n\n .bucket(&self.destination_bucket)?\n\n .object(&self.destination_object)\n\n }\n\n}\n\n\n\nimpl Query for RewriteObjectRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n query.extend(self.common_object_request_params.request_query());\n\n\n", "file_path": "src/object.rs", "rank": 33, "score": 26.426899893220504 }, { "content": " fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(base_url.join(\"b\")?)\n\n 
}\n\n}\n\n\n\nimpl From<Bucket> for InsertBucketRequest {\n\n fn from(value: Bucket) -> Self {\n\n InsertBucketRequest {\n\n bucket: Some(value),\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl Query for DeleteBucketRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if_opt!(self, query, if_metageneration_match);\n\n push_if_opt!(self, query, if_metageneration_not_match);\n", "file_path": "src/bucket.rs", "rank": 34, "score": 26.213386098251966 }, { "content": "}\n\n\n\nimpl Query for GetNotificationRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.request_query()\n\n }\n\n}\n\n\n\nimpl Request for GetNotificationRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = Notification;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(notification_configs_url(base_url, &self.bucket)?.join(&self.notification)?)\n\n }\n\n}\n\n\n\nimpl Query for InsertNotificationRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n", "file_path": "src/notifications.rs", "rank": 36, "score": 25.759030340226634 }, { "content": " ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for DeleteBucketRequest {\n\n type Err = crate::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n let bucket = s.parse::<Bucket>()?;\n\n\n\n Ok(bucket.into())\n\n }\n\n}\n\n\n\nimpl Query for GetBucketRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if_opt!(self, query, if_metageneration_match);\n", "file_path": "src/bucket.rs", "rank": 37, "score": 24.903826305516954 }, { "content": " acl_url(base_url, &self.bucket, &self.object)\n\n }\n\n}\n\n\n\nimpl Query for GetObjectAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut 
query = self.common_request_params.request_query();\n\n\n\n push_if!(self, query, generation);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for GetObjectAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ObjectAccessControl;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n", "file_path": "src/object_access_control.rs", "rank": 38, "score": 24.178538092451966 }, { "content": " type Response = CreateHmacKeyResponse;\n\n\n\n fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n hmac_keys_url(base_url, &self.project_id)\n\n }\n\n}\n\n\n\nimpl Query for ListHmacKeysRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if!(self, query, max_results);\n\n push_if!(self, query, page_token);\n\n push_if!(self, query, service_account_email);\n\n push_if!(self, query, show_deleted_keys);\n\n\n", "file_path": "src/hmac_key.rs", "rank": 40, "score": 23.93972028594934 }, { "content": "impl Request for UpdateObjectRequest {\n\n const REQUEST_METHOD: Method = Method::PUT;\n\n\n\n type Response = Object;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url.bucket(&self.bucket)?.object(&self.object)\n\n }\n\n}\n\n\n\nimpl Query for ListObjectsRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if!(self, query, delimiter);\n\n push_if!(self, query, include_trailing_delimiter);\n\n push_if!(self, query, max_results);\n\n push_if!(self, query, page_token);\n\n push_if!(self, query, prefix);\n\n push_if!(self, query, page_token);\n", "file_path": "src/object.rs", "rank": 41, "score": 23.920342337953645 }, { "content": " crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> 
{\n\n Ok(acl_url(base_url, &self.bucket, &self.object)?.join(&self.entity)?)\n\n }\n\n}\n\n\n\nimpl Query for DeleteObjectAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if!(self, query, generation);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for DeleteObjectAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::DELETE;\n", "file_path": "src/object_access_control.rs", "rank": 42, "score": 23.854299654781414 }, { "content": " self.common_request_params.request_query()\n\n }\n\n}\n\n\n\nimpl Request for InsertNotificationRequest {\n\n const REQUEST_METHOD: Method = Method::POST;\n\n\n\n type Response = Notification;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n notification_configs_url(base_url, &self.bucket)\n\n }\n\n}\n\n\n\nimpl Query for ListNotificationsRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.request_query()\n\n }\n\n}\n\n\n", "file_path": "src/notifications.rs", "rank": 43, "score": 23.809887820426464 }, { "content": " pub fn new() -> Result<Self> {\n\n Self::builder().build()\n\n }\n\n}\n\n\n\nimpl Client {\n\n fn request_builder<R: Request>(&self, mut request: R) -> Result<RequestBuilder> {\n\n let path = request.request_path(self.base_url.clone())?;\n\n\n\n tracing::debug!(request_path = %path);\n\n\n\n Ok(self\n\n .client\n\n .request(R::REQUEST_METHOD, path)\n\n .headers(self.headers.headers(request.scope())?)\n\n .headers(request.request_headers())\n\n .query(&request.request_query()))\n\n }\n\n\n\n async fn request<R: Request>(&self, request: R) -> Result<Response> {\n", "file_path": "src/client.rs", "rank": 44, "score": 23.762922946276227 }, { "content": " fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(hmac_keys_url(base_url, 
&self.project_id)?.join(&self.access_id)?)\n\n }\n\n}\n\n\n\nimpl Query for DeleteHmacKeyRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.request_query()\n\n }\n\n}\n\n\n\nimpl Request for DeleteHmacKeyRequest {\n\n const REQUEST_METHOD: Method = Method::DELETE;\n\n\n\n type Response = ();\n\n\n", "file_path": "src/hmac_key.rs", "rank": 45, "score": 23.43409471828997 }, { "content": "impl Query for DeleteBucketAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for DeleteBucketAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::DELETE;\n\n\n\n type Response = ();\n\n\n\n fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(acl_url(base_url, &self.bucket)?.join(&self.entity)?)\n\n }\n\n}\n\n\n", "file_path": "src/bucket_access_control.rs", "rank": 46, "score": 23.434094718289966 }, { "content": " }\n\n}\n\n\n\nimpl From<Object> for Bucket {\n\n fn from(value: Object) -> Self {\n\n Bucket {\n\n name: value.bucket,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl Query for ListBucketsRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if!(self, query, project);\n\n push_if!(self, query, max_results);\n\n push_if!(self, query, page_token);\n\n push_if!(self, query, prefix);\n", "file_path": "src/bucket.rs", "rank": 47, "score": 23.409986641349644 }, { "content": "impl Query for UpdateBucketAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for UpdateBucketAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::PUT;\n\n\n\n type Response = 
BucketAccessControl;\n\n\n\n fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(acl_url(base_url, &self.bucket)?.join(&self.entity)?)\n\n }\n\n}\n\n\n", "file_path": "src/bucket_access_control.rs", "rank": 48, "score": 23.15184969447332 }, { "content": " crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n acl_url(base_url, &self.bucket, &self.object)\n\n }\n\n}\n\n\n\nimpl Query for ListObjectAccessControlsRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n unimplemented!()\n\n }\n\n}\n\n\n\nimpl Request for ListObjectAccessControlsRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ListObjectAccessControlsResponse;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n", "file_path": "src/object_access_control.rs", "rank": 49, "score": 23.062690396715727 }, { "content": "\n\nimpl Query for DeleteDefaultObjectAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for DeleteDefaultObjectAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::DELETE;\n\n\n\n type Response = ();\n\n\n\n fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(default_object_acl_url(base_url, &self.bucket)?.join(&self.entity)?)\n\n }\n\n}\n", "file_path": "src/default_object_access_control.rs", "rank": 50, "score": 23.05959338988043 }, { "content": " base_url.bucket(&resource.bucket)?.join_segment(\"o\")\n\n }\n\n\n\n fn request_headers(&self) -> HeaderMap<HeaderValue> {\n\n unimplemented!()\n\n }\n\n}\n\n\n\nimpl Query for GetObjectRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = 
self.common_request_params.request_query();\n\n query.extend(self.common_object_request_params.request_query());\n\n push_enum!(self, query, Projection, projection);\n\n unimplemented!()\n\n }\n\n}\n\n\n\nimpl Request for GetObjectRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n", "file_path": "src/object.rs", "rank": 51, "score": 22.846174444752847 }, { "content": " acl_url(base_url, &self.bucket)\n\n }\n\n}\n\n\n\nimpl Query for GetBucketAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for GetBucketAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = BucketAccessControl;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(acl_url(base_url, &self.bucket)?.join(&self.entity)?)\n\n }\n\n}\n\n\n", "file_path": "src/bucket_access_control.rs", "rank": 52, "score": 22.789420373606436 }, { "content": "\n\nimpl Query for UpdateDefaultObjectAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for UpdateDefaultObjectAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::PUT;\n\n\n\n type Response = ObjectAccessControl;\n\n\n\n fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(default_object_acl_url(base_url, &self.bucket)?.join(&self.entity)?)\n\n }\n\n}\n", "file_path": "src/default_object_access_control.rs", "rank": 53, "score": 22.788096111917966 }, { "content": "/// Convert gs://bucket/prefix Urls to an Object\n\nimpl TryFrom<Url> for Object {\n\n type Error = crate::Error;\n\n\n\n fn try_from(value: Url) -> Result<Self> {\n\n if value.scheme() != \"gs\" {\n\n return Err(crate::Error::Other {\n\n source: \"Unexpected scheme {}\".into(),\n\n 
#[cfg(feature = \"backtrace\")]\n\n backtrace: std::backtrace::Backtrace::capture(),\n\n });\n\n }\n\n\n\n Ok(Object {\n\n bucket: value.host_str().unwrap_or_default().to_string(),\n\n name: value.path().to_string().trim_start_matches('/').to_string(),\n\n ..Default::default()\n\n })\n\n }\n\n}\n", "file_path": "src/object.rs", "rank": 54, "score": 22.721264912950186 }, { "content": " fn request_path(&self, base_url: Url) -> Result<Url> {\n\n default_object_acl_url(base_url, &self.bucket)\n\n }\n\n}\n\n\n\nimpl Query for GetDefaultObjectAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for GetDefaultObjectAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ObjectAccessControl;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(default_object_acl_url(base_url, &self.bucket)?.join(&self.entity)?)\n\n }\n\n}\n", "file_path": "src/default_object_access_control.rs", "rank": 55, "score": 22.583327253088754 }, { "content": " fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url\n\n .bucket(&self.destination_bucket)?\n\n .object(&self.destination_object)?\n\n .join_segment(\"compose\")\n\n }\n\n}\n\n\n\nimpl Query for CopyObjectRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n query.extend(self.common_object_request_params.request_query());\n\n\n\n push_if!(self, query, destination_kms_key_name);\n\n\n\n query.extend(\n\n PredefinedObjectAcl::from_i32(mem::take(&mut self.destination_predefined_acl))\n\n .request_query()\n\n .into_iter()\n\n .map(|(_, v)| (constants::destination_predefined_acl, v)),\n", "file_path": "src/object.rs", "rank": 56, "score": 22.45997489190554 }, { "content": " const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = HmacKeyMetadata;\n\n\n\n fn 
request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(hmac_keys_url(base_url, &self.project_id)?.join(&self.access_id)?)\n\n }\n\n}\n\n\n\nimpl Query for UpdateHmacKeyRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.request_query()\n\n }\n\n}\n\n\n\nimpl Request for UpdateHmacKeyRequest {\n\n const REQUEST_METHOD: Method = Method::PUT;\n\n\n\n type Response = HmacKeyMetadata;\n\n\n", "file_path": "src/hmac_key.rs", "rank": 57, "score": 22.251585026266717 }, { "content": " Ok(acl_url(base_url, &self.bucket, &self.object)?.join(&self.entity)?)\n\n }\n\n}\n\n\n\nimpl Query for UpdateObjectAccessControlRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if!(self, query, generation);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for UpdateObjectAccessControlRequest {\n\n const REQUEST_METHOD: Method = Method::PUT;\n\n\n\n type Response = ObjectAccessControl;\n\n\n\n fn scope(&self) -> &'static str {\n", "file_path": "src/object_access_control.rs", "rank": 58, "score": 22.150200181435974 }, { "content": "impl Query for ListBucketAccessControlsRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.take().request_query()\n\n }\n\n}\n\n\n\nimpl Request for ListBucketAccessControlsRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ListBucketAccessControlsResponse;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n acl_url(base_url, &self.bucket)\n\n }\n\n}\n\n\n\nimpl Client {\n\n #[doc = \" Creates a new ACL entry on the specified bucket.\"]\n\n #[tracing::instrument]\n\n pub async fn insert_bucket_access_control(\n", "file_path": "src/bucket_access_control.rs", "rank": 59, "score": 22.033717823046306 }, { "content": " default_object_acl_url(base_url, &self.bucket)\n\n }\n\n}\n\n\n\nimpl Query for 
ListDefaultObjectAccessControlsRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.take().request_query();\n\n\n\n push_if_opt!(self, query, if_metageneration_match);\n\n push_if_opt!(self, query, if_metageneration_not_match);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for ListDefaultObjectAccessControlsRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ListObjectAccessControlsResponse;\n\n\n", "file_path": "src/default_object_access_control.rs", "rank": 60, "score": 20.77126611786764 }, { "content": " base64::encode(mem::take(&mut self.encryption_key_sha256).as_bytes()),\n\n ));\n\n }\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Query for InsertObjectRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n // obviously this needs some work\n\n let insert_object_spec = match self.first_message.as_mut().unwrap() {\n\n FirstMessage::UploadId(_) => panic!(),\n\n FirstMessage::InsertObjectSpec(spec) => spec,\n\n };\n\n\n\n let mut resource = insert_object_spec.resource.take().unwrap();\n\n\n\n let mut query = self.common_request_params.request_query();\n\n query.extend(self.common_object_request_params.request_query());\n", "file_path": "src/object.rs", "rank": 61, "score": 20.493148349897194 }, { "content": " fn extract_items(response: ListBucketsResponse) -> Vec<Self::Item> {\n\n response.items\n\n }\n\n\n\n fn into_request(self, response: &ListBucketsResponse) -> Option<Self> {\n\n if response.next_page_token.is_empty() {\n\n None\n\n } else {\n\n Some(ListBucketsRequest {\n\n page_token: response.next_page_token.clone(),\n\n ..self\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl Query for InsertBucketRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.take().request_query();\n\n\n", "file_path": "src/bucket.rs", "rank": 62, "score": 20.329909359845693 }, { "content": " 
query.push((\"uploadType\", \"media\".to_string()));\n\n push_if!(resource, query, name);\n\n query\n\n }\n\n}\n\n\n\nimpl Request for InsertObjectRequest {\n\n const REQUEST_METHOD: Method = Method::POST;\n\n\n\n type Response = Object;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n let insert_object_spec = match self.first_message.as_ref().unwrap() {\n\n FirstMessage::UploadId(_) => panic!(),\n\n FirstMessage::InsertObjectSpec(spec) => spec,\n\n };\n\n\n\n let resource = insert_object_spec.resource.as_ref().unwrap();\n\n let base_url = base_url.join(\"/upload/storage/v1/\")?;\n\n\n", "file_path": "src/object.rs", "rank": 63, "score": 20.006277979526224 }, { "content": "\n\n fn into_request(self, response: &ListHmacKeysResponse) -> Option<Self> {\n\n if response.next_page_token.is_empty() {\n\n None\n\n } else {\n\n Some(ListHmacKeysRequest {\n\n page_token: response.next_page_token.clone(),\n\n ..self\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl Query for GetHmacKeyRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n self.common_request_params.request_query()\n\n }\n\n}\n\n\n\nimpl Request for GetHmacKeyRequest {\n", "file_path": "src/hmac_key.rs", "rank": 64, "score": 19.77597234299506 }, { "content": "use tracing::Instrument;\n\n\n\nimpl FromStr for Bucket {\n\n type Err = crate::Error;\n\n\n\n fn from_str(value: &str) -> Result<Self> {\n\n let object = value.parse::<Object>()?;\n\n Ok(Bucket {\n\n name: object.bucket,\n\n ..Default::default()\n\n })\n\n }\n\n}\n\n\n\nimpl TryFrom<Url> for Bucket {\n\n type Error = crate::Error;\n\n\n\n fn try_from(value: Url) -> Result<Self> {\n\n let object: Object = value.try_into()?;\n\n Ok(object.into())\n", "file_path": "src/bucket.rs", "rank": 65, "score": 19.391351109102644 }, { "content": "\n\n push_enum!(self, query, Projection, projection);\n\n\n\n push_if!(self, query, rewrite_token);\n\n push_if!(self, query, source_generation);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for 
RewriteObjectRequest {\n\n const REQUEST_METHOD: Method = Method::POST;\n\n\n\n type Response = RewriteResponse;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url\n\n .bucket(&self.source_bucket)?\n\n .slash_object(&self.source_object)?\n\n .join_segment(\"rewriteTo\")?\n", "file_path": "src/object.rs", "rank": 66, "score": 19.142114334689246 }, { "content": " prefix: self.name,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\n/// Convert gs://bucket/prefix Urls to a ListObjectsRequest\n\nimpl TryInto<ListObjectsRequest> for Url {\n\n type Error = crate::Error;\n\n\n\n fn try_into(self) -> Result<ListObjectsRequest> {\n\n let object: Object = self.try_into()?;\n\n Ok(object.into())\n\n }\n\n}\n\n\n\nimpl FromStr for ListObjectsRequest {\n\n type Err = crate::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n", "file_path": "src/object.rs", "rank": 67, "score": 19.022722492464656 }, { "content": " }\n\n }\n\n}\n\n\n\n/// Convert gs://bucket/prefix Urls to a GetObjectMediaRequest\n\nimpl TryFrom<Url> for GetObjectMediaRequest {\n\n type Error = crate::Error;\n\n\n\n fn try_from(value: Url) -> Result<GetObjectMediaRequest> {\n\n let object: Object = value.try_into()?;\n\n Ok(object.into())\n\n }\n\n}\n\n\n\nimpl FromStr for GetObjectMediaRequest {\n\n type Err = crate::Error;\n\n\n\n fn from_str(value: &str) -> Result<Self> {\n\n Ok(value.parse::<Object>()?.into())\n\n }\n", "file_path": "src/object.rs", "rank": 68, "score": 18.73024184920904 }, { "content": "mod util;\n\n\n\nuse google_cloud_storage::storage::v1::{Bucket, InsertBucketRequest};\n\nuse google_cloud_storage::Client;\n\nuse httptest::{matchers::*, responders::*, Expectation, Server};\n\nuse url::Url;\n\n\n\n#[tokio::test]\n\nasync fn insert_bucket() -> Result<(), Box<dyn std::error::Error>> {\n\n util::init();\n\n\n\n let server = Server::run();\n\n\n\n server.expect(\n\n Expectation::matching(request::method_path(\"POST\", \"/storage/v1/b\"))\n\n 
.respond_with(status_code(200).body(include_str!(\"../src/tests/valid_bucket.json\"))),\n\n );\n\n\n\n let base_url = Url::parse(server.url_str(\"/storage/v1/\").as_str())?;\n\n\n", "file_path": "tests/bucket.rs", "rank": 69, "score": 18.68317650999353 }, { "content": "use futures::{Stream, StreamExt, TryStreamExt};\n\nuse reqwest::header::{HeaderMap, HeaderValue};\n\nuse reqwest::{Body, Method, Url};\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::fmt::Debug;\n\nuse std::mem;\n\nuse std::pin::Pin;\n\nuse std::str::FromStr;\n\nuse tracing::Instrument;\n\n\n\nimpl FromStr for Object {\n\n type Err = crate::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n let url = s.parse::<Url>()?;\n\n\n\n url.try_into()\n\n }\n\n}\n\n\n", "file_path": "src/object.rs", "rank": 70, "score": 18.484049778986172 }, { "content": " predefined_default_object_acl\n\n );\n\n push_enum!(self, query, Projection, projection);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for UpdateBucketRequest {\n\n const REQUEST_METHOD: Method = Method::PUT;\n\n\n\n type Response = Bucket;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url.bucket(&self.bucket)\n\n }\n\n}\n\n\n\nimpl From<Bucket> for UpdateBucketRequest {\n\n fn from(value: Bucket) -> Self {\n", "file_path": "src/bucket.rs", "rank": 71, "score": 18.464035169766746 }, { "content": "}\n\n\n\nimpl Query for DeleteObjectRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n query.extend(self.common_object_request_params.request_query());\n\n\n\n push_if_opt!(self, query, if_generation_match);\n\n push_if_opt!(self, query, if_generation_not_match);\n\n push_if_opt!(self, query, if_metageneration_match);\n\n push_if_opt!(self, query, if_metageneration_not_match);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for DeleteObjectRequest {\n\n const REQUEST_METHOD: Method = Method::DELETE;\n\n\n\n type Response = ();\n", 
"file_path": "src/object.rs", "rank": 72, "score": 18.143452762982452 }, { "content": " .bucket(&self.destination_bucket)?\n\n .object(&self.destination_object)\n\n }\n\n}\n\n\n\nimpl Query for GetObjectMediaRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n query.extend(self.common_object_request_params.request_query());\n\n\n\n query.push((\"alt\", \"media\".to_string()));\n\n push_if!(self, query, generation);\n\n push_if_opt!(self, query, if_generation_match);\n\n push_if_opt!(self, query, if_generation_not_match);\n\n push_if_opt!(self, query, if_metageneration_match);\n\n push_if_opt!(self, query, if_metageneration_not_match);\n\n\n\n query\n\n }\n\n}\n", "file_path": "src/object.rs", "rank": 73, "score": 17.937968047940462 }, { "content": "\n\n query\n\n }\n\n}\n\n\n\nimpl Request for DeleteBucketRequest {\n\n const REQUEST_METHOD: Method = Method::DELETE;\n\n\n\n type Response = ();\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url.bucket(&self.bucket)\n\n }\n\n}\n\n\n\nimpl From<Bucket> for DeleteBucketRequest {\n\n fn from(value: Bucket) -> Self {\n\n DeleteBucketRequest {\n\n bucket: value.name,\n\n if_metageneration_match: Some(value.metageneration),\n", "file_path": "src/bucket.rs", "rank": 74, "score": 17.788506529268137 }, { "content": "use crate::google::storage::v1::common_enums::{\n\n PredefinedBucketAcl, PredefinedObjectAcl, Projection,\n\n};\n\nuse crate::google::storage::v1::{\n\n DeleteBucketRequest, GetBucketRequest, InsertBucketRequest, ListBucketsRequest,\n\n ListBucketsResponse, UpdateBucketRequest,\n\n};\n\nuse crate::paginate::Paginate;\n\nuse crate::query::Query;\n\nuse crate::request::Request;\n\nuse crate::storage::v1::{Bucket, Object, PatchBucketRequest};\n\nuse crate::urls::Urls;\n\nuse crate::{push_enum, push_if, push_if_opt, Client, Result};\n\nuse futures::{Stream, TryStreamExt};\n\nuse reqwest::{Method, 
Url};\n\nuse std::convert::{TryFrom, TryInto};\n\nuse std::fmt::Debug;\n\nuse std::mem;\n\nuse std::pin::Pin;\n\nuse std::str::FromStr;\n", "file_path": "src/bucket.rs", "rank": 76, "score": 17.49872972533295 }, { "content": "\n\n type Response = ();\n\n\n\n fn scope(&self) -> &'static str {\n\n crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(acl_url(base_url, &self.bucket, &self.object)?.join(&self.entity)?)\n\n }\n\n}\n\n\n\nimpl Client {\n\n #[doc = \" Creates a new ACL entry on the specified object.\"]\n\n #[tracing::instrument]\n\n pub async fn insert_object_access_control(\n\n &self,\n\n request: impl Into<InsertObjectAccessControlRequest> + Debug,\n\n ) -> crate::Result<ObjectAccessControl> {\n\n let mut request = request.into();\n", "file_path": "src/object_access_control.rs", "rank": 77, "score": 17.398820898727518 }, { "content": "\n\nimpl Query for CommonObjectRequestParams {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = Vec::new();\n\n\n\n query.push_if(\n\n \"x-goog-encryption-algorithm\",\n\n &mut self.encryption_algorithm,\n\n );\n\n\n\n if !self.encryption_key.is_empty() {\n\n query.push((\n\n \"x-goog-encryption-key\",\n\n base64::encode(mem::take(&mut self.encryption_key).as_bytes()),\n\n ))\n\n }\n\n\n\n if !self.encryption_key_sha256.is_empty() {\n\n query.push((\n\n \"x-goog-encryption-key-sha256\",\n", "file_path": "src/object.rs", "rank": 78, "score": 17.389483753275343 }, { "content": " GetBucketRequest {\n\n bucket: value.name,\n\n if_metageneration_match: Some(value.metageneration),\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\nimpl Query for UpdateBucketRequest {\n\n fn request_query(&mut self) -> Vec<(&'static str, String)> {\n\n let mut query = self.common_request_params.request_query();\n\n\n\n push_if_opt!(self, query, if_metageneration_match);\n\n push_if_opt!(self, query, if_metageneration_not_match);\n\n\n\n push_enum!(self, 
query, PredefinedBucketAcl, predefined_acl);\n\n push_enum!(\n\n self,\n\n query,\n\n PredefinedObjectAcl,\n", "file_path": "src/bucket.rs", "rank": 79, "score": 17.28777120951245 }, { "content": "use crate::google::storage::v1::{\n\n DeleteNotificationRequest, GetNotificationRequest, InsertNotificationRequest,\n\n ListNotificationsRequest, ListNotificationsResponse, Notification,\n\n};\n\nuse crate::query::Query;\n\nuse crate::request::Request;\n\nuse crate::urls::Urls;\n\nuse crate::{Client, Result};\n\nuse reqwest::Method;\n\nuse url::Url;\n\n\n", "file_path": "src/notifications.rs", "rank": 80, "score": 17.22544000253651 }, { "content": " #[doc = \" bucket.\"]\n\n #[tracing::instrument]\n\n pub async fn compose_object(\n\n &self,\n\n request: impl Into<ComposeObjectRequest> + Debug,\n\n ) -> crate::Result<Object> {\n\n let mut request = request.into();\n\n\n\n #[derive(Debug, serde::Serialize)]\n\n #[serde(rename_all = \"camelCase\")]\n\n struct ComposeRequest {\n\n kind: &'static str,\n\n source_objects: Vec<SourceObjects>,\n\n destination: Object,\n\n }\n\n\n\n let body = ComposeRequest {\n\n kind: \"storage#composeRequest\",\n\n source_objects: mem::take(&mut request.source_objects),\n\n destination: Object {\n", "file_path": "src/object.rs", "rank": 81, "score": 16.998551986235302 }, { "content": " query\n\n }\n\n}\n\n\n\nimpl Request for ListHmacKeysRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ListHmacKeysResponse;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n hmac_keys_url(base_url, &self.project_id)\n\n }\n\n}\n\n\n\nimpl<'a> Paginate<'a> for ListHmacKeysRequest {\n\n type Item = HmacKeyMetadata;\n\n\n\n fn extract_items(response: ListHmacKeysResponse) -> Vec<Self::Item> {\n\n response.items\n\n }\n", "file_path": "src/hmac_key.rs", "rank": 82, "score": 16.87626037056583 }, { "content": " type Error = crate::Error;\n\n\n\n fn try_into(self) -> Result<GetBucketRequest> {\n\n let bucket: Bucket = 
self.try_into()?;\n\n Ok(bucket.into())\n\n }\n\n}\n\n\n\nimpl FromStr for GetBucketRequest {\n\n type Err = crate::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self> {\n\n let bucket = s.parse::<Bucket>()?;\n\n\n\n Ok(bucket.into())\n\n }\n\n}\n\n\n\nimpl From<Bucket> for GetBucketRequest {\n\n fn from(value: Bucket) -> Self {\n", "file_path": "src/bucket.rs", "rank": 84, "score": 16.53726691836566 }, { "content": " push_if_opt!(self, query, if_metageneration_not_match);\n\n\n\n push_enum!(self, query, Projection, projection);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for GetBucketRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = Bucket;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url.bucket(&self.bucket)\n\n }\n\n}\n\n\n\n/// Convert gs://bucket/prefix Urls to a GetBucketRequest\n\nimpl TryInto<GetBucketRequest> for Url {\n", "file_path": "src/bucket.rs", "rank": 85, "score": 16.06736547510915 }, { "content": "use crate::google::storage::v1::{\n\n BucketAccessControl, DeleteBucketAccessControlRequest, GetBucketAccessControlRequest,\n\n InsertBucketAccessControlRequest, ListBucketAccessControlsRequest,\n\n ListBucketAccessControlsResponse, UpdateBucketAccessControlRequest,\n\n};\n\nuse crate::query::Query;\n\nuse crate::request::Request;\n\nuse crate::storage::v1::PatchBucketAccessControlRequest;\n\nuse crate::urls::Urls;\n\nuse crate::{Client, Result};\n\nuse reqwest::Method;\n\nuse std::fmt::Debug;\n\nuse url::Url;\n\n\n", "file_path": "src/bucket_access_control.rs", "rank": 86, "score": 16.01524904628093 }, { "content": " push_enum!(self, query, Projection, projection);\n\n push_if!(self, query, versions);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for ListObjectsRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ListObjectsResponse;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url.bucket(&self.bucket)?.join_segment(\"o\")\n\n 
}\n\n}\n\n\n\nimpl<'a> Paginate<'a> for ListObjectsRequest {\n\n type Item = Object;\n\n\n", "file_path": "src/object.rs", "rank": 87, "score": 16.012792132891526 }, { "content": "use crate::google::storage::v1::common_enums::{PredefinedObjectAcl, Projection};\n\nuse crate::google::storage::v1::compose_object_request::SourceObjects;\n\nuse crate::google::storage::v1::insert_object_request::FirstMessage;\n\nuse crate::google::storage::v1::{\n\n Bucket, CommonObjectRequestParams, CommonRequestParams, ComposeObjectRequest,\n\n CopyObjectRequest, DeleteObjectRequest, GetObjectMediaRequest, GetObjectRequest,\n\n InsertObjectRequest, ListObjectsRequest, ListObjectsResponse, ObjectChecksums,\n\n RewriteObjectRequest, RewriteResponse, StartResumableWriteRequest, UpdateObjectRequest,\n\n};\n\nuse crate::paginate::Paginate;\n\nuse crate::query::{PushIf, Query};\n\nuse crate::request::Request;\n\nuse crate::storage::v1::{\n\n InsertObjectSpec, Object, PatchObjectRequest, QueryWriteStatusRequest,\n\n QueryWriteStatusResponse, StartResumableWriteResponse,\n\n};\n\nuse crate::urls::Urls;\n\nuse crate::Result;\n\nuse crate::{constants, push_enum, push_if, push_if_opt, Client};\n\nuse bytes::Bytes;\n", "file_path": "src/object.rs", "rank": 88, "score": 15.979073022609953 }, { "content": "\n\n push_enum!(self, query, Projection, projection);\n\n\n\n query\n\n }\n\n}\n\n\n\nimpl Request for ListBucketsRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ListBucketsResponse;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(base_url.join(\"b\")?)\n\n }\n\n}\n\n\n\nimpl<'a> Paginate<'a> for ListBucketsRequest {\n\n type Item = Bucket;\n\n\n", "file_path": "src/bucket.rs", "rank": 89, "score": 15.967888696543744 }, { "content": "use crate::google::storage::v1::{\n\n DeleteObjectAccessControlRequest, GetObjectAccessControlRequest,\n\n InsertObjectAccessControlRequest, ListObjectAccessControlsRequest,\n\n ListObjectAccessControlsResponse, 
ObjectAccessControl, UpdateObjectAccessControlRequest,\n\n};\n\nuse crate::query::Query;\n\nuse crate::request::Request;\n\nuse crate::storage::v1::PatchObjectAccessControlRequest;\n\nuse crate::urls::Urls;\n\nuse crate::{push_if, Client, Result};\n\nuse reqwest::{Method, Url};\n\nuse std::fmt::Debug;\n\n\n", "file_path": "src/object_access_control.rs", "rank": 90, "score": 15.723957381306423 }, { "content": "use crate::google::storage::v1::{\n\n DeleteDefaultObjectAccessControlRequest, GetDefaultObjectAccessControlRequest,\n\n InsertDefaultObjectAccessControlRequest, ListDefaultObjectAccessControlsRequest,\n\n ListObjectAccessControlsResponse, ObjectAccessControl, UpdateDefaultObjectAccessControlRequest,\n\n};\n\nuse crate::query::Query;\n\nuse crate::request::Request;\n\nuse crate::storage::v1::PatchDefaultObjectAccessControlRequest;\n\nuse crate::urls::Urls;\n\nuse crate::{push_if_opt, Client, Result};\n\nuse reqwest::Method;\n\nuse std::fmt::Debug;\n\nuse url::Url;\n\n\n", "file_path": "src/default_object_access_control.rs", "rank": 91, "score": 15.682959363372749 }, { "content": "impl Request for ListNotificationsRequest {\n\n const REQUEST_METHOD: Method = Method::GET;\n\n\n\n type Response = ListNotificationsResponse;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n notification_configs_url(base_url, &self.bucket)\n\n }\n\n}\n\n\n\nimpl Client {\n\n #[doc = \" Permanently deletes a notification subscription.\"]\n\n #[doc = \" Note: Older, \\\"Object Change Notification\\\" push subscriptions should be\"]\n\n #[doc = \" deleted using StopChannel instead.\"]\n\n pub async fn delete_notification(\n\n &self,\n\n request: impl Into<DeleteNotificationRequest>,\n\n ) -> Result<()> {\n\n let request = request.into();\n\n\n", "file_path": "src/notifications.rs", "rank": 92, "score": 15.589024407194977 }, { "content": " backtrace: Backtrace,\n\n },\n\n #[error(transparent)]\n\n Serialization {\n\n #[from]\n\n source: serde_json::error::Error,\n\n 
#[cfg(feature = \"backtrace\")]\n\n backtrace: Backtrace,\n\n },\n\n #[error(transparent)]\n\n Url {\n\n #[from]\n\n source: url::ParseError,\n\n #[cfg(feature = \"backtrace\")]\n\n backtrace: Backtrace,\n\n },\n\n #[error(\"Invalid request url {url}\")]\n\n InvalidRequestUrl {\n\n url: Url,\n\n #[cfg(feature = \"backtrace\")]\n", "file_path": "src/error.rs", "rank": 93, "score": 15.337289677009291 }, { "content": "use crate::google::storage::v1::{\n\n CreateHmacKeyRequest, CreateHmacKeyResponse, DeleteHmacKeyRequest, GetHmacKeyRequest,\n\n HmacKeyMetadata, ListHmacKeysRequest, ListHmacKeysResponse, UpdateHmacKeyRequest,\n\n};\n\nuse crate::paginate::Paginate;\n\nuse crate::query::Query;\n\nuse crate::request::Request;\n\nuse crate::{push_if, Client, Result};\n\nuse futures::{Stream, TryStreamExt};\n\nuse reqwest::Method;\n\nuse std::fmt::Debug;\n\nuse std::pin::Pin;\n\nuse tracing::Instrument;\n\nuse url::Url;\n\n\n", "file_path": "src/hmac_key.rs", "rank": 94, "score": 15.215373990596255 }, { "content": "\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n base_url.bucket(&self.bucket)?.object(&self.object)\n\n }\n\n}\n\n\n\nimpl From<Object> for DeleteObjectRequest {\n\n fn from(value: Object) -> Self {\n\n DeleteObjectRequest {\n\n bucket: value.bucket,\n\n object: value.name,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\n/// Convert gs://bucket/prefix Urls to a DeleteObjectRequest\n\nimpl TryFrom<Url> for DeleteObjectRequest {\n\n type Error = crate::Error;\n\n\n", "file_path": "src/object.rs", "rank": 95, "score": 15.092074133026653 }, { "content": " #[doc = \" non-decreasing.\"]\n\n #[tracing::instrument]\n\n pub async fn query_write_status(\n\n &mut self,\n\n _request: impl Into<QueryWriteStatusRequest> + Debug,\n\n ) -> Result<QueryWriteStatusResponse> {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": "src/object.rs", "rank": 96, "score": 15.069388439815363 }, { "content": " fn scope(&self) -> &'static str {\n\n 
crate::request::Scope::FULL_CONTROL\n\n }\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(hmac_keys_url(base_url, &self.project_id)?.join(&self.access_id)?)\n\n }\n\n}\n\n\n\nimpl Client {\n\n #[doc = \" Creates a new HMAC key for the given service account.\"]\n\n #[tracing::instrument]\n\n pub async fn create_hmac_key(\n\n &self,\n\n request: impl Into<CreateHmacKeyRequest> + Debug,\n\n ) -> crate::Result<CreateHmacKeyResponse> {\n\n let request = request.into();\n\n\n\n self.invoke(request).await\n\n }\n", "file_path": "src/hmac_key.rs", "rank": 98, "score": 14.92095318618544 }, { "content": " type Response = Object;\n\n\n\n fn request_path(&self, base_url: Url) -> Result<Url> {\n\n Ok(base_url.bucket(&self.bucket)?.object(&self.object)?)\n\n }\n\n}\n\n\n\nimpl From<Object> for GetObjectRequest {\n\n fn from(value: Object) -> Self {\n\n GetObjectRequest {\n\n bucket: value.bucket,\n\n object: value.name,\n\n ..Default::default()\n\n }\n\n }\n\n}\n\n\n\n/// Convert gs://bucket/prefix Urls to a GetObjectRequest\n\nimpl TryFrom<Url> for GetObjectRequest {\n\n type Error = crate::Error;\n", "file_path": "src/object.rs", "rank": 99, "score": 14.905794194982551 } ]
Rust
sdk/guardduty/src/lens.rs
eduardomourar/aws-sdk-rust
58569c863afbe7bc442da8254df6c3970111de38
pub(crate) fn reflens_structure_crate_output_get_usage_statistics_output_next_token( input: &crate::output::GetUsageStatisticsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_detectors_output_next_token( input: &crate::output::ListDetectorsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_filters_output_next_token( input: &crate::output::ListFiltersOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_findings_output_next_token( input: &crate::output::ListFindingsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_invitations_output_next_token( input: &crate::output::ListInvitationsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_ip_sets_output_next_token( input: &crate::output::ListIpSetsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_members_output_next_token( input: &crate::output::ListMembersOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_organization_admin_accounts_output_next_token( input: &crate::output::ListOrganizationAdminAccountsOutput, ) -> 
std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_publishing_destinations_output_next_token( input: &crate::output::ListPublishingDestinationsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_threat_intel_sets_output_next_token( input: &crate::output::ListThreatIntelSetsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_detectors_output_detector_ids( input: crate::output::ListDetectorsOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.detector_ids { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_filters_output_filter_names( input: crate::output::ListFiltersOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.filter_names { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_findings_output_finding_ids( input: crate::output::ListFindingsOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.finding_ids { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_invitations_output_invitations( input: crate::output::ListInvitationsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::Invitation>> { let input = match input.invitations { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_ip_sets_output_ip_set_ids( input: crate::output::ListIpSetsOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match 
input.ip_set_ids { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_members_output_members( input: crate::output::ListMembersOutput, ) -> std::option::Option<std::vec::Vec<crate::model::Member>> { let input = match input.members { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_organization_admin_accounts_output_admin_accounts( input: crate::output::ListOrganizationAdminAccountsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::AdminAccount>> { let input = match input.admin_accounts { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_threat_intel_sets_output_threat_intel_set_ids( input: crate::output::ListThreatIntelSetsOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.threat_intel_set_ids { None => return None, Some(t) => t, }; Some(input) }
pub(crate) fn reflens_structure_crate_output_get_usage_statistics_output_next_token( input: &crate::output::GetUsageStatisticsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_detectors_output_next_token( input: &crate::output::ListDetectorsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_filters_output_next_token( input: &crate::output::ListFiltersOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_findings_output_next_token( input: &crate::output::ListFindingsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_invitations_output_next_token( input: &crate::output::ListInvitationsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_ip_sets_output_next_token( input: &crate::output::ListIpSetsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) }
pub(crate) fn reflens_structure_crate_output_list_organization_admin_accounts_output_next_token( input: &crate::output::ListOrganizationAdminAccountsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_publishing_destinations_output_next_token( input: &crate::output::ListPublishingDestinationsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn reflens_structure_crate_output_list_threat_intel_sets_output_next_token( input: &crate::output::ListThreatIntelSetsOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_detectors_output_detector_ids( input: crate::output::ListDetectorsOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.detector_ids { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_filters_output_filter_names( input: crate::output::ListFiltersOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.filter_names { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_findings_output_finding_ids( input: crate::output::ListFindingsOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.finding_ids { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_invitations_output_invitations( input: crate::output::ListInvitationsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::Invitation>> { let input = match input.invitations { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn 
lens_structure_crate_output_list_ip_sets_output_ip_set_ids( input: crate::output::ListIpSetsOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.ip_set_ids { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_members_output_members( input: crate::output::ListMembersOutput, ) -> std::option::Option<std::vec::Vec<crate::model::Member>> { let input = match input.members { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_organization_admin_accounts_output_admin_accounts( input: crate::output::ListOrganizationAdminAccountsOutput, ) -> std::option::Option<std::vec::Vec<crate::model::AdminAccount>> { let input = match input.admin_accounts { None => return None, Some(t) => t, }; Some(input) } pub(crate) fn lens_structure_crate_output_list_threat_intel_sets_output_threat_intel_set_ids( input: crate::output::ListThreatIntelSetsOutput, ) -> std::option::Option<std::vec::Vec<std::string::String>> { let input = match input.threat_intel_set_ids { None => return None, Some(t) => t, }; Some(input) }
pub(crate) fn reflens_structure_crate_output_list_members_output_next_token( input: &crate::output::ListMembersOutput, ) -> std::option::Option<&std::string::String> { let input = match &input.next_token { None => return None, Some(t) => t, }; Some(input) }
function_block-function_prefix_line
[]
Rust
semantics/src/traversal/functions.rs
vaporydev/fe
246b23bad148e358ea04b80ca9e4e7a5ce4cec8d
use crate::errors::SemanticError; use crate::namespace::scopes::{ BlockScope, BlockScopeType, ContractDef, ContractScope, Scope, Shared, }; use crate::namespace::types::{ Base, FixedSize, Tuple, Type, }; use crate::traversal::_utils::spanned_expression; use crate::traversal::{ assignments, declarations, expressions, types, }; use crate::{ Context, FunctionAttributes, }; use fe_parser::ast as fe; use fe_parser::span::Spanned; use std::rc::Rc; pub fn func_def( contract_scope: Shared<ContractScope>, context: Shared<Context>, def: &Spanned<fe::ContractStmt>, ) -> Result<(), SemanticError> { if let fe::ContractStmt::FuncDef { qual, name, args, return_type, body, } = &def.node { let function_scope = BlockScope::from_contract_scope(def.span, Rc::clone(&contract_scope)); let name = name.node.to_string(); let param_types = args .iter() .map(|arg| func_def_arg(Rc::clone(&function_scope), arg)) .collect::<Result<Vec<_>, _>>()?; let return_type = return_type .as_ref() .map(|typ| types::type_desc_fixed_size(Scope::Block(Rc::clone(&function_scope)), &typ)) .transpose()? .unwrap_or_else(|| Tuple::empty().to_fixed_size()); if !return_type.is_empty_tuple() { validate_all_paths_return_or_revert(&body)? } let is_public = qual.is_some(); contract_scope.borrow_mut().add_function( name.clone(), is_public, param_types.clone(), return_type.clone(), ); let attributes = FunctionAttributes { name, param_types, return_type, }; context.borrow_mut().add_function(def, attributes); traverse_statements(function_scope, context, body)?; return Ok(()); } unreachable!() } fn traverse_statements( scope: Shared<BlockScope>, context: Shared<Context>, body: &[Spanned<fe::FuncStmt>], ) -> Result<(), SemanticError> { for stmt in body.iter() { func_stmt(Rc::clone(&scope), Rc::clone(&context), stmt)? } Ok(()) } fn validate_all_paths_return_or_revert( block: &[Spanned<fe::FuncStmt>], ) -> Result<(), SemanticError> { for statement in block.iter().rev() { if let fe::FuncStmt::Return { .. 
} = &statement.node { return Ok(()); } if let fe::FuncStmt::Revert { .. } = &statement.node { return Ok(()); } if let fe::FuncStmt::If { test: _, body, or_else, } = &statement.node { let body_returns = validate_all_paths_return_or_revert(body).is_ok(); let or_else_returns = or_else.is_empty() || validate_all_paths_return_or_revert(or_else).is_ok(); if body_returns && or_else_returns { return Ok(()); } } } Err(SemanticError::MissingReturn) } fn func_def_arg( scope: Shared<BlockScope>, arg: &Spanned<fe::FuncDefArg>, ) -> Result<FixedSize, SemanticError> { let name = arg.node.name.node.to_string(); let typ = types::type_desc_fixed_size(Scope::Block(Rc::clone(&scope)), &arg.node.typ)?; scope.borrow_mut().add_var(name, typ.clone().into_type()); Ok(typ) } fn func_stmt( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { match &stmt.node { fe::FuncStmt::Return { .. } => func_return(scope, context, stmt), fe::FuncStmt::VarDecl { .. } => declarations::var_decl(scope, context, stmt), fe::FuncStmt::Assign { .. } => assignments::assign(scope, context, stmt), fe::FuncStmt::Emit { .. } => emit(scope, context, stmt), fe::FuncStmt::AugAssign { .. } => unimplemented!(), fe::FuncStmt::For { .. } => unimplemented!(), fe::FuncStmt::While { .. } => while_loop(scope, context, stmt), fe::FuncStmt::If { .. } => if_statement(scope, context, stmt), fe::FuncStmt::Assert { .. } => assert(scope, context, stmt), fe::FuncStmt::Expr { .. 
} => expr(scope, context, stmt), fe::FuncStmt::Pass => unimplemented!(), fe::FuncStmt::Break => break_statement(scope, context, stmt), fe::FuncStmt::Continue => continue_statement(scope, context, stmt), fe::FuncStmt::Revert => Ok(()), } } fn verify_is_boolean( scope: Shared<BlockScope>, context: Shared<Context>, expr: &Spanned<fe::Expr>, ) -> Result<(), SemanticError> { let attributes = expressions::expr(scope, context, expr)?; if let Type::Base(Base::Bool) = attributes.typ { return Ok(()); } Err(SemanticError::TypeError) } fn break_statement( scope: Shared<BlockScope>, _context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Break {} = &stmt.node { return verify_loop_in_scope(scope, SemanticError::BreakWithoutLoop); } unreachable!() } fn continue_statement( scope: Shared<BlockScope>, _context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Continue {} = &stmt.node { return verify_loop_in_scope(scope, SemanticError::ContinueWithoutLoop); } unreachable!() } fn verify_loop_in_scope( scope: Shared<BlockScope>, error: SemanticError, ) -> Result<(), SemanticError> { if scope.borrow().inherits_type(BlockScopeType::Loop) { Ok(()) } else { Err(error) } } fn if_statement( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { match &stmt.node { fe::FuncStmt::If { test, body, or_else, } => { let body_scope = BlockScope::from_block_scope(stmt.span, BlockScopeType::IfElse, Rc::clone(&scope)); traverse_statements(body_scope, Rc::clone(&context), body)?; let or_else_scope = BlockScope::from_block_scope(stmt.span, BlockScopeType::IfElse, Rc::clone(&scope)); traverse_statements(or_else_scope, Rc::clone(&context), or_else)?; verify_is_boolean(scope, context, test) } _ => unreachable!(), } } fn while_loop( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), 
SemanticError> { match &stmt.node { fe::FuncStmt::While { test, body, or_else, } => { if !or_else.is_empty() { unimplemented!(); } let body_scope = BlockScope::from_block_scope(stmt.span, BlockScopeType::Loop, Rc::clone(&scope)); traverse_statements(body_scope, Rc::clone(&context), body)?; verify_is_boolean(scope, context, test) } _ => unreachable!(), } } fn expr( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Expr { value } = &stmt.node { let spanned = spanned_expression(&stmt.span, value); let _attributes = expressions::expr(scope, context, &spanned)?; } Ok(()) } fn emit( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Emit { value: Spanned { node: fe::Expr::Call { func, args }, .. }, } = &stmt.node { let event_name = expressions::expr_name_string(func)?; if let Some(ContractDef::Event(event)) = scope.borrow().contract_def(event_name) { context.borrow_mut().add_emit(stmt, event); } for arg in args.node.iter() { call_arg(Rc::clone(&scope), Rc::clone(&context), arg)?; } return Ok(()); } unreachable!() } fn assert( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Assert { test, msg } = &stmt.node { verify_is_boolean(Rc::clone(&scope), Rc::clone(&context), test)?; if let Some(msg) = msg { let _msg_attributes = expressions::expr(scope, context, msg)?; } return Ok(()); } unreachable!() } fn call_arg( scope: Shared<BlockScope>, context: Shared<Context>, arg: &Spanned<fe::CallArg>, ) -> Result<(), SemanticError> { match &arg.node { fe::CallArg::Arg(value) => { let spanned = spanned_expression(&arg.span, value); let _attributes = expressions::expr(scope, context, &spanned)?; } fe::CallArg::Kwarg(fe::Kwarg { name: _, value }) => { let _attributes = expressions::expr(scope, context, value)?; } }; Ok(()) } fn 
func_return( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Return { value: Some(value) } = &stmt.node { let attributes = expressions::expr(Rc::clone(&scope), Rc::clone(&context), value)?; match context .borrow() .get_function(scope.borrow().function_scope().borrow().span) { Some(fn_attr) => { if fn_attr.return_type.clone().into_type() != attributes.typ { return Err(SemanticError::TypeError); } } None => unreachable!(), } return Ok(()); } unreachable!() } #[cfg(test)] mod tests { use crate::namespace::scopes::{ ContractDef, ContractScope, ModuleScope, Shared, }; use crate::namespace::types::{ Base, FixedSize, }; use crate::traversal::functions::func_def; use crate::Context; use fe_parser as parser; use std::rc::Rc; fn scope() -> Shared<ContractScope> { let module_scope = ModuleScope::new(); ContractScope::new(module_scope) } fn analyze(scope: Shared<ContractScope>, src: &str) -> Context { let context = Context::new_shared(); let tokens = parser::get_parse_tokens(src).expect("Couldn't parse expression"); let def = &parser::parsers::func_def(&tokens[..]) .expect("Couldn't build func def AST") .1; func_def(scope, Rc::clone(&context), def).expect("Couldn't map func def AST"); Rc::try_unwrap(context) .map_err(|_| "") .unwrap() .into_inner() } #[test] fn simple_func_def() { let scope = scope(); let func_def = "\ def foo(x: u256) -> u256:\ return x + x\ "; let context = analyze(Rc::clone(&scope), func_def); assert_eq!(context.expressions.len(), 3); assert_eq!( scope.borrow().def("foo".to_string()), Some(ContractDef::Function { is_public: false, params: vec![FixedSize::Base(Base::U256)], returns: FixedSize::Base(Base::U256) }) ); } }
use crate::errors::SemanticError; use crate::namespace::scopes::{ BlockScope, BlockScopeType, ContractDef, ContractScope, Scope, Shared, }; use crate::namespace::types::{ Base, FixedSize, Tuple, Type, }; use crate::traversal::_utils::spanned_expression; use crate::traversal::{ assignments, declarations, expressions, types, }; use crate::{ Context, FunctionAttributes, }; use fe_parser::ast as fe; use fe_parser::span::Spanned; use std::rc::Rc; pub fn func_def( contract_scope: Shared<ContractScope>, context: Shared<Context>, def: &Spanned<fe::ContractStmt>, ) -> Result<(), SemanticError> { if let fe::ContractStmt::FuncDef { qual, name, args, return_type, body, } = &def.node { let function_scope = BlockScope::from_contract_scope(def.span, Rc::clone(&contract_scope)); let name = name.node.to_string(); let param_types = args .iter() .map(|arg| func_def_arg(Rc::clone(&function_scope), arg)) .collect::<Result<Vec<_>, _>>()?; let return_type = return_type .as_ref() .map(|typ| types::type_desc_fixed_size(Scope::Block(Rc::clone(&function_scope)), &typ)) .transpose()? .unwrap_or_else(|| Tuple::empty().to_fixed_size()); if !return_type.is_empty_tuple() { validate_all_paths_return_or_revert(&body)? } let is_public = qual.is_some(); contract_scope.borrow_mut().add_function( name.clone(), is_public, param_types.clone(), return_type.clone(), ); let attributes = FunctionAttributes { name, param_types, return_type, }; context.borrow_mut().add_function(def, attributes); traverse_statements(function_scope, context, body)?; return Ok(()); } unreachable!() } fn traverse_statements( scope: Shared<BlockScope>, context: Shared<Context>, body: &[Spanned<fe::FuncStmt>], ) -> Result<(), SemanticError> { for stmt in body.iter() { func_stmt(Rc::clone(&scope), Rc::clone(&context), stmt)? } Ok(()) } fn validate_all_paths_return_or_revert( block: &[Spanned<fe::FuncStmt>], ) -> Result<(), SemanticError> { for statement in block.iter().rev() { if let fe::FuncStmt::Return { .. 
} = &statement.node { return Ok(()); } if let fe::FuncStmt::Revert { .. } = &statement.node { return Ok(()); } if let fe::FuncStmt::If { test: _, body, or_else, } = &statement.node { let body_returns = validate_all_paths_return_or_revert(body).is_ok(); let or_else_returns = or_else.is_empty() || validate_all_paths_return_or_revert(or_else).is_ok(); if body_returns && or_else_returns { return Ok(()); } } } Err(SemanticError::MissingReturn) } fn func_def_arg( scope: Shared<BlockScope>, arg: &Spanned<fe::FuncDefArg>, ) -> Result<FixedSize, SemanticError> { let name = arg.node.name.node.to_string(); let typ = types::type_desc_fixed_size(Scope::Block(Rc::clone(&scope)), &arg.node.typ)?; scope.borrow_mut().add_var(name, typ.clone().into_type()); Ok(typ) } fn func_stmt( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { match &stmt.node { fe::FuncStmt::Return { .. } => func_return(scope, context, stmt), fe::FuncStmt::VarDecl { .. } => declarations::var_decl(scope, context, stmt), fe::FuncStmt::Assign { .. } => assignments::assign(scope, context, stmt), fe::FuncStmt::Emit { .. } => emit(scope, context, stmt), fe::FuncStmt::AugAssign { .. } => unimplemented!(), fe::FuncStmt::For { .. } => unimplemented!(), fe::FuncStmt::While { .. } => while_loop(scope, context, stmt), fe::FuncStmt::If { .. } => if_statement(scope, context, stmt), fe::FuncStmt::Assert { .. } => assert(scope, context, stmt), fe::FuncStmt::Expr { .. 
} => expr(scope, context, stmt), fe::FuncStmt::Pass => unimplemented!(), fe::FuncStmt::Break => break_statement(scope, context, stmt), fe::FuncStmt::Continue => continue_statement(scope, context, stmt), fe::FuncStmt::Revert => Ok(()), } } fn verify_is_boolean( scope: Shared<BlockScope>, context: Shared<Context>, expr: &Spanned<fe::Expr>, ) -> Result<(), SemanticError> { let attributes = expressions::expr(scope, context, expr)?; if let Type::Base(Base::Bool) = attributes.typ { return Ok(()); } Err(SemanticError::TypeError) }
fn continue_statement( scope: Shared<BlockScope>, _context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Continue {} = &stmt.node { return verify_loop_in_scope(scope, SemanticError::ContinueWithoutLoop); } unreachable!() } fn verify_loop_in_scope( scope: Shared<BlockScope>, error: SemanticError, ) -> Result<(), SemanticError> { if scope.borrow().inherits_type(BlockScopeType::Loop) { Ok(()) } else { Err(error) } } fn if_statement( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { match &stmt.node { fe::FuncStmt::If { test, body, or_else, } => { let body_scope = BlockScope::from_block_scope(stmt.span, BlockScopeType::IfElse, Rc::clone(&scope)); traverse_statements(body_scope, Rc::clone(&context), body)?; let or_else_scope = BlockScope::from_block_scope(stmt.span, BlockScopeType::IfElse, Rc::clone(&scope)); traverse_statements(or_else_scope, Rc::clone(&context), or_else)?; verify_is_boolean(scope, context, test) } _ => unreachable!(), } } fn while_loop( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { match &stmt.node { fe::FuncStmt::While { test, body, or_else, } => { if !or_else.is_empty() { unimplemented!(); } let body_scope = BlockScope::from_block_scope(stmt.span, BlockScopeType::Loop, Rc::clone(&scope)); traverse_statements(body_scope, Rc::clone(&context), body)?; verify_is_boolean(scope, context, test) } _ => unreachable!(), } } fn expr( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Expr { value } = &stmt.node { let spanned = spanned_expression(&stmt.span, value); let _attributes = expressions::expr(scope, context, &spanned)?; } Ok(()) } fn emit( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Emit 
{ value: Spanned { node: fe::Expr::Call { func, args }, .. }, } = &stmt.node { let event_name = expressions::expr_name_string(func)?; if let Some(ContractDef::Event(event)) = scope.borrow().contract_def(event_name) { context.borrow_mut().add_emit(stmt, event); } for arg in args.node.iter() { call_arg(Rc::clone(&scope), Rc::clone(&context), arg)?; } return Ok(()); } unreachable!() } fn assert( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Assert { test, msg } = &stmt.node { verify_is_boolean(Rc::clone(&scope), Rc::clone(&context), test)?; if let Some(msg) = msg { let _msg_attributes = expressions::expr(scope, context, msg)?; } return Ok(()); } unreachable!() } fn call_arg( scope: Shared<BlockScope>, context: Shared<Context>, arg: &Spanned<fe::CallArg>, ) -> Result<(), SemanticError> { match &arg.node { fe::CallArg::Arg(value) => { let spanned = spanned_expression(&arg.span, value); let _attributes = expressions::expr(scope, context, &spanned)?; } fe::CallArg::Kwarg(fe::Kwarg { name: _, value }) => { let _attributes = expressions::expr(scope, context, value)?; } }; Ok(()) } fn func_return( scope: Shared<BlockScope>, context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Return { value: Some(value) } = &stmt.node { let attributes = expressions::expr(Rc::clone(&scope), Rc::clone(&context), value)?; match context .borrow() .get_function(scope.borrow().function_scope().borrow().span) { Some(fn_attr) => { if fn_attr.return_type.clone().into_type() != attributes.typ { return Err(SemanticError::TypeError); } } None => unreachable!(), } return Ok(()); } unreachable!() } #[cfg(test)] mod tests { use crate::namespace::scopes::{ ContractDef, ContractScope, ModuleScope, Shared, }; use crate::namespace::types::{ Base, FixedSize, }; use crate::traversal::functions::func_def; use crate::Context; use fe_parser as parser; use std::rc::Rc; 
fn scope() -> Shared<ContractScope> { let module_scope = ModuleScope::new(); ContractScope::new(module_scope) } fn analyze(scope: Shared<ContractScope>, src: &str) -> Context { let context = Context::new_shared(); let tokens = parser::get_parse_tokens(src).expect("Couldn't parse expression"); let def = &parser::parsers::func_def(&tokens[..]) .expect("Couldn't build func def AST") .1; func_def(scope, Rc::clone(&context), def).expect("Couldn't map func def AST"); Rc::try_unwrap(context) .map_err(|_| "") .unwrap() .into_inner() } #[test] fn simple_func_def() { let scope = scope(); let func_def = "\ def foo(x: u256) -> u256:\ return x + x\ "; let context = analyze(Rc::clone(&scope), func_def); assert_eq!(context.expressions.len(), 3); assert_eq!( scope.borrow().def("foo".to_string()), Some(ContractDef::Function { is_public: false, params: vec![FixedSize::Base(Base::U256)], returns: FixedSize::Base(Base::U256) }) ); } }
fn break_statement( scope: Shared<BlockScope>, _context: Shared<Context>, stmt: &Spanned<fe::FuncStmt>, ) -> Result<(), SemanticError> { if let fe::FuncStmt::Break {} = &stmt.node { return verify_loop_in_scope(scope, SemanticError::BreakWithoutLoop); } unreachable!() }
function_block-full_function
[ { "content": "/// Maps a type description node to an enum type.\n\npub fn type_desc(scope: Scope, typ: &Spanned<fe::TypeDesc>) -> Result<Type, SemanticError> {\n\n types::type_desc(&scope.module_scope().borrow().defs, &typ.node)\n\n}\n\n\n", "file_path": "semantics/src/traversal/types.rs", "rank": 0, "score": 469162.62953289854 }, { "content": "fn expr(context: &Context, stmt: &Spanned<fe::FuncStmt>) -> Result<yul::Statement, CompileError> {\n\n if let fe::FuncStmt::Expr { value } = &stmt.node {\n\n let spanned = spanned_expression(&stmt.span, value);\n\n let expr = expressions::expr(context, &spanned)?;\n\n if let Some(attributes) = context.get_expression(stmt.span) {\n\n if attributes.typ.is_empty_tuple() {\n\n return Ok(yul::Statement::Expression(expr));\n\n } else {\n\n return Ok(statement! { pop([expr])});\n\n }\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 1, "score": 436589.56289091404 }, { "content": "/// Builds a Yul expression from a Fe expression.\n\npub fn expr(context: &Context, exp: &Spanned<fe::Expr>) -> Result<yul::Expression, CompileError> {\n\n match &exp.node {\n\n fe::Expr::Name(_) => expr_name(context, exp),\n\n fe::Expr::Num(_) => expr_num(exp),\n\n fe::Expr::Bool(_) => expr_bool(exp),\n\n fe::Expr::Subscript { .. } => expr_subscript(context, exp),\n\n fe::Expr::Attribute { .. } => expr_attribute(context, exp),\n\n fe::Expr::Ternary { .. } => expr_ternary(context, exp),\n\n fe::Expr::BoolOperation { .. } => unimplemented!(),\n\n fe::Expr::BinOperation { .. } => expr_bin_operation(context, exp),\n\n fe::Expr::UnaryOperation { .. } => unimplemented!(),\n\n fe::Expr::CompOperation { .. } => expr_comp_operation(context, exp),\n\n fe::Expr::Call { .. } => expr_call(context, exp),\n\n fe::Expr::List { .. } => unimplemented!(),\n\n fe::Expr::ListComp { .. } => unimplemented!(),\n\n fe::Expr::Tuple { .. 
} => unimplemented!(),\n\n fe::Expr::Str(_) => unimplemented!(),\n\n fe::Expr::Ellipsis => unimplemented!(),\n\n }\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 2, "score": 419330.14786223765 }, { "content": "/// Retrieves the &str value of a name expression and converts it to a String.\n\npub fn expr_name_string(exp: &Spanned<fe::Expr>) -> Result<String, SemanticError> {\n\n expr_name_str(exp).map(|name| name.to_string())\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 3, "score": 400575.28637229395 }, { "content": "/// Retrieves the &str value of a name expression.\n\npub fn expr_name_str<'a>(exp: &Spanned<fe::Expr<'a>>) -> Result<&'a str, SemanticError> {\n\n if let fe::Expr::Name(name) = exp.node {\n\n return Ok(name);\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 4, "score": 385286.5014816697 }, { "content": "fn expr_name(_context: &Context, exp: &Spanned<fe::Expr>) -> Result<yul::Expression, CompileError> {\n\n if let fe::Expr::Name(name) = exp.node {\n\n return Ok(identifier_expression! {(name)});\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 5, "score": 381370.5509057475 }, { "content": "fn assert(context: &Context, stmt: &Spanned<fe::FuncStmt>) -> Result<yul::Statement, CompileError> {\n\n if let fe::FuncStmt::Assert { test, msg: _ } = &stmt.node {\n\n let test = expressions::expr(context, test)?;\n\n\n\n return Ok(statement! 
{ if (iszero([test])) { (revert(0, 0)) } });\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 6, "score": 376996.78809037723 }, { "content": "fn emit(context: &Context, stmt: &Spanned<fe::FuncStmt>) -> Result<yul::Statement, CompileError> {\n\n if let fe::FuncStmt::Emit { value } = &stmt.node {\n\n if let fe::Expr::Call { func: _, args } = &value.node {\n\n let event_values = args\n\n .node\n\n .iter()\n\n .map(|arg| call_arg(context, arg))\n\n .collect::<Result<_, _>>()?;\n\n\n\n if let Some(event) = context.get_emit(stmt) {\n\n return Ok(operations::emit_event(event.to_owned(), event_values));\n\n }\n\n\n\n return Err(CompileError::static_str(\"missing event definition\"));\n\n }\n\n\n\n return Err(CompileError::static_str(\n\n \"emit statements must contain a call expression\",\n\n ));\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 7, "score": 376996.7880903773 }, { "content": "/// Gather context information for a module and check for type errors.\n\npub fn module(context: Shared<Context>, module: &fe::Module) -> Result<(), SemanticError> {\n\n let scope = ModuleScope::new();\n\n\n\n for stmt in module.body.iter() {\n\n match &stmt.node {\n\n fe::ModuleStmt::TypeDef { .. } => type_def(Rc::clone(&scope), stmt)?,\n\n fe::ModuleStmt::ContractDef { .. } => {\n\n contracts::contract_def(Rc::clone(&scope), Rc::clone(&context), stmt)?\n\n }\n\n fe::ModuleStmt::FromImport { .. } => unimplemented!(),\n\n fe::ModuleStmt::SimpleImport { .. 
} => unimplemented!(),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "semantics/src/traversal/module.rs", "rank": 8, "score": 376663.77954814764 }, { "content": "/// Retrieves the &str value of a name expression and converts it to a String.\n\npub fn expr_name_string(exp: &Spanned<fe::Expr>) -> Result<String, CompileError> {\n\n expr_name_str(exp).map(|name| name.to_string())\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 9, "score": 366435.2621979344 }, { "content": "fn expr_bool(exp: &Spanned<fe::Expr>) -> Result<ExpressionAttributes, SemanticError> {\n\n if let fe::Expr::Bool(_) = &exp.node {\n\n return Ok(ExpressionAttributes {\n\n location: Location::Value,\n\n typ: Type::Base(Base::Bool),\n\n });\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 10, "score": 362550.3471449213 }, { "content": "fn expr_num(exp: &Spanned<fe::Expr>) -> Result<ExpressionAttributes, SemanticError> {\n\n if let fe::Expr::Num(_) = &exp.node {\n\n return Ok(ExpressionAttributes {\n\n location: Location::Value,\n\n typ: Type::Base(Base::U256),\n\n });\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 11, "score": 362550.3471449213 }, { "content": "/// Retrieves the &str value of a name expression.\n\npub fn expr_name_str<'a>(exp: &Spanned<fe::Expr<'a>>) -> Result<&'a str, CompileError> {\n\n if let fe::Expr::Name(name) = exp.node {\n\n return Ok(name);\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 12, "score": 353963.6235364799 }, { "content": "/// Performs semantic analysis of the source program and returns a `Context`\n\n/// instance.\n\npub fn analysis(module: &fe::Module) -> Result<Context, SemanticError> {\n\n let context = Context::new_shared();\n\n traversal::module::module(Rc::clone(&context), module)?;\n\n Ok(Rc::try_unwrap(context)\n\n .map_err(|_| \"more than one strong 
reference pointing to context\")\n\n // This should never panic.\n\n .expect(\"failed to unwrap reference counter\")\n\n .into_inner())\n\n}\n\n\n\npub mod test_utils {\n\n use crate::namespace::types::FixedSize;\n\n use crate::{\n\n Context,\n\n ExpressionAttributes,\n\n };\n\n use fe_parser::ast as fe;\n\n use fe_parser::span::{\n\n Span,\n\n Spanned,\n", "file_path": "semantics/src/lib.rs", "rank": 13, "score": 343025.9415484702 }, { "content": "fn revert(stmt: &Spanned<fe::FuncStmt>) -> Result<yul::Statement, CompileError> {\n\n if let fe::FuncStmt::Revert = &stmt.node {\n\n return Ok(statement! { revert(0, 0) });\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 14, "score": 310338.07237925415 }, { "content": "/// Builds a vector of Yul contracts from a Fe module.\n\npub fn module(context: &Context, module: &fe::Module) -> Result<YulContracts, CompileError> {\n\n module\n\n .body\n\n .iter()\n\n .try_fold(YulContracts::new(), |mut contracts, stmt| {\n\n match &stmt.node {\n\n fe::ModuleStmt::TypeDef { .. } => {}\n\n fe::ModuleStmt::ContractDef { name, .. } => {\n\n let contract = contracts::contract_def(context, stmt)?;\n\n\n\n if contracts.insert(name.node.to_string(), contract).is_some() {\n\n return Err(CompileError::static_str(\"duplicate contract def\"));\n\n }\n\n }\n\n fe::ModuleStmt::FromImport { .. } => unimplemented!(),\n\n fe::ModuleStmt::SimpleImport { .. } => unimplemented!(),\n\n }\n\n\n\n Ok(contracts)\n\n })\n\n}\n", "file_path": "compiler/src/yul/mappers/module.rs", "rank": 15, "score": 307539.48689606646 }, { "content": "/// Finds the type of an indexed expression.\n\n///\n\n/// e.g. 
`foo[42]`\n\npub fn index(value: Type, index: Type) -> Result<Type, SemanticError> {\n\n match value {\n\n Type::Array(array) => index_array(array, index),\n\n Type::Map(map) => index_map(map, index),\n\n Type::Base(_) => Err(SemanticError::NotSubscriptable),\n\n Type::Tuple(_) => Err(SemanticError::NotSubscriptable),\n\n Type::String(_) => Err(SemanticError::NotSubscriptable),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/namespace/operations.rs", "rank": 16, "score": 296726.5150291679 }, { "content": "/// Creates a new spanned expression. Useful in cases where an `Expr` is nested\n\n/// within the node of a `Spanned` object.\n\npub fn spanned_expression<'a>(span: &Span, exp: &fe::Expr<'a>) -> Spanned<fe::Expr<'a>> {\n\n Spanned {\n\n node: (*exp).clone(),\n\n span: (*span).to_owned(),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/traversal/_utils.rs", "rank": 17, "score": 296665.87917644286 }, { "content": "/// Creates a new spanned expression. Useful in cases where an `Expr` is nested\n\n/// within the node of a `Spanned` object.\n\npub fn spanned_expression<'a>(span: &Span, exp: &fe::Expr<'a>) -> Spanned<fe::Expr<'a>> {\n\n Spanned {\n\n node: (*exp).clone(),\n\n span: (*span).to_owned(),\n\n }\n\n}\n", "file_path": "compiler/src/yul/mappers/_utils.rs", "rank": 18, "score": 293697.76168541575 }, { "content": "fn expr_num(exp: &Spanned<fe::Expr>) -> Result<yul::Expression, CompileError> {\n\n if let fe::Expr::Num(num) = &exp.node {\n\n return Ok(literal_expression! {(num)});\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 19, "score": 290592.8409642159 }, { "content": "fn expr_bool(exp: &Spanned<fe::Expr>) -> Result<yul::Expression, CompileError> {\n\n if let fe::Expr::Bool(val) = &exp.node {\n\n return Ok(literal_expression! 
{(val)});\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 20, "score": 290592.8409642159 }, { "content": "pub fn expression_attributes_to_types(attributes: Vec<ExpressionAttributes>) -> Vec<Type> {\n\n attributes\n\n .iter()\n\n .map(|attributes| attributes.typ.clone())\n\n .collect()\n\n}\n\n\n", "file_path": "semantics/src/traversal/_utils.rs", "rank": 21, "score": 284629.1093335101 }, { "content": "fn func_def_arg(arg: &Spanned<fe::FuncDefArg>) -> yul::Identifier {\n\n let name = arg.node.name.node.to_string();\n\n identifier! {(name)}\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 22, "score": 273482.6267283865 }, { "content": "/// Builds a switch statement that dispatches calls to the contract.\n\npub fn dispatcher(attributes: Vec<FunctionAttributes>) -> Result<yul::Statement, CompileError> {\n\n let arms = attributes\n\n .iter()\n\n .map(|arm| dispatch_arm(arm.to_owned()))\n\n .collect::<Vec<_>>();\n\n\n\n Ok(switch! 
{\n\n switch (cloadn(0, 4))\n\n [arms...]\n\n })\n\n}\n\n\n", "file_path": "compiler/src/yul/runtime/abi_dispatcher.rs", "rank": 23, "score": 272203.4204305716 }, { "content": "fn expr_attribute_msg(attr: &Spanned<&str>) -> Result<ExpressionAttributes, SemanticError> {\n\n match attr.node {\n\n \"sender\" => Ok(ExpressionAttributes {\n\n location: Location::Value,\n\n typ: Type::Base(Base::Address),\n\n }),\n\n value => Err(SemanticError::UndefinedValue {\n\n value: value.to_string(),\n\n }),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 24, "score": 270376.6346375794 }, { "content": "pub fn tuple(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n map(delimited(op(\"(\"), opt(exprs), op(\")\")), |spanned| {\n\n use Expr::Tuple;\n\n\n\n let node = match spanned.node {\n\n Some(Spanned {\n\n node: Tuple { elts },\n\n ..\n\n }) => Tuple { elts },\n\n Some(exp) => exp.node,\n\n None => Tuple { elts: vec![] },\n\n };\n\n let span = spanned.span;\n\n\n\n Spanned { node, span }\n\n })(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 25, "score": 256692.79096321197 }, { "content": "/// Gather context information for expressions and check for type errors.\n\npub fn expr(\n\n scope: Shared<BlockScope>,\n\n context: Shared<Context>,\n\n exp: &Spanned<fe::Expr>,\n\n) -> Result<ExpressionAttributes, SemanticError> {\n\n let attributes = match &exp.node {\n\n fe::Expr::Name(_) => expr_name(scope, exp)?,\n\n fe::Expr::Num(_) => expr_num(exp)?,\n\n fe::Expr::Bool(_) => expr_bool(exp)?,\n\n fe::Expr::Subscript { .. } => expr_subscript(scope, Rc::clone(&context), exp)?,\n\n fe::Expr::Attribute { .. } => expr_attribute(scope, exp)?,\n\n fe::Expr::Ternary { .. } => expr_ternary(scope, Rc::clone(&context), exp)?,\n\n fe::Expr::BoolOperation { .. } => unimplemented!(),\n\n fe::Expr::BinOperation { .. } => expr_bin_operation(scope, Rc::clone(&context), exp)?,\n\n fe::Expr::UnaryOperation { .. 
} => unimplemented!(),\n\n fe::Expr::CompOperation { .. } => expr_comp_operation(scope, Rc::clone(&context), exp)?,\n\n fe::Expr::Call { .. } => expr_call(scope, Rc::clone(&context), exp)?,\n\n fe::Expr::List { .. } => unimplemented!(),\n\n fe::Expr::ListComp { .. } => unimplemented!(),\n\n fe::Expr::Tuple { .. } => unimplemented!(),\n\n fe::Expr::Str(_) => unimplemented!(),\n\n fe::Expr::Ellipsis => unimplemented!(),\n\n };\n\n\n\n context.borrow_mut().add_expression(exp, attributes.clone());\n\n\n\n Ok(attributes)\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 26, "score": 254897.61984151893 }, { "content": "/// Parse a type definition (type alias).\n\npub fn type_def(input: Cursor) -> ParseResult<Spanned<ModuleStmt>> {\n\n let (input, type_kw) = name(\"type\")(input)?;\n\n let (input, name) = name_token(input)?;\n\n let (input, _) = op(\"=\")(input)?;\n\n let (input, type_desc) = type_desc(input)?;\n\n let (input, _) = newline_token(input)?;\n\n\n\n let span = Span::from_pair(type_kw, &type_desc);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: ModuleStmt::TypeDef {\n\n name: name.into(),\n\n typ: type_desc,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 27, "score": 252896.5516262779 }, { "content": "/// Parse a token of a specific type.\n\npub fn token<'a>(typ: TokenType) -> impl Fn(Cursor<'a>) -> ParseResult<&Token> {\n\n verify(\n\n next,\n\n move |t| t.typ == typ,\n\n move |inp, _| ParseError::str(inp, format!(\"expected {:?} token\", typ)),\n\n )\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 28, "score": 252132.45674939483 }, { "content": "pub fn func_def(input: Cursor) -> ParseResult<Spanned<ContractStmt>> {\n\n let (input, qual) = opt(func_qual)(input)?;\n\n let (input, def_kw) = name(\"def\")(input)?;\n\n let (input, name_tok) = name_token(input)?;\n\n\n\n let (input, _) = op(\"(\")(input)?;\n\n let (input, args) = arg_list(input)?;\n\n let (input, _) = 
op(\")\")(input)?;\n\n\n\n let (input, return_type) = opt(preceded(op(\"->\"), base_type))(input)?;\n\n\n\n let (input, _) = op(\":\")(input)?;\n\n\n\n let (input, body) = block(input)?;\n\n\n\n let last = body.last().unwrap();\n\n let span = match &qual {\n\n Some(qual) => Span::from_pair(qual, last),\n\n None => Span::from_pair(def_kw, last),\n\n };\n", "file_path": "parser/src/parsers.rs", "rank": 29, "score": 250006.90223200852 }, { "content": "pub fn block(input: Cursor) -> ParseResult<Vec<Spanned<FuncStmt>>> {\n\n alt((simple_stmt, |input| {\n\n let (input, _) = newline_token(input)?;\n\n let (input, _) = indent_token(input)?;\n\n let (input, stmts) = many1(func_stmt)(input)?;\n\n let (input, _) = dedent_token(input)?;\n\n\n\n let result: Vec<_> = stmts.into_iter().flatten().collect();\n\n\n\n Ok((input, result))\n\n }))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 30, "score": 247342.48668183578 }, { "content": "pub fn expr(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n let (input, if_expr) = disjunct(input)?;\n\n let (input, ternary) = opt(|input| {\n\n let (input, _) = name(\"if\")(input)?;\n\n let (input, test) = disjunct(input)?;\n\n let (input, _) = name(\"else\")(input)?;\n\n let (input, else_expr) = expr(input)?;\n\n Ok((input, (test, else_expr)))\n\n })(input)?;\n\n\n\n let result = match ternary {\n\n Some((test, else_expr)) => {\n\n let span = Span::from_pair(&if_expr, &else_expr);\n\n\n\n Spanned {\n\n node: Expr::Ternary {\n\n if_expr: Box::new(if_expr),\n\n test: Box::new(test),\n\n else_expr: Box::new(else_expr),\n\n },\n\n span,\n\n }\n\n }\n\n None => if_expr,\n\n };\n\n\n\n Ok((input, result))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 31, "score": 239476.50919468323 }, { "content": "/// Loads a value from storage.\n\n///\n\n/// The returned expression evaluates to a 256 bit value.\n\npub fn sto_to_val<T: FeSized>(typ: T, sptr: yul::Expression) -> yul::Expression {\n\n let size = literal_expression! 
{ (typ.size()) };\n\n expression! { sloadn([sptr], [size]) }\n\n}\n\n\n", "file_path": "compiler/src/yul/operations.rs", "rank": 32, "score": 238418.10418721894 }, { "content": "/// Loads a value in memory.\n\n///\n\n/// The returned expression evaluates to a 256 bit value.\n\npub fn mem_to_val<T: FeSized>(typ: T, mptr: yul::Expression) -> yul::Expression {\n\n let size = literal_expression! { (typ.size()) };\n\n expression! { mloadn([mptr], [size]) }\n\n}\n\n\n", "file_path": "compiler/src/yul/operations.rs", "rank": 33, "score": 238418.104187219 }, { "content": "/// Copies a segment of storage into memory.\n\n///\n\n/// The returned expression evaluates to a memory pointer.\n\npub fn sto_to_mem<T: FeSized>(typ: T, sptr: yul::Expression) -> yul::Expression {\n\n let size = literal_expression! { (typ.size()) };\n\n expression! { scopy([sptr], [size]) }\n\n}\n\n\n", "file_path": "compiler/src/yul/operations.rs", "rank": 34, "score": 238417.97254694084 }, { "content": "pub fn arg_def(input: Cursor) -> ParseResult<Spanned<FuncDefArg>> {\n\n let (input, name_tok) = name_token(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, typ) = type_desc(input)?;\n\n\n\n let span = Span::from_pair(name_tok, &typ);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncDefArg {\n\n name: name_tok.into(),\n\n typ,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 35, "score": 236147.5587826493 }, { "content": "pub fn fixed_sizes_to_types(sizes: Vec<FixedSize>) -> Vec<Type> {\n\n sizes\n\n .iter()\n\n .map(|param| param.clone().into_type())\n\n .collect()\n\n}\n", "file_path": "semantics/src/traversal/_utils.rs", "rank": 36, "score": 234394.740304555 }, { "content": "fn index_map(map: Map, index: Type) -> Result<Type, SemanticError> {\n\n if index != Type::Base(map.key) {\n\n return Err(SemanticError::TypeError);\n\n }\n\n\n\n Ok(*map.value)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::errors::SemanticError;\n\n use 
crate::namespace::operations;\n\n use crate::namespace::types::{\n\n Array,\n\n Base,\n\n Map,\n\n Type,\n\n };\n\n use rstest::rstest;\n\n\n", "file_path": "semantics/src/namespace/operations.rs", "rank": 37, "score": 231518.89275415865 }, { "content": "fn index_array(array: Array, index: Type) -> Result<Type, SemanticError> {\n\n if index != Type::Base(Base::U256) {\n\n return Err(SemanticError::TypeError);\n\n }\n\n\n\n Ok(Type::Base(array.inner))\n\n}\n\n\n", "file_path": "semantics/src/namespace/operations.rs", "rank": 38, "score": 231518.89275415865 }, { "content": "/// Parse a comma-separated list of expressions.\n\npub fn exprs(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n let (input, mut elts) = separated(expr, op(\",\"), false)(input)?;\n\n let (input, comma) = opt(op(\",\"))(input)?;\n\n\n\n let first = elts.first().unwrap();\n\n\n\n let result = match comma {\n\n Some(comma_tok) => {\n\n let span = Span::from_pair(first, comma_tok);\n\n\n\n Spanned {\n\n node: Expr::Tuple { elts },\n\n span,\n\n }\n\n }\n\n None => {\n\n if elts.len() > 1 {\n\n let last = elts.last().unwrap();\n\n let span = Span::from_pair(first, last);\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 39, "score": 230001.15153062483 }, { "content": "/// Parse a map of contract ABIs from the input `module`.\n\npub fn module<'a>(module: &'a fe::Module<'a>) -> Result<ModuleABIs, CompileError> {\n\n let mut type_defs = TypeDefs::new();\n\n\n\n module.body.iter().try_fold(ModuleABIs::new(), |mut m, s| {\n\n match &s.node {\n\n fe::ModuleStmt::TypeDef { name, typ } => {\n\n if type_defs.insert(name.node, &typ.node).is_some() {\n\n return Err(CompileError::static_str(\"duplicate type definition\"));\n\n }\n\n }\n\n fe::ModuleStmt::ContractDef { name, body } => {\n\n if m.contracts\n\n .insert(name.node.to_string(), contract_def(&type_defs, body)?)\n\n .is_some()\n\n {\n\n return Err(CompileError::static_str(\"duplicate contract definition\"));\n\n }\n\n }\n\n _ => {}\n\n };\n\n\n\n 
Ok(m)\n\n })\n\n}\n\n\n", "file_path": "compiler/src/abi/builder.rs", "rank": 40, "score": 228992.70871208305 }, { "content": "pub fn return_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, return_kw) = name(\"return\")(input)?;\n\n let (input, value) = opt(exprs)(input)?;\n\n\n\n let span = match &value {\n\n Some(exp) => Span::from_pair(return_kw, exp),\n\n None => return_kw.span,\n\n };\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::Return { value },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 41, "score": 228430.0003212737 }, { "content": "/// Parse a base type along with an optional array dimension list.\n\n///\n\n/// Example:\n\n/// int128[2][3]\n\npub fn base_type(input: Cursor) -> ParseResult<Spanned<TypeDesc>> {\n\n let (input, base) = name_token(input)?;\n\n let (input, dims) = arr_list(input)?;\n\n\n\n let mut result = Spanned {\n\n node: TypeDesc::Base { base: base.string },\n\n span: base.into(),\n\n };\n\n for dim in dims {\n\n let span = Span::from_pair(&result, &dim);\n\n\n\n result = Spanned {\n\n node: TypeDesc::Array {\n\n typ: Box::new(result),\n\n dimension: dim.node,\n\n },\n\n span,\n\n };\n\n }\n\n\n\n Ok((input, result))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 42, "score": 228373.34960206694 }, { "content": "pub fn assign_stmt(input: Cursor) -> ParseResult<Spanned<FuncStmt>> {\n\n let (input, targets_vec) = many1(terminated(targets, op(\"=\")))(input)?;\n\n let (input, value) = exprs(input)?;\n\n\n\n let first = targets_vec.first().unwrap();\n\n let span = Span::from_pair(first, &value);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: FuncStmt::Assign {\n\n targets: targets_vec,\n\n value,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 43, "score": 228350.32663858536 }, { "content": "pub fn shift_expr(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n op_expr_builder(sum, alt((op(\"<<\"), op(\">>\"))), 
bin_op_builder)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 44, "score": 227223.86619518377 }, { "content": "fn selection_as_statement(name: String, params: &[FixedSize]) -> yul::Statement {\n\n yul::Statement::Expression(selection(name, params))\n\n}\n\n\n", "file_path": "compiler/src/yul/runtime/abi_dispatcher.rs", "rank": 45, "score": 227089.18994153486 }, { "content": "/// Gather context information for assignments and check for type errors.\n\n///\n\n/// e.g. `foo[42] = \"bar\"`, `self.foo[42] = \"bar\"`, `foo = 42`\n\npub fn assign(\n\n scope: Shared<BlockScope>,\n\n context: Shared<Context>,\n\n stmt: &Spanned<fe::FuncStmt>,\n\n) -> Result<(), SemanticError> {\n\n if let fe::FuncStmt::Assign { targets, value } = &stmt.node {\n\n if targets.len() > 1 {\n\n unimplemented!()\n\n }\n\n\n\n if let Some(target) = targets.first() {\n\n match &target.node {\n\n fe::Expr::Name(_) => assign_name(scope, Rc::clone(&context), target, value)?,\n\n fe::Expr::Subscript { .. } => {\n\n assign_subscript(scope, Rc::clone(&context), target, value)?\n\n }\n\n _ => return Err(SemanticError::UnassignableExpression),\n\n }\n\n }\n\n\n\n return Ok(());\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "semantics/src/traversal/assignments.rs", "rank": 46, "score": 224616.37498440806 }, { "content": "/// Generates an decoding function for a single type parameter in either\n\n/// calldata or memory.\n\npub fn decode<T: AbiEncoding>(typ: T, location: AbiDecodeLocation) -> yul::Statement {\n\n let func_name = decode_name(&typ, location.clone());\n\n\n\n let decode_expr = match typ.abi_type() {\n\n AbiType::Uint { .. } => decode_uint(location),\n\n AbiType::Array { inner, size } => decode_array(*inner, size, location),\n\n };\n\n\n\n function_definition! 
{\n\n // `start` refers to the beginning of the encoding\n\n // `head_ptr` refers to the pointer at which the head is located\n\n function [func_name](start, head_ptr) -> val {\n\n (val := [decode_expr])\n\n }\n\n }\n\n}\n\n\n", "file_path": "compiler/src/yul/abi/functions.rs", "rank": 47, "score": 223937.62038577694 }, { "content": "fn expr_attribute_msg(attr: &Spanned<&str>) -> Result<yul::Expression, CompileError> {\n\n match attr.node {\n\n \"sender\" => Ok(expression! { caller() }),\n\n _ => Err(CompileError::static_str(\"invalid msg attribute name\")),\n\n }\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 48, "score": 222382.91249365613 }, { "content": "fn mock_spanned_expr(start: usize, end: usize) -> Spanned<fe::Expr<'static>> {\n\n Spanned {\n\n node: fe::Expr::Name(\"foo\"),\n\n span: Span { start, end },\n\n }\n\n}\n\n\n", "file_path": "semantics/tests/analysis.rs", "rank": 49, "score": 222235.52213538534 }, { "content": "/// Builds a Yul statement from a Fe assignment.\n\npub fn assign(\n\n context: &Context,\n\n stmt: &Spanned<fe::FuncStmt>,\n\n) -> Result<yul::Statement, CompileError> {\n\n if let fe::FuncStmt::Assign { targets, value } = &stmt.node {\n\n if targets.len() > 1 {\n\n unimplemented!(\"multiple assignment targets\")\n\n }\n\n\n\n if let Some(first_target) = targets.first() {\n\n return match &first_target.node {\n\n fe::Expr::Name(_) => assign_name(context, first_target, value),\n\n fe::Expr::Subscript { .. } => assign_subscript(context, first_target, value),\n\n _ => unreachable!(),\n\n };\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/assignments.rs", "rank": 50, "score": 222138.16477012797 }, { "content": "pub fn arg_list(input: Cursor) -> ParseResult<Vec<Spanned<FuncDefArg>>> {\n\n match input[0] {\n\n Token { string: \")\", .. 
} => Ok((input, vec![])),\n\n _ => separated(arg_def, op(\",\"), true)(input),\n\n }\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 51, "score": 220517.96331329315 }, { "content": "pub fn func_stmt(input: Cursor) -> ParseResult<Vec<Spanned<FuncStmt>>> {\n\n alt((map(compound_stmt, |stmt| vec![stmt]), simple_stmt))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 52, "score": 219828.74422009027 }, { "content": "pub fn target(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n alt((\n\n |input| {\n\n let (input, atom_expr) = atom(input)?;\n\n let (input, tails) = many1(t_tail)(input)?;\n\n\n\n let tails: Vec<_> = tails.into_iter().flatten().collect();\n\n\n\n Ok((input, build_tail_expr(atom_expr, tails)))\n\n },\n\n |input| {\n\n let (input, atom_expr) = t_atom(input)?;\n\n let (input, tails) = many0(t_tail)(input)?;\n\n\n\n let tails: Vec<_> = tails.into_iter().flatten().collect();\n\n\n\n Ok((input, build_tail_expr(atom_expr, tails)))\n\n },\n\n ))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 53, "score": 219639.97794550454 }, { "content": "pub fn t_atom(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n alt((\n\n map(name_token, |tok| Spanned {\n\n node: Expr::Name(tok.string),\n\n span: tok.span,\n\n }),\n\n map(delimited(op(\"(\"), targets, op(\")\")), |spanned| {\n\n use Expr::Tuple;\n\n\n\n let node = match spanned.node {\n\n Spanned {\n\n node: Tuple { elts },\n\n ..\n\n } => Tuple { elts },\n\n exp => Tuple { elts: vec![exp] },\n\n };\n\n let span = spanned.span;\n\n\n\n Spanned { node, span }\n\n }),\n", "file_path": "parser/src/parsers.rs", "rank": 54, "score": 219639.97794550454 }, { "content": "pub fn targets(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n let (input, mut elts) = separated(target, op(\",\"), false)(input)?;\n\n let (input, comma) = opt(op(\",\"))(input)?;\n\n\n\n let first = elts.first().unwrap();\n\n\n\n let result = match comma {\n\n Some(comma_tok) => {\n\n let span = 
Span::from_pair(first, comma_tok);\n\n\n\n Spanned {\n\n node: Expr::Tuple { elts },\n\n span,\n\n }\n\n }\n\n None => {\n\n if elts.len() > 1 {\n\n let last = elts.last().unwrap();\n\n let span = Span::from_pair(first, last);\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 55, "score": 219639.97794550454 }, { "content": "pub fn comparison(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n let (input, nots) = many0(name(\"not\"))(input)?;\n\n let (input, op_expr) = op_expr_builder(bitwise_or, comp_op, comp_op_builder)(input)?;\n\n\n\n let mut result = op_expr;\n\n for not_tok in nots.into_iter().rev() {\n\n let span = Span::from_pair(not_tok, &result);\n\n\n\n result = Spanned {\n\n node: unary_op_builder(not_tok, result),\n\n span,\n\n };\n\n }\n\n\n\n Ok((input, result))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 56, "score": 219639.97794550454 }, { "content": "pub fn bitwise_or(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n op_expr_builder(bitwise_xor, op(\"|\"), bin_op_builder)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 57, "score": 219639.97794550454 }, { "content": "pub fn bitwise_and(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n op_expr_builder(shift_expr, op(\"&\"), bin_op_builder)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 58, "score": 219639.97794550454 }, { "content": "pub fn disjunct(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n op_expr_builder(conjunct, name(\"or\"), bool_op_builder)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 59, "score": 219639.97794550454 }, { "content": "pub fn term(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n op_expr_builder(\n\n factor,\n\n alt((op(\"*\"), op(\"/\"), op(\"//\"), op(\"%\"))),\n\n bin_op_builder,\n\n )(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 60, "score": 219639.97794550454 }, { "content": "pub fn conjunct(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n 
op_expr_builder(comparison, name(\"and\"), bool_op_builder)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 61, "score": 219639.97794550454 }, { "content": "pub fn atom(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n alt((\n\n map(name(\"true\"), |tok| Spanned {\n\n node: Expr::Bool(true),\n\n span: tok.span,\n\n }),\n\n map(name(\"false\"), |tok| Spanned {\n\n node: Expr::Bool(false),\n\n span: tok.span,\n\n }),\n\n list,\n\n map(group, |exp| Spanned {\n\n node: exp.node.node,\n\n span: exp.span,\n\n }),\n\n tuple,\n\n map(name_token, |tok| Spanned {\n\n node: Expr::Name(tok.string),\n\n span: tok.span,\n\n }),\n", "file_path": "parser/src/parsers.rs", "rank": 62, "score": 219639.97794550454 }, { "content": "pub fn sum(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n op_expr_builder(term, alt((op(\"+\"), op(\"-\"))), bin_op_builder)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 63, "score": 219639.97794550454 }, { "content": "pub fn list(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n map(delimited(op(\"[\"), opt(exprs), op(\"]\")), |spanned| {\n\n use Expr::{\n\n List,\n\n Tuple,\n\n };\n\n\n\n let node = match spanned.node {\n\n Some(Spanned {\n\n node: Tuple { elts },\n\n ..\n\n }) => List { elts },\n\n Some(exp) => List { elts: vec![exp] },\n\n None => List { elts: vec![] },\n\n };\n\n let span = spanned.span;\n\n\n\n Spanned { node, span }\n\n })(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 64, "score": 219639.97794550454 }, { "content": "pub fn primary(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n let (input, atom_expr) = atom(input)?;\n\n let (input, tails) = many0(alt((attr_tail, index_tail, call_tail)))(input)?;\n\n\n\n Ok((input, build_tail_expr(atom_expr, tails)))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 65, "score": 219639.97794550454 }, { "content": "pub fn factor(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n let unary_op = |input| {\n\n let (input, op_tok) 
= alt((op(\"+\"), op(\"-\"), op(\"~\")))(input)?;\n\n let (input, factor_expr) = factor(input)?;\n\n\n\n let span = Span::from_pair(op_tok, &factor_expr);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: unary_op_builder(op_tok, factor_expr),\n\n span,\n\n },\n\n ))\n\n };\n\n\n\n alt((unary_op, power))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 66, "score": 219639.97794550454 }, { "content": "pub fn power(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n let power_op = |input| {\n\n let (input, primary_expr) = primary(input)?;\n\n let (input, op_tok) = op(\"**\")(input)?;\n\n let (input, factor_expr) = factor(input)?;\n\n\n\n let span = Span::from_pair(&primary_expr, &factor_expr);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: bin_op_builder(primary_expr, op_tok, factor_expr),\n\n span,\n\n },\n\n ))\n\n };\n\n\n\n alt((power_op, primary))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 67, "score": 219639.97794550454 }, { "content": "pub fn type_desc_base(\n\n defs: &HashMap<String, ModuleDef>,\n\n typ: &fe::TypeDesc,\n\n) -> Result<Base, SemanticError> {\n\n match type_desc(defs, typ)? 
{\n\n Type::Base(base) => Ok(base),\n\n Type::Array(_) => Err(SemanticError::TypeError),\n\n Type::Map(_) => Err(SemanticError::TypeError),\n\n Type::Tuple(_) => Err(SemanticError::TypeError),\n\n Type::String(_) => Err(SemanticError::TypeError),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/namespace/types.rs", "rank": 68, "score": 219242.78843148088 }, { "content": "#[allow(clippy::needless_lifetimes)]\n\npub fn name<'a>(string: &'a str) -> impl Fn(Cursor<'a>) -> ParseResult<&Token> {\n\n verify(\n\n name_token,\n\n move |t| t.string == string,\n\n move |inp, _| ParseError::str(inp, format!(\"expected \\\"{}\\\" name token\", string)),\n\n )\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 69, "score": 217957.16680441902 }, { "content": "/// Parse a contract definition statement.\n\npub fn contract_def(input: Cursor) -> ParseResult<Spanned<ModuleStmt>> {\n\n // \"contract\" name \":\" NEWLINE\n\n let (input, contract_kw) = name(\"contract\")(input)?;\n\n let (input, name_tok) = name_token(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, _) = newline_token(input)?;\n\n\n\n // INDENT contract_stmt+ DEDENT\n\n let (input, _) = indent_token(input)?;\n\n let (input, body) = many1(contract_stmt)(input)?;\n\n let (input, _) = dedent_token(input)?;\n\n\n\n let last_stmt = body.last().unwrap();\n\n let span = Span::from_pair(contract_kw, last_stmt);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: ContractDef {\n\n name: name_tok.into(),\n\n body,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 70, "score": 217715.3138128014 }, { "content": "/// Parse an event definition statement.\n\npub fn event_def(input: Cursor) -> ParseResult<Spanned<ContractStmt>> {\n\n // \"event\" name \":\" NEWLINE\n\n let (input, event_kw) = name(\"event\")(input)?;\n\n let (input, name_tok) = name_token(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, _) = newline_token(input)?;\n\n\n\n // INDENT event_field+ 
DEDENT\n\n let (input, _) = indent_token(input)?;\n\n let (input, fields) = many1(event_field)(input)?;\n\n let (input, _) = dedent_token(input)?;\n\n\n\n let last_field = fields.last().unwrap();\n\n let span = Span::from_pair(event_kw, last_field);\n\n\n\n Ok((\n\n input,\n\n Spanned {\n\n node: ContractStmt::EventDef {\n\n name: name_tok.into(),\n\n fields,\n\n },\n\n span,\n\n },\n\n ))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 71, "score": 217715.3138128014 }, { "content": "pub fn bitwise_xor(input: Cursor) -> ParseResult<Spanned<Expr>> {\n\n op_expr_builder(bitwise_and, op(\"^\"), bin_op_builder)(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 72, "score": 216738.1464709844 }, { "content": "/// Maps a type description node to a fixed size enum type.\n\npub fn type_desc_fixed_size(\n\n scope: Scope,\n\n typ: &Spanned<fe::TypeDesc>,\n\n) -> Result<FixedSize, SemanticError> {\n\n types::type_desc_fixed_size(&scope.module_scope().borrow().defs, &typ.node)\n\n}\n", "file_path": "semantics/src/traversal/types.rs", "rank": 73, "score": 216069.67974057444 }, { "content": "pub fn type_desc_fixed_size(\n\n defs: &HashMap<String, ModuleDef>,\n\n typ: &fe::TypeDesc,\n\n) -> Result<FixedSize, SemanticError> {\n\n match type_desc(defs, typ)? 
{\n\n Type::Base(base) => Ok(FixedSize::Base(base)),\n\n Type::Array(array) => Ok(FixedSize::Array(array)),\n\n Type::Tuple(tuple) => Ok(FixedSize::Tuple(tuple)),\n\n Type::String(string) => Ok(FixedSize::String(string)),\n\n Type::Map(_) => Err(SemanticError::TypeError),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/namespace/types.rs", "rank": 74, "score": 216060.61307239905 }, { "content": "pub fn group(input: Cursor) -> ParseResult<Spanned<Spanned<Expr>>> {\n\n delimited(op(\"(\"), expr, op(\")\"))(input)\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 75, "score": 213918.6065931766 }, { "content": "pub fn else_block(input: Cursor) -> ParseResult<Vec<Spanned<FuncStmt>>> {\n\n let (input, _) = name(\"else\")(input)?;\n\n let (input, _) = op(\":\")(input)?;\n\n let (input, stmts) = block(input)?;\n\n\n\n Ok((input, stmts))\n\n}\n\n\n", "file_path": "parser/src/parsers.rs", "rank": 76, "score": 212780.54015687254 }, { "content": "pub fn call_arg(\n\n scope: Shared<BlockScope>,\n\n context: Shared<Context>,\n\n arg: &Spanned<fe::CallArg>,\n\n) -> Result<ExpressionAttributes, SemanticError> {\n\n match &arg.node {\n\n fe::CallArg::Arg(value) => {\n\n let spanned = spanned_expression(&arg.span, value);\n\n expr(scope, context, &spanned)\n\n }\n\n fe::CallArg::Kwarg(fe::Kwarg { name: _, value }) => expr(scope, context, value),\n\n }\n\n}\n\n\n", "file_path": "semantics/src/traversal/expressions.rs", "rank": 77, "score": 212607.59305258386 }, { "content": "/// Builds a Yul function definition from a Fe function definition.\n\npub fn func_def(\n\n context: &Context,\n\n def: &Spanned<fe::ContractStmt>,\n\n) -> Result<yul::Statement, CompileError> {\n\n if let (\n\n Some(attributes),\n\n fe::ContractStmt::FuncDef {\n\n qual: _,\n\n name,\n\n args,\n\n return_type: _,\n\n body,\n\n },\n\n ) = (context.get_function(def).to_owned(), &def.node)\n\n {\n\n let function_name = identifier! 
{(name.node)};\n\n let param_names = args.iter().map(|arg| func_def_arg(arg)).collect::<Vec<_>>();\n\n let function_statements = multiple_func_stmt(context, body)?;\n\n\n\n return if attributes.return_type.is_empty_tuple() {\n", "file_path": "compiler/src/yul/mappers/functions.rs", "rank": 79, "score": 209760.90001626126 }, { "content": "/// Builds a contract constructor.\n\n///\n\n/// Takes an optional init function and its parameter types.\n\npub fn build(init: Option<(yul::Statement, Vec<FixedSize>, Vec<yul::Statement>)>) -> yul::Code {\n\n // statements that return the contract code\n\n let deploy_stmts = statements! {\n\n (let size := datasize(\"runtime\"))\n\n (datacopy(0, (dataoffset(\"runtime\")), size))\n\n (return(0, size))\n\n };\n\n\n\n let block = if let Some((init, params, runtime)) = init {\n\n // build a constructor with an init function\n\n\n\n // decode operations for `__init__` parameters\n\n let decoded_params = abi_operations::decode(\n\n params,\n\n expression! { params_start_mem },\n\n AbiDecodeLocation::Memory,\n\n );\n\n\n\n // Build a constructor that runs a user defined init function. 
Parameters for\n\n // init functions are appended to the end of the initialization code.\n", "file_path": "compiler/src/yul/constructor.rs", "rank": 80, "score": 209621.7682315972 }, { "content": "pub fn call_arg(\n\n context: &Context,\n\n arg: &Spanned<fe::CallArg>,\n\n) -> Result<yul::Expression, CompileError> {\n\n match &arg.node {\n\n fe::CallArg::Arg(value) => {\n\n let spanned = spanned_expression(&arg.span, value);\n\n expr(context, &spanned)\n\n }\n\n fe::CallArg::Kwarg(fe::Kwarg { name: _, value }) => expr(context, value),\n\n }\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 81, "score": 209129.95784173266 }, { "content": "pub fn expr_call(\n\n context: &Context,\n\n exp: &Spanned<fe::Expr>,\n\n) -> Result<yul::Expression, CompileError> {\n\n if let fe::Expr::Call { args, func } = &exp.node {\n\n if let fe::Expr::Attribute { value: _, attr } = &func.node {\n\n let arguments = &args.node;\n\n let yul_args: Vec<yul::Expression> = arguments\n\n .iter()\n\n .map(|val| call_arg(context, val))\n\n .collect::<Result<_, _>>()?;\n\n\n\n let func_name = identifier! { (attr.node) };\n\n\n\n return Ok(expression! { [func_name]([yul_args...]) });\n\n }\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 82, "score": 208741.43111526797 }, { "content": "pub fn expr_comp_operation(\n\n context: &Context,\n\n exp: &Spanned<fe::Expr>,\n\n) -> Result<yul::Expression, CompileError> {\n\n if let fe::Expr::CompOperation { left, op, right } = &exp.node {\n\n let yul_left = expr(context, left)?;\n\n let yul_right = expr(context, right)?;\n\n\n\n return match op.node {\n\n fe::CompOperator::Eq => Ok(expression! { eq([yul_left], [yul_right]) }),\n\n fe::CompOperator::NotEq => {\n\n Ok(expression! { iszero([expression! { eq([yul_left], [yul_right]) }]) })\n\n }\n\n fe::CompOperator::Lt => Ok(expression! { lt([yul_left], [yul_right]) }),\n\n fe::CompOperator::LtE => {\n\n Ok(expression! 
{ iszero([expression! {gt([yul_left], [yul_right])}]) })\n\n }\n\n fe::CompOperator::Gt => Ok(expression! { gt([yul_left], [yul_right]) }),\n\n fe::CompOperator::GtE => {\n\n Ok(expression! { iszero([expression! {lt([yul_left], [yul_right])}]) })\n\n }\n\n _ => unimplemented!(),\n\n };\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 83, "score": 205424.9663695573 }, { "content": "pub fn expr_bin_operation(\n\n context: &Context,\n\n exp: &Spanned<fe::Expr>,\n\n) -> Result<yul::Expression, CompileError> {\n\n if let fe::Expr::BinOperation { left, op, right } = &exp.node {\n\n let yul_left = expr(context, left)?;\n\n let yul_right = expr(context, right)?;\n\n\n\n return match op.node {\n\n fe::BinOperator::Add => Ok(expression! { add([yul_left], [yul_right]) }),\n\n fe::BinOperator::Sub => Ok(expression! { sub([yul_left], [yul_right]) }),\n\n fe::BinOperator::Mult => Ok(expression! { mul([yul_left], [yul_right]) }),\n\n fe::BinOperator::Div => Ok(expression! { div([yul_left], [yul_right]) }),\n\n fe::BinOperator::BitAnd => Ok(expression! { and([yul_left], [yul_right]) }),\n\n fe::BinOperator::BitOr => Ok(expression! { or([yul_left], [yul_right]) }),\n\n fe::BinOperator::BitXor => Ok(expression! { xor([yul_left], [yul_right]) }),\n\n fe::BinOperator::LShift => Ok(expression! { shl([yul_right], [yul_left]) }),\n\n fe::BinOperator::RShift => Ok(expression! { shr([yul_right], [yul_left]) }),\n\n fe::BinOperator::Mod => Ok(expression! { mod([yul_left], [yul_right]) }),\n\n fe::BinOperator::Pow => Ok(expression! 
{ exp([yul_left], [yul_right]) }),\n\n _ => unimplemented!(),\n\n };\n\n }\n\n\n\n unreachable!()\n\n}\n\n\n", "file_path": "compiler/src/yul/mappers/expressions.rs", "rank": 84, "score": 205424.9663695573 }, { "content": "/// Generates an encoding function for any set of type parameters.\n\npub fn encode<T: AbiEncoding>(types: Vec<T>) -> yul::Statement {\n\n // the name of the function we're generating\n\n let func_name = encode_name(&types);\n\n\n\n // create a vector of identifiers and a vector of tuples, which contain\n\n // expressions that correspond to the identifiers.\n\n //\n\n // The identifier vector is injected into the parameter section of our\n\n // encoding function and the expressions are used to reference the parameters\n\n // while encoding.\n\n let (params, typed_params): (Vec<_>, Vec<_>) = types\n\n .iter()\n\n .enumerate()\n\n .map(|(i, typ)| {\n\n let ident = identifier! { (format!(\"val_{}\", i)) };\n\n let expr = identifier_expression! { [ident.clone()] };\n\n (ident, (expr, typ))\n\n })\n\n .unzip();\n\n\n", "file_path": "compiler/src/yul/abi/functions.rs", "rank": 85, "score": 204284.91483176022 }, { "content": "#[allow(dead_code)]\n\npub fn to_ron_string_pretty<T>(value: &T) -> ron::ser::Result<String>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut config = ron::ser::PrettyConfig::default();\n\n // Indent with 2 spaces\n\n config.indentor = \" \".to_string();\n\n\n\n let mut serializer = ron::ser::Serializer::new(Some(config), true);\n\n value.serialize(&mut serializer)?;\n\n\n\n Ok(serializer.into_output_string())\n\n}\n\n\n", "file_path": "parser/tests/utils/mod.rs", "rank": 86, "score": 202848.9482691455 }, { "content": "/// Logs an event.\n\npub fn emit_event(event: Event, vals: Vec<yul::Expression>) -> yul::Statement {\n\n let topic = literal_expression! 
{ (event.topic) };\n\n let encoding = abi_operations::encode(event.fields.clone(), vals.clone());\n\n let size = abi_operations::encode_size(event.fields, vals);\n\n\n\n return statement! { log1([encoding], [size], [topic]) };\n\n}\n\n\n", "file_path": "compiler/src/yul/operations.rs", "rank": 87, "score": 201686.58963741572 }, { "content": "/// Parse file content containing a test example into a tuple of input text and\n\n/// expected serialization. Input text and expected serialization are separated\n\n/// by a line that only contains the string \"---\".\n\npub fn parse_fixture<'a>(input: &'a str) -> Result<(&'a str, &'a str), String> {\n\n let parts: Vec<_> = input.split(\"\\n---\\n\").collect();\n\n\n\n if parts.len() != 2 {\n\n Err(format!(\"expected 2 parts, got {}\", parts.len()))\n\n } else {\n\n let input = parts[0];\n\n let parsed = parts[1];\n\n\n\n // If single trailing newline is present, clip off\n\n Ok(match parsed.chars().last() {\n\n Some(c) if c == '\\n' => (input, &parsed[..parsed.len() - 1]),\n\n _ => (input, parsed),\n\n })\n\n }\n\n}\n\n\n\n/// Empty slice syntax is so ugly :/\n\n#[allow(unused_macros)]\n\nmacro_rules! empty_slice {\n", "file_path": "parser/tests/utils/mod.rs", "rank": 88, "score": 201190.4340872601 }, { "content": "/// Evaluates the ternary expression and returns the result.\n\npub fn ternary() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 89, "score": 198603.55981966114 }, { "content": "/// Returns the highest available pointer.\n\npub fn avail() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 90, "score": 198594.86921120796 }, { "content": "/// Takes two 256 bit values and returns the keccak256 value of both.\n\npub fn dualkeccak256() -> yul::Statement {\n\n function_definition! 
{\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 91, "score": 198594.64961059493 }, { "content": "/// Read a 256 bit value from memory and right-shift according to size.\n\npub fn mloadn() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 92, "score": 198590.36129862396 }, { "content": "/// Copy storage to a newly allocated segment of memory.\n\npub fn scopy() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 93, "score": 198590.36129862396 }, { "content": "/// Allocate a given number of bytes.\n\npub fn alloc() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 94, "score": 198590.36129862396 }, { "content": "/// Read a 256 bit value from storage and right-shift according to size.\n\npub fn sloadn() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 95, "score": 198590.36129862396 }, { "content": "/// Copy calldata to memory a newly allocated segment of memory.\n\npub fn ccopy() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 96, "score": 198590.36129862396 }, { "content": "/// Copy memory to a given segment of storage.\n\npub fn mcopy() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 97, "score": 198590.36129862396 }, { "content": "/// Set the highest available pointer.\n\npub fn free() -> yul::Statement {\n\n function_definition! {\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 98, "score": 198590.36129862396 }, { "content": "/// Read a 256 bit value from calldata and right-shift according to size.\n\npub fn cloadn() -> yul::Statement {\n\n function_definition! 
{\n", "file_path": "compiler/src/yul/runtime/functions.rs", "rank": 99, "score": 198590.36129862396 } ]
Rust
src/utilities.rs
mandx/privie
1e44e5b942979746383496223390718d48db8fc7
use std::{ fmt::{Display, Formatter}, fs::{File, OpenOptions}, io::{self, stdin, stdout, BufReader, BufWriter, Read, Write}, path::{Path, PathBuf}, }; use thiserror::Error as ThisError; #[derive(Debug, ThisError)] pub enum IoUtilsError<P: std::fmt::Debug + Display> { #[error("Could not open `{path}`")] Open { #[source] source: io::Error, path: P, }, #[error("Could not read from `{path}`")] Read { #[source] source: io::Error, path: P, }, #[error("Could not write to `{path}`")] Write { #[source] source: io::Error, path: P, }, #[error("Could not parse `{path}` as JSON")] JsonParse { source: json::Error, path: P }, #[error("STDIN can be used only once")] MultipleStdInRefs, } #[derive(Debug, Clone)] pub struct InputFile { filename: Option<PathBuf>, } impl Display for InputFile { fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { match &self.filename { Some(path) => write!(formatter, "{}", path.to_string_lossy()), None => write!(formatter, "{}", Self::DISPLAY_STR), } } } impl std::str::FromStr for InputFile { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "" | "-" | Self::DISPLAY_STR => Self::use_stdin(), s => Self::use_filename(s), }) } } impl Default for InputFile { fn default() -> Self { Self::use_stdin() } } impl InputFile { const DISPLAY_STR: &'static str = "<stdin>"; pub fn use_stdin() -> Self { Self { filename: None } } pub fn use_filename<P: AsRef<Path>>(filename: P) -> Self { Self { filename: Some(filename.as_ref().into()), } } pub fn open(&self) -> Result<impl Read, IoUtilsError<Self>> { match &self.filename { Some(path) => Ok(Box::new(BufReader::new(File::open(path).map_err(|error| { IoUtilsError::Open { source: error, path: self.clone(), } })?)) as Box<dyn Read>), None => Ok(Box::new(BufReader::new(stdin())) as Box<dyn Read>), } } pub fn read(&self) -> Result<String, IoUtilsError<Self>> { let mut reader = self.open()?; let mut buf = String::new(); reader .read_to_string(&mut buf) .map_err(|error| 
IoUtilsError::Read { source: error, path: self.clone(), })?; Ok(buf) } pub fn read_json(&self) -> Result<json::JsonValue, IoUtilsError<Self>> { json::parse(&self.read()?).map_err(|error| IoUtilsError::JsonParse { source: error, path: self.clone(), }) } pub fn check_stdin_once<'a, I: IntoIterator<Item = &'a Self>>( inputs: I, ) -> Result<(), IoUtilsError<Self>> { let mut found = false; for input in inputs { if input.filename.is_none() { if found { return Err(IoUtilsError::MultipleStdInRefs); } found = true; } } Ok(()) } } #[derive(Debug, Clone)] pub struct OutputFile { filename: Option<PathBuf>, } impl Display for OutputFile { fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { match &self.filename { Some(path) => write!(formatter, "{}", path.to_string_lossy()), None => write!(formatter, "{}", Self::DISPLAY_STR), } } } impl std::str::FromStr for OutputFile { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "" | "-" | Self::DISPLAY_STR => Self::use_stdout(), s => Self::use_filename(s), }) } } impl Default for OutputFile { fn default() -> Self { Self::use_stdout() } } impl OutputFile { const DISPLAY_STR: &'static str = "<stdout>"; pub fn use_stdout() -> Self { Self { filename: None } } pub fn use_filename<P: AsRef<Path>>(filename: P) -> Self { Self { filename: Some(filename.as_ref().into()), } } pub fn open(&self) -> Result<impl Write, IoUtilsError<Self>> { match &self.filename { Some(path) => Ok(Box::new(BufWriter::new( OpenOptions::new() .write(true) .truncate(true) .create(true) .open(path) .map_err(|error| IoUtilsError::Open { source: error, path: self.clone(), })?, )) as Box<dyn Write>), None => Ok(Box::new(BufWriter::new(stdout())) as Box<dyn Write>), } } pub fn write<S: AsRef<[u8]>>(&self, content: S) -> Result<(), IoUtilsError<Self>> { self.open()? 
.write_all(content.as_ref()) .map_err(|error| IoUtilsError::Write { source: error, path: self.clone(), }) } pub fn write_json<J: Into<json::JsonValue>>(&self, data: J) -> Result<(), IoUtilsError<Self>> { self.write(&json::stringify_pretty(data.into(), 2)) } }
use std::{ fmt::{Display, Formatter}, fs::{File, OpenOptions}, io::{self, stdin, stdout, BufReader, BufWriter, Read, Write}, path::{Path, PathBuf}, }; use thiserror::Error as ThisError; #[derive(Debug, ThisError)] pub enum IoUtilsError<P: std::fmt::Debug + Display> { #[error("Could not open `{path}`")] Open { #[source] source: io::Error, path: P, }, #[error("Could not read from `{path}`")] Read { #[source] source: io::Error, path: P, }, #[error("Could not write to `{path}`")] Write { #[source] source: io::Error, path: P, }, #[error("Could not parse `{path}` as JSON")] JsonParse { source: json::Error, path: P }, #[error("STDIN can be used only once")] MultipleStdInRefs, } #[derive(Debug, Clone)] pub struct InputFile { filename: Option<PathBuf>, } impl Display for InputFile { fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { match &self.filename { Some(path) => write!(formatter, "{}", path.to_string_lossy()), None => write!(formatter, "{}", Self::DISPLAY_STR), } } } impl std::str::FromStr for InputFile { type Err = String; fn from_str(s: &str) -> Result<Self, S
} impl Default for InputFile { fn default() -> Self { Self::use_stdin() } } impl InputFile { const DISPLAY_STR: &'static str = "<stdin>"; pub fn use_stdin() -> Self { Self { filename: None } } pub fn use_filename<P: AsRef<Path>>(filename: P) -> Self { Self { filename: Some(filename.as_ref().into()), } } pub fn open(&self) -> Result<impl Read, IoUtilsError<Self>> { match &self.filename { Some(path) => Ok(Box::new(BufReader::new(File::open(path).map_err(|error| { IoUtilsError::Open { source: error, path: self.clone(), } })?)) as Box<dyn Read>), None => Ok(Box::new(BufReader::new(stdin())) as Box<dyn Read>), } } pub fn read(&self) -> Result<String, IoUtilsError<Self>> { let mut reader = self.open()?; let mut buf = String::new(); reader .read_to_string(&mut buf) .map_err(|error| IoUtilsError::Read { source: error, path: self.clone(), })?; Ok(buf) } pub fn read_json(&self) -> Result<json::JsonValue, IoUtilsError<Self>> { json::parse(&self.read()?).map_err(|error| IoUtilsError::JsonParse { source: error, path: self.clone(), }) } pub fn check_stdin_once<'a, I: IntoIterator<Item = &'a Self>>( inputs: I, ) -> Result<(), IoUtilsError<Self>> { let mut found = false; for input in inputs { if input.filename.is_none() { if found { return Err(IoUtilsError::MultipleStdInRefs); } found = true; } } Ok(()) } } #[derive(Debug, Clone)] pub struct OutputFile { filename: Option<PathBuf>, } impl Display for OutputFile { fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), std::fmt::Error> { match &self.filename { Some(path) => write!(formatter, "{}", path.to_string_lossy()), None => write!(formatter, "{}", Self::DISPLAY_STR), } } } impl std::str::FromStr for OutputFile { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "" | "-" | Self::DISPLAY_STR => Self::use_stdout(), s => Self::use_filename(s), }) } } impl Default for OutputFile { fn default() -> Self { Self::use_stdout() } } impl OutputFile { const DISPLAY_STR: &'static str = "<stdout>"; pub fn 
use_stdout() -> Self { Self { filename: None } } pub fn use_filename<P: AsRef<Path>>(filename: P) -> Self { Self { filename: Some(filename.as_ref().into()), } } pub fn open(&self) -> Result<impl Write, IoUtilsError<Self>> { match &self.filename { Some(path) => Ok(Box::new(BufWriter::new( OpenOptions::new() .write(true) .truncate(true) .create(true) .open(path) .map_err(|error| IoUtilsError::Open { source: error, path: self.clone(), })?, )) as Box<dyn Write>), None => Ok(Box::new(BufWriter::new(stdout())) as Box<dyn Write>), } } pub fn write<S: AsRef<[u8]>>(&self, content: S) -> Result<(), IoUtilsError<Self>> { self.open()? .write_all(content.as_ref()) .map_err(|error| IoUtilsError::Write { source: error, path: self.clone(), }) } pub fn write_json<J: Into<json::JsonValue>>(&self, data: J) -> Result<(), IoUtilsError<Self>> { self.write(&json::stringify_pretty(data.into(), 2)) } }
elf::Err> { Ok(match s { "" | "-" | Self::DISPLAY_STR => Self::use_stdin(), s => Self::use_filename(s), }) }
function_block-function_prefixed
[ { "content": "/// Attempt to decrypt the given ciphertext with the given secret key.\n\n/// Will fail if the secret key doesn't match the public key used to\n\n/// encrypt the payload, or if the ciphertext is not long enough.\n\npub fn open(ciphertext: &[u8], secret_key: &SecretKey) -> Result<Vec<u8>, Error> {\n\n if ciphertext.len() <= KEY_SIZE {\n\n // Not long enough\n\n return Err(Error::MalformedData(ciphertext.len()));\n\n }\n\n\n\n let ephemeral_pk = {\n\n let mut array = [0_u8; KEY_SIZE];\n\n array.copy_from_slice(&ciphertext[..KEY_SIZE]);\n\n array.into()\n\n };\n\n\n\n let nonce = get_nonce(&ephemeral_pk, &secret_key.public_key());\n\n let nonce = GenericArray::from_slice(&nonce);\n\n\n\n let encrypted = &ciphertext[KEY_SIZE..];\n\n let crypto_box = CryptoBox::new(&ephemeral_pk, secret_key);\n\n crypto_box.decrypt(nonce, encrypted).map_err(Error::from)\n\n}\n\n\n", "file_path": "src/sealed_box.rs", "rank": 0, "score": 62986.63519415888 }, { "content": "fn main() -> Result<(), Error> {\n\n let cli_args = CliCommands::from_args();\n\n\n\n match cli_args {\n\n CliCommands::VerifyKeyring { input } => {\n\n Keyring::from_json(input.read_json()?, CliErrorHandler::new(true, true))?;\n\n }\n\n\n\n CliCommands::GenerateKeyring { output } => {\n\n output.write_json(Keyring::generate(CliErrorHandler::new(true, true)).to_json())?;\n\n }\n\n\n\n CliCommands::Encrypt {\n\n keyring: keyring_file,\n\n extra_keyrings: extra_keyring_files,\n\n input,\n\n output,\n\n key_id,\n\n strict_keyring,\n\n } => {\n", "file_path": "src/main.rs", "rank": 1, "score": 54473.11517265798 }, { "content": "pub trait PathAssign<E> {\n\n fn get_assign_target(&mut self) -> &mut JsonValue;\n\n\n\n fn preprocess_value<K, J>(\n\n &self,\n\n #[allow(unused_variables)] path: K,\n\n value: Option<J>,\n\n ) -> Result<Option<JsonValue>, E>\n\n where\n\n K: AsRef<str>,\n\n J: Into<JsonValue>,\n\n {\n\n Ok(value.map(Into::into))\n\n }\n\n\n\n fn path_assign<S, J>(\n\n &mut self,\n\n path: S,\n\n 
value: Option<J>,\n\n #[allow(unused_variables)] force: bool,\n", "file_path": "src/path_assign.rs", "rank": 2, "score": 51830.88144436942 }, { "content": "/// Encrypt the given buffer for the given public key\n\n///\n\n/// overhead: 48 bytes = `KEY_SIZE` (32, ephemeral pk) + 16 (box overhead)\n\npub fn seal(data: &[u8], public_key: &PublicKey) -> Result<Vec<u8>, Error> {\n\n let mut result = Vec::with_capacity(SEALEDBOX_OVERHEAD + data.len());\n\n\n\n let ephemeral_secret_key = SecretKey::generate(&mut rand::rngs::OsRng);\n\n let ephemeral_public_key = ephemeral_secret_key.public_key();\n\n result.extend_from_slice(ephemeral_public_key.as_bytes());\n\n\n\n let nonce_bytes = get_nonce(&ephemeral_public_key, public_key);\n\n let nonce = GenericArray::from_slice(&nonce_bytes);\n\n\n\n let crypto_box = CryptoBox::new(public_key, &ephemeral_secret_key);\n\n\n\n result.extend_from_slice(&crypto_box.encrypt(nonce, data).map_err(Error::from)?);\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/sealed_box.rs", "rank": 3, "score": 47162.17215721506 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(name = \"Privie\", about = \"Code secrets management tool\")]\n\nenum CliCommands {\n\n #[structopt(help = \"Generate a new keyring file with a single key pair\")]\n\n GenerateKeyring {\n\n #[structopt(\n\n short,\n\n long,\n\n default_value,\n\n help = \"Where to write the JSON keyring data.\"\n\n )]\n\n output: OutputFile,\n\n },\n\n\n\n #[structopt(\n\n help = \"Verify a keyring file, like its keys size and that public/secret pair do match.\"\n\n )]\n\n VerifyKeyring {\n\n #[structopt(short, long, default_value, help = \"JSON keyring to verify.\")]\n\n input: InputFile,\n\n },\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 32994.66880536685 }, { "content": "#[derive(Debug, Clone)]\n\nstruct CliErrorHandler {\n\n strict_key_loading: bool,\n\n strict_decryption: bool,\n\n}\n\n\n\nimpl HandleError for CliErrorHandler {\n\n fn decrypt_error<E>(&self, value: &mut 
json::JsonValue, error: E) -> Result<(), E>\n\n where\n\n E: Debug,\n\n {\n\n if self.strict_decryption {\n\n return Err(error);\n\n }\n\n\n\n // TODO: Colors!\n\n eprintln!(\n\n \"[Strictness Off] Error decrypting string `{}`: {:?}\",\n\n value, error\n\n );\n\n Ok(())\n", "file_path": "src/main.rs", "rank": 5, "score": 31648.7289299345 }, { "content": "/// Generate the nonce for the given public keys\n\n///\n\n/// nonce = `Blake2b(ephemeral_pk + target_pk)`\n\n/// length = 24\n\nfn get_nonce(\n\n ephemeral_public_key: &PublicKey,\n\n target_public_key: &PublicKey,\n\n) -> [u8; BOX_NONCELENGTH] {\n\n let mut hasher = VarBlake2b::new(BOX_NONCELENGTH).unwrap();\n\n\n\n hasher.update(ephemeral_public_key.as_bytes());\n\n hasher.update(target_public_key.as_bytes());\n\n\n\n let mut nonce = [0_u8; BOX_NONCELENGTH];\n\n hasher.finalize_variable(|data| nonce.copy_from_slice(data));\n\n nonce\n\n}\n\n\n", "file_path": "src/sealed_box.rs", "rank": 6, "score": 31396.36717872497 }, { "content": "pub trait HandleError {\n\n // TODO: Add/Pass here some sort of \"context\", that most importantly\n\n // contains the current \"JSON path\" to `value` in the current document.\n\n fn decrypt_error<E>(\n\n &self,\n\n #[allow(unused_variables)] value: &mut JsonValue,\n\n error: E,\n\n ) -> Result<(), E>\n\n where\n\n E: Debug,\n\n {\n\n Err(error)\n\n }\n\n\n\n fn key_load_error<E>(\n\n &self,\n\n #[allow(unused_variables)] public_key: &str,\n\n #[allow(unused_variables)] private_key: &str,\n\n error: E,\n\n ) -> Result<(), E>\n", "file_path": "src/secrets/mod.rs", "rank": 7, "score": 27583.571192063395 }, { "content": " ) -> Result<(), E>\n\n where\n\n S: AsRef<str>,\n\n J: Into<JsonValue>,\n\n {\n\n fn assign<'is, I>(iter: &'is mut I, location: &mut JsonValue, mut value: JsonValue)\n\n where\n\n I: Iterator<Item = &'is str>,\n\n {\n\n match iter.next() {\n\n Some(position) => {\n\n assign(iter, location.index_mut(position), value);\n\n }\n\n None => {\n\n mem::swap(location, &mut 
value);\n\n }\n\n }\n\n }\n\n\n\n let path = path.as_ref();\n\n let mut path_iter = path.split('.');\n\n let data = self.get_assign_target();\n\n assign(&mut path_iter, data, value.into());\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/path_assign.rs", "rank": 8, "score": 23931.61260764569 }, { "content": "use std::{mem, ops::IndexMut};\n\n\n\nuse json::JsonValue;\n\n\n", "file_path": "src/path_assign.rs", "rank": 9, "score": 23926.062175715815 }, { "content": " self.decrypt_in_place(elem)?;\n\n }\n\n }\n\n json_string @ (JsonValue::String(_) | JsonValue::Short(_)) => {\n\n let encrypted = json_string.as_str().unwrap();\n\n let mut decrypted: JsonValue = match self.decrypt_str(encrypted) {\n\n Ok(decrypted) => decrypted.into(),\n\n Err(error) => {\n\n return self.error_handler.decrypt_error(json_string, error);\n\n }\n\n };\n\n std::mem::swap(json_string, &mut decrypted);\n\n }\n\n _ => {}\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n pub fn default_public_key(&self) -> Result<&str, Error> {\n", "file_path": "src/secrets/mod.rs", "rank": 20, "score": 22.642550146717785 }, { "content": " _ => Err(Error::JsonNotAnObject),\n\n }\n\n }\n\n\n\n pub fn encrypt_str<K: AsRef<str> + Display, S: AsRef<str>>(\n\n &self,\n\n key_id: K,\n\n data: S,\n\n ) -> Result<String, Error> {\n\n let key_id = key_id.as_ref();\n\n let key_pair = self\n\n .keys\n\n .get(key_id)\n\n .cloned()\n\n .ok_or_else(|| Error::MissingKeyId {\n\n key_id: key_id.into(),\n\n })\n\n .or_else(|_key_not_found_error| {\n\n KeyPair::try_from(key_id.to_string()).map_err(|error| Error::DetachedKeyRead {\n\n source: error,\n", "file_path": "src/secrets/mod.rs", "rank": 21, "score": 20.85377212972372 }, { "content": " public: value.public_key(),\n\n private: Some(value),\n\n }\n\n }\n\n}\n\n\n\nimpl From<PublicKey> for KeyPair {\n\n fn from(value: PublicKey) -> Self {\n\n Self {\n\n public: value,\n\n private: None,\n\n }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for KeyPair {\n\n fn fmt(&self, formatter: &mut 
std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n write!(formatter, \"{}\", self.key_id())\n\n }\n\n}\n", "file_path": "src/secrets/keypair.rs", "rank": 22, "score": 20.609006815336006 }, { "content": " key_id: String,\n\n}\n\n\n\nimpl SecretValue {\n\n const SEP: char = ':';\n\n\n\n pub fn get_encrypted(&self) -> &[u8] {\n\n &self.encrypted\n\n }\n\n\n\n pub fn get_key_id(&self) -> &str {\n\n &self.key_id\n\n }\n\n}\n\n\n\nimpl FromStr for SecretValue {\n\n type Err = Error;\n\n fn from_str(data: &str) -> Result<Self, <Self as FromStr>::Err> {\n\n let mut splitter = data.splitn(2, Self::SEP);\n\n\n", "file_path": "src/secrets/secret_value.rs", "rank": 23, "score": 18.807279230823536 }, { "content": " Error::Decrypt {\n\n secret: data.as_ref().into(),\n\n source: error,\n\n }\n\n })?;\n\n String::from_utf8(decrypted).map_err(|error| Error::InvalidUtf8Data {\n\n source: error,\n\n secret: data.as_ref().into(),\n\n })\n\n }\n\n\n\n pub fn decrypt_in_place(&self, data: &mut JsonValue) -> Result<(), Error> {\n\n match data {\n\n JsonValue::Object(obj) => {\n\n for (_k, v) in obj.iter_mut().filter(|(k, _)| !k.starts_with('_')) {\n\n self.decrypt_in_place(v)?;\n\n }\n\n }\n\n JsonValue::Array(elems) => {\n\n for elem in elems.iter_mut() {\n", "file_path": "src/secrets/mod.rs", "rank": 24, "score": 18.479989460122027 }, { "content": " key_id: key_id.into(),\n\n })\n\n })?;\n\n\n\n sealed_box::seal(data.as_ref().as_bytes(), key_pair.public_key())\n\n .map(|encrypted| format!(\"{}{}{}\", key_id, Self::SEP, base64::encode(encrypted)))\n\n .map_err(|error| Error::Encrypt {\n\n source: error,\n\n secret: data.as_ref().into(),\n\n })\n\n }\n\n\n\n pub fn encrypt_in_place<S: AsRef<str> + Display>(\n\n &self,\n\n key_id: &S,\n\n data: &mut JsonValue,\n\n ) -> Result<(), Error> {\n\n match data {\n\n JsonValue::Object(obj) => {\n\n for v in obj\n", "file_path": "src/secrets/mod.rs", "rank": 25, "score": 17.848032094862614 }, { "content": " let mut this = 
Self::new(error_handler);\n\n for (key_id, value) in json_obj.iter() {\n\n if let json_string @ (JsonValue::String(_) | JsonValue::Short(_)) = value {\n\n this.keys.insert(\n\n key_id.into(),\n\n KeyPair::try_from((\n\n key_id.to_string(),\n\n json_string.as_str().unwrap().to_string(),\n\n ))\n\n .map_err(|error| Error::KeyRead {\n\n source: error,\n\n key_id: key_id.into(),\n\n })?,\n\n );\n\n // Prefer keys with an associated private key\n\n this.default_public_key = Some(key_id.into());\n\n } else {\n\n this.keys.insert(\n\n key_id.into(),\n\n KeyPair::try_from(key_id.to_string()).map_err(|error| Error::KeyRead {\n", "file_path": "src/secrets/mod.rs", "rank": 26, "score": 16.606466848190564 }, { "content": "use std::{\n\n collections::HashMap,\n\n convert::TryFrom,\n\n fmt::{Debug, Display, Formatter},\n\n iter::FromIterator,\n\n string::FromUtf8Error,\n\n};\n\n\n\nuse json::{object::Object as JsonObject, JsonValue};\n\nuse thiserror::Error as BaseError;\n\n\n\nuse crate::sealed_box;\n\nuse crate::{path_assign::PathAssign, sealed_box::Error as SealedBoxError};\n\n\n\nmod keypair;\n\nuse keypair::KeyPair;\n\nmod secret_value;\n\nuse secret_value::{Error as SecretValueError, SecretValue};\n\n\n", "file_path": "src/secrets/mod.rs", "rank": 27, "score": 16.5329388068411 }, { "content": " fn preprocess_value<K, J>(&self, path: K, value: Option<J>) -> Result<Option<JsonValue>, Error>\n\n where\n\n K: AsRef<str>,\n\n J: Into<JsonValue>,\n\n {\n\n let path = path.as_ref();\n\n let dunder = path\n\n .split('.')\n\n .last()\n\n .map_or(false, |last| !last.starts_with('_'));\n\n\n\n let value = value.map(Into::into);\n\n Ok(match (value, dunder) {\n\n (Some(json_string @ (JsonValue::String(_) | JsonValue::Short(_))), true) => Some(\n\n self.keyring\n\n .encrypt_str(\n\n &self.keyring.default_public_key()?,\n\n json_string.as_str().unwrap(),\n\n )?\n\n .into(),\n", "file_path": "src/secrets/mod.rs", "rank": 28, "score": 16.368159289880154 }, { "content": " if let 
std::io::ErrorKind::NotFound = source.kind() {\n\n return Ok(JsonValue::new_object());\n\n }\n\n }\n\n }\n\n Err(error)\n\n })?,\n\n );\n\n\n\n encrypted_secrets.path_assign(\n\n json_path,\n\n value.map(|s| json::parse(&s)).transpose()?,\n\n force,\n\n )?;\n\n\n\n output.write_json(encrypted_secrets.dump())?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 29, "score": 15.997239012376752 }, { "content": " create,\n\n strict_keyring,\n\n } => {\n\n InputFile::check_stdin_once([&input, &keyring_file])?;\n\n let error_handler = CliErrorHandler::new(strict_keyring, true);\n\n let mut keyring = Keyring::from_json(keyring_file.read_json()?, error_handler.clone())?;\n\n for extra_keyring_file in extra_keyring_files {\n\n keyring +=\n\n Keyring::from_json(extra_keyring_file.read_json()?, error_handler.clone())?;\n\n }\n\n if let Some(key_id) = key_id {\n\n keyring.set_default_public_key(key_id)?;\n\n }\n\n\n\n let mut encrypted_secrets = EncryptedSecrets::from_json(\n\n &keyring,\n\n input.read_json().or_else(|error| {\n\n // TODO: Is there a way to do this this better than with three nested `if`s\n\n if create {\n\n if let IoUtilsError::Open { ref source, .. 
} = error {\n", "file_path": "src/main.rs", "rank": 30, "score": 15.567657762054811 }, { "content": " ),\n\n (v, _) => v,\n\n })\n\n }\n\n}\n\n\n\nimpl PathAssign<Error> for PlainSecrets {\n\n fn get_assign_target(&mut self) -> &mut JsonValue {\n\n &mut self.data\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod assign_tests {\n\n use json::object;\n\n\n\n use super::*;\n\n use crate::path_assign::PathAssign;\n\n\n\n #[test]\n", "file_path": "src/secrets/mod.rs", "rank": 31, "score": 15.5529966857493 }, { "content": "use std::fmt::Debug;\n\n\n\nuse anyhow::Error;\n\n\n\nuse json::JsonValue;\n\nuse structopt::StructOpt;\n\n\n\nmod path_assign;\n\nmod sealed_box;\n\nmod secrets;\n\nmod utilities;\n\n\n\nuse crate::path_assign::PathAssign;\n\nuse crate::secrets::{EncryptedSecrets, HandleError, Keyring, PlainSecrets};\n\nuse crate::utilities::{InputFile, IoUtilsError, OutputFile};\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/main.rs", "rank": 32, "score": 15.44889841896166 }, { "content": "use std::str::FromStr;\n\n\n\nuse thiserror::Error as BaseError;\n\n\n\n#[derive(Debug, BaseError)]\n\npub enum Error {\n\n #[error(\"Encrypted secret `{secret}` does not have a key section\")]\n\n MissingSecretKeyId { secret: String },\n\n #[error(\"Encrypted secret `{secret}` does not have a data section\")]\n\n MissingSecretData { secret: String },\n\n #[error(\"Decoding `{secret}` as Base64\")]\n\n Base64Decoding {\n\n secret: String,\n\n #[source]\n\n source: base64::DecodeError,\n\n },\n\n}\n\n\n\npub struct SecretValue {\n\n encrypted: Vec<u8>,\n", "file_path": "src/secrets/secret_value.rs", "rank": 33, "score": 15.305432891998686 }, { "content": "\n\n pub fn encrypt<S>(\n\n &self,\n\n secrets: PlainSecrets,\n\n key_id: Option<S>,\n\n ) -> Result<EncryptedSecrets<H>, Error>\n\n where\n\n S: AsRef<str>,\n\n {\n\n let PlainSecrets { mut data } = secrets;\n\n\n\n let public_key = match key_id {\n\n Some(public_key) => public_key.as_ref().to_string(),\n\n None => 
self.default_public_key().map(ToOwned::to_owned)?,\n\n };\n\n\n\n self.encrypt_in_place(&public_key, &mut data)?;\n\n\n\n Ok(EncryptedSecrets {\n\n keyring: self,\n", "file_path": "src/secrets/mod.rs", "rank": 34, "score": 15.028621295646127 }, { "content": " .iter_mut()\n\n .filter(|(k, _)| !k.starts_with('_'))\n\n .map(|(_, v)| v)\n\n {\n\n self.encrypt_in_place(key_id, v)?;\n\n }\n\n }\n\n JsonValue::Array(elems) => {\n\n for elem in elems.iter_mut() {\n\n self.encrypt_in_place(key_id, elem)?;\n\n }\n\n }\n\n json_string @ (JsonValue::String(_) | JsonValue::Short(_)) => {\n\n let mut encrypted: JsonValue = self\n\n .encrypt_str(key_id, json_string.as_str().unwrap())?\n\n .into();\n\n std::mem::swap(json_string, &mut encrypted);\n\n }\n\n _ => {}\n\n }\n", "file_path": "src/secrets/mod.rs", "rank": 35, "score": 14.847539117444883 }, { "content": " let error_handler = CliErrorHandler::new(strict_keyring, strict_decryption);\n\n let mut keyring = Keyring::from_json(keyring_file.read_json()?, error_handler.clone())?;\n\n for extra_keyring_file in extra_keyring_files {\n\n keyring +=\n\n Keyring::from_json(extra_keyring_file.read_json()?, error_handler.clone())?;\n\n }\n\n let encrypted_secrets = EncryptedSecrets::from_json(&keyring, input.read_json()?);\n\n output.write_json(encrypted_secrets.decrypt()?.dump())?;\n\n }\n\n\n\n CliCommands::AddSecret {\n\n keyring: keyring_file,\n\n extra_keyrings: extra_keyring_files,\n\n\n\n input,\n\n output,\n\n path: json_path,\n\n value,\n\n force,\n\n key_id,\n", "file_path": "src/main.rs", "rank": 36, "score": 14.635639267517714 }, { "content": " MissingPrivateKey { key_id: String },\n\n\n\n #[error(\"Can not use `{key_id}` as public key\")]\n\n KeyRead {\n\n key_id: String,\n\n #[source]\n\n source: KeyPairError,\n\n },\n\n\n\n #[error(\"Can not directly use `{key_id}` as public key (was not found in this keyring)\")]\n\n DetachedKeyRead {\n\n key_id: String,\n\n #[source]\n\n source: KeyPairError,\n\n },\n\n\n\n 
#[error(\"JSON document is not an object\")]\n\n JsonNotAnObject,\n\n\n\n #[error(\"Could not encrypt `{secret}`\")]\n", "file_path": "src/secrets/mod.rs", "rank": 37, "score": 14.607424509017285 }, { "content": "#[derive(Debug, Clone)]\n\npub struct KeyPair {\n\n public: PublicKey,\n\n private: Option<SecretKey>,\n\n}\n\n\n\nimpl TryFrom<String> for KeyPair {\n\n type Error = KeyPairError;\n\n\n\n fn try_from(value: String) -> Result<Self, Self::Error> {\n\n Self::decode_public_key(value).map(|public_key| Self {\n\n public: public_key,\n\n private: None,\n\n })\n\n }\n\n}\n\n\n\nimpl TryFrom<(String, String)> for KeyPair {\n\n type Error = KeyPairError;\n\n\n", "file_path": "src/secrets/keypair.rs", "rank": 38, "score": 14.508417230902026 }, { "content": " self.default_public_key\n\n .as_deref()\n\n .ok_or(Error::EmptyKeyring)\n\n }\n\n\n\n pub fn set_default_public_key<S: AsRef<str>>(&mut self, key_id: S) -> Result<(), Error> {\n\n let key_id = key_id.as_ref();\n\n if self.keys.contains_key(key_id) {\n\n self.default_public_key = Some(key_id.into());\n\n } else {\n\n let key_pair =\n\n KeyPair::try_from(key_id.to_string()).map_err(|error| Error::KeyRead {\n\n source: error,\n\n key_id: key_id.into(),\n\n })?;\n\n self.default_public_key = Some(key_pair.key_id());\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "src/secrets/mod.rs", "rank": 39, "score": 14.311283461483132 }, { "content": " pub fn dump(&self) -> JsonValue {\n\n self.data.clone()\n\n }\n\n}\n\n\n\nuse self::keypair::KeyPairError;\n\n\n\npub struct Keyring<H: HandleError + Debug> {\n\n keys: HashMap<String, KeyPair>,\n\n default_public_key: Option<String>,\n\n error_handler: H,\n\n}\n\n\n\nimpl<H: HandleError + Debug> std::ops::Add for Keyring<H> {\n\n type Output = Self;\n\n\n\n fn add(self, rhs: Self) -> Self::Output {\n\n Self {\n\n keys: self.keys.into_iter().chain(rhs.keys.into_iter()).collect(),\n\n default_public_key: self.default_public_key,\n", "file_path": "src/secrets/mod.rs", "rank": 40, 
"score": 14.026850770250743 }, { "content": " InputFile::check_stdin_once([&input, &keyring_file])?;\n\n let error_handler = CliErrorHandler::new(strict_keyring, true);\n\n let mut keyring = Keyring::from_json(keyring_file.read_json()?, error_handler.clone())?;\n\n for extra_keyring_file in extra_keyring_files {\n\n keyring +=\n\n Keyring::from_json(extra_keyring_file.read_json()?, error_handler.clone())?;\n\n }\n\n let unencrypted_secrets = PlainSecrets::from_json(input.read_json()?);\n\n output.write_json(unencrypted_secrets.encrypt_with(&keyring, key_id)?.dump())?;\n\n }\n\n\n\n CliCommands::Decrypt {\n\n keyring: keyring_file,\n\n extra_keyrings: extra_keyring_files,\n\n input,\n\n output,\n\n strict_keyring,\n\n strict_decryption,\n\n } => {\n\n InputFile::check_stdin_once([&input, &keyring_file])?;\n", "file_path": "src/main.rs", "rank": 41, "score": 13.914140913262353 }, { "content": " error_handler: self.error_handler,\n\n }\n\n }\n\n}\n\n\n\nimpl<H: HandleError + Debug> std::ops::AddAssign for Keyring<H> {\n\n fn add_assign(&mut self, rhs: Self) {\n\n self.keys.extend(rhs.keys.into_iter());\n\n if self.default_public_key.is_none() {\n\n self.default_public_key = rhs.default_public_key;\n\n }\n\n }\n\n}\n\n\n\nimpl<H: HandleError + Debug> std::fmt::Debug for Keyring<H> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n let keys_dbg = &f\n\n .debug_map()\n\n .entries(self.keys.iter().map(|(k, _)| (k, \"<redacted>\")))\n\n .finish()?;\n", "file_path": "src/secrets/mod.rs", "rank": 42, "score": 13.883251702487254 }, { "content": " data,\n\n })\n\n }\n\n\n\n pub fn decrypt(&self, secrets: EncryptedSecrets<H>) -> Result<PlainSecrets, Error> {\n\n let EncryptedSecrets {\n\n mut data,\n\n keyring: _,\n\n } = secrets;\n\n\n\n self.decrypt_in_place(&mut data)?;\n\n Ok(PlainSecrets { data })\n\n }\n\n}\n\n\n\nimpl<'k, H: HandleError + Debug> PathAssign<Error> for EncryptedSecrets<'k, H> {\n\n fn get_assign_target(&mut self) -> &mut JsonValue {\n\n 
&mut self.data\n\n }\n\n\n", "file_path": "src/secrets/mod.rs", "rank": 43, "score": 13.877707326794651 }, { "content": " source: error,\n\n key_id: key_id.into(),\n\n })?,\n\n );\n\n }\n\n }\n\n\n\n // If there's still no default key yet, pick one from the keys hashmap\n\n if this.default_public_key.is_none() {\n\n if let Some(key_id) = this.keys.keys().next() {\n\n this.default_public_key = Some(key_id.into());\n\n }\n\n }\n\n\n\n Ok(this)\n\n }\n\n\n\n pub fn from_json(json_data: JsonValue, error_handler: H) -> Result<Self, Error> {\n\n match json_data {\n\n JsonValue::Object(obj) => Self::from_json_obj(obj, error_handler),\n", "file_path": "src/secrets/mod.rs", "rank": 44, "score": 13.214477229018048 }, { "content": " }\n\n _ => {\n\n panic!(\"WAT: Keyring has no secret key, it should have failed\");\n\n }\n\n }\n\n match keyring.decrypt_str(&encrypted_data[\"d\"][\"db\"].to_string()) {\n\n Err(Error::MissingPrivateKey { key_id }) => {\n\n assert_eq!(key_id, keyring.default_public_key().unwrap());\n\n }\n\n _ => {\n\n panic!(\"WAT: Keyring has no secret key, it should have failed\");\n\n }\n\n }\n\n\n\n // Now we construct the keyring that does have the private key\n\n let keyring = {\n\n let mut keyring_json = json::object! 
{};\n\n keyring_json\n\n .insert(\n\n &k1.0,\n", "file_path": "src/secrets/mod.rs", "rank": 45, "score": 12.800328122153132 }, { "content": "\n\nimpl KeyPair {\n\n fn decode_key<S: AsRef<str>>(key: S) -> Result<[u8; KEY_SIZE], KeyPairError> {\n\n let key = key.as_ref();\n\n let decoded =\n\n base64::decode(key.as_bytes()).map_err(|error| KeyPairError::Base64Decoding {\n\n source: error,\n\n data: key.to_string(),\n\n })?;\n\n\n\n let decoded_len = decoded.len();\n\n decoded\n\n .try_into()\n\n .map_err(|_error| KeyPairError::KeyLength(decoded_len))\n\n }\n\n\n\n fn decode_public_key<S: AsRef<str>>(key: S) -> Result<PublicKey, KeyPairError> {\n\n Self::decode_key(key).map(PublicKey::from)\n\n }\n\n\n", "file_path": "src/secrets/keypair.rs", "rank": 46, "score": 12.259306415144867 }, { "content": " }\n\n}\n\n\n\nimpl<H: HandleError + Debug> Keyring<H> {\n\n // TODO: Find a better name for this `const`\n\n const SEP: char = ':';\n\n\n\n pub fn new(error_handler: H) -> Self {\n\n Keyring {\n\n keys: HashMap::new(),\n\n default_public_key: None,\n\n error_handler,\n\n }\n\n }\n\n\n\n pub fn generate(error_handler: H) -> Self {\n\n let mut this = Self::new(error_handler);\n\n let key_pair = KeyPair::generate();\n\n let key_id = key_pair.to_string();\n\n this.keys.insert(key_id.clone(), key_pair);\n", "file_path": "src/secrets/mod.rs", "rank": 47, "score": 12.004872917434119 }, { "content": "\n\n #[structopt(\n\n help = \"JSON path where to set the new value. Currently only simple `object.field.subField` paths are supported.\"\n\n )]\n\n path: String,\n\n #[structopt(\n\n help = \"The JSON value to set. This will be parsed as a JSON string, and if the parsed valie is a string, it will be encrypted before written to the output. If parsing fails, the entire argument is used as a string and encryptted. 
If this argument is missing, then the value at this path is removed.\"\n\n )]\n\n value: Option<String>,\n\n },\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 48, "score": 11.904429003468577 }, { "content": "use std::convert::{TryFrom, TryInto};\n\n\n\nuse thiserror::Error as BaseError;\n\n\n\nuse crate::sealed_box::{PublicKey, SecretKey, KEY_SIZE};\n\n\n\n#[derive(Debug, BaseError)]\n\npub enum KeyPairError {\n\n #[error(\"Decoded key length is {0}, but it should be {}\", KEY_SIZE)]\n\n KeyLength(usize),\n\n #[error(\"Could not decode `{data}` as Base64\")]\n\n Base64Decoding {\n\n #[source]\n\n source: base64::DecodeError,\n\n data: String,\n\n },\n\n #[error(\"Key pair mismatch for key `{key}`\")]\n\n KeyPairMismatch { key: String },\n\n}\n\n\n", "file_path": "src/secrets/keypair.rs", "rank": 49, "score": 11.825821185829646 }, { "content": " data: JsonValue::new_object(),\n\n keyring,\n\n }\n\n }\n\n\n\n pub fn dump(&self) -> JsonValue {\n\n self.data.clone()\n\n }\n\n}\n\n\n\n#[allow(clippy::module_name_repetitions)]\n\n#[derive(Debug)]\n\npub struct PlainSecrets {\n\n data: JsonValue,\n\n}\n\n\n\nimpl PlainSecrets {\n\n pub fn from_json(json_data: JsonValue) -> Self {\n\n Self { data: json_data }\n\n }\n", "file_path": "src/secrets/mod.rs", "rank": 50, "score": 11.422587509532926 }, { "content": " \"db\": \"b2\",\n\n \"_dc\": \"c2\",\n\n }\n\n };\n\n\n\n assert!(data.is_object());\n\n\n\n let keyring = Keyring::generate(DefaultErrorHandler);\n\n let unencrypted_secrets = PlainSecrets::from_json(data.clone());\n\n let encrypted_secrets = keyring.encrypt::<&str>(unencrypted_secrets, None).unwrap();\n\n let encrypted_data = encrypted_secrets.dump();\n\n\n\n let decrypt_str = |s: &str| keyring.decrypt_str(s).unwrap();\n\n\n\n assert_eq!(data[\"a\"], encrypted_data[\"a\"]);\n\n assert_eq!(data[\"_c\"], encrypted_data[\"_c\"]);\n\n assert_eq!(\n\n data[\"b\"].to_string(),\n\n decrypt_str(&encrypted_data[\"b\"].to_string())\n\n );\n", "file_path": 
"src/secrets/mod.rs", "rank": 51, "score": 11.412490405925656 }, { "content": " fn decode_secret_key<S: AsRef<str>>(key: S) -> Result<SecretKey, KeyPairError> {\n\n Self::decode_key(key).map(SecretKey::from)\n\n }\n\n\n\n pub fn generate() -> Self {\n\n let secret_key = SecretKey::generate(&mut rand::rngs::OsRng);\n\n Self {\n\n public: secret_key.public_key(),\n\n private: Some(secret_key),\n\n }\n\n }\n\n\n\n pub fn to_string_pair(&self) -> (String, Option<String>) {\n\n (\n\n self.key_id(),\n\n self.private\n\n .as_ref()\n\n .map(|key| base64::encode(key.to_bytes())),\n\n )\n\n }\n", "file_path": "src/secrets/keypair.rs", "rank": 52, "score": 11.270842472308583 }, { "content": "\n\n pub fn encrypt_with<S, H>(\n\n self,\n\n keyring: &Keyring<H>,\n\n public_key: Option<S>,\n\n ) -> Result<EncryptedSecrets<H>, Error>\n\n where\n\n S: AsRef<str>,\n\n H: HandleError + Debug,\n\n {\n\n keyring.encrypt(self, public_key)\n\n }\n\n\n\n pub fn encrypt<H: HandleError + Debug>(\n\n self,\n\n keyring: &Keyring<H>,\n\n ) -> Result<EncryptedSecrets<H>, Error> {\n\n self.encrypt_with(keyring, None::<&str>)\n\n }\n\n\n", "file_path": "src/secrets/mod.rs", "rank": 53, "score": 10.767375686809114 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n pub fn decrypt_str<S: AsRef<str>>(&self, data: S) -> Result<String, Error> {\n\n // let mut splitter = data.as_ref().splitn(2, Self::SEP);\n\n\n\n // let key_id = splitter\n\n // .next()\n\n // .ok_or_else(|| SecretsError::MissingSecretKeyId {\n\n // secret: data.as_ref().into(),\n\n // })?;\n\n\n\n // let encrypted =\n\n // base64::decode(\n\n // splitter\n\n // .next()\n\n // .ok_or_else(|| SecretsError::MissingSecretData {\n\n // secret: data.as_ref().into(),\n\n // })?,\n", "file_path": "src/secrets/mod.rs", "rank": 54, "score": 10.641518573179168 }, { "content": " assert_eq!(secretless_keyring.keys.iter().count(), 0);\n\n assert!(secretless_keyring.default_public_key().is_err());\n\n\n\n let unencrypted_secrets = 
PlainSecrets::from_json(data.clone());\n\n let encrypted_secrets = unencrypted_secrets\n\n .encrypt_with(&secretless_keyring, detached_key)\n\n .unwrap();\n\n let encrypted_data = encrypted_secrets.dump();\n\n\n\n let decrypt_str = |s: &str| keyring.decrypt_str(s).unwrap();\n\n\n\n assert_eq!(data[\"a\"], encrypted_data[\"a\"]);\n\n assert_eq!(data[\"_c\"], encrypted_data[\"_c\"]);\n\n assert_eq!(\n\n data[\"b\"].to_string(),\n\n decrypt_str(&encrypted_data[\"b\"].to_string())\n\n );\n\n\n\n assert_eq!(data[\"d\"][\"da\"], encrypted_data[\"d\"][\"da\"]);\n\n assert_eq!(data[\"d\"][\"_dc\"], encrypted_data[\"d\"][\"_dc\"]);\n\n assert_eq!(\n\n data[\"d\"][\"db\"].to_string(),\n\n decrypt_str(&encrypted_data[\"d\"][\"db\"].to_string())\n\n );\n\n }\n\n}\n", "file_path": "src/secrets/mod.rs", "rank": 55, "score": 10.55201466076719 }, { "content": "#[derive(Debug)]\n\npub struct EncryptedSecrets<'a, H: HandleError + Debug> {\n\n keyring: &'a Keyring<H>,\n\n data: JsonValue,\n\n}\n\n\n\nimpl<'a, H: HandleError + Debug> EncryptedSecrets<'a, H> {\n\n pub fn from_json(keyring: &'a Keyring<H>, json_data: JsonValue) -> Self {\n\n Self {\n\n keyring,\n\n data: json_data,\n\n }\n\n }\n\n\n\n pub fn decrypt(self) -> Result<PlainSecrets, Error> {\n\n self.keyring.decrypt(self)\n\n }\n\n\n\n pub fn new(keyring: &'a Keyring<H>) -> Self {\n\n Self {\n", "file_path": "src/secrets/mod.rs", "rank": 56, "score": 10.551303116342678 }, { "content": "This is where is up to the team to decide where to keep this file, and also, how to make it available in CI. 
For example, its possible to paste the content of the keyring as a Github secret, and write it to a file just before invoking `privie`:\n\n\n\n```yaml\n\n - name: Decrypt the secrets file\n\n env:\n\n KEYRING_CONTENTS: ${{ secrets.KEYRING_CONTENTS }}\n\n run: |\n\n echo $KEYRING_CONTENTS > my-keyring.json\n\n privie decrypt --keyring=my-keyring.json --input=my-encrypted-secrets.json --output=my-decrypted-secrets.json\n\n```\n\n\n\nYou can also have the keyring's content piped in via `stdin`, like so:\n\n\n\n```\n\n$ echo $KEYRING_CONTENTS | privie decrypt --keyring=- --input=my-encrypted-secrets.json --output=my-decrypted-secrets.json\n\n```\n\nMost commands with a `--keyring` argument will allow using `-` to denote to read from `stdin`, the limitation being that it won't be possible then to use `stdin` for the `--input` argument.\n\n\n\n// TODO: Add more examples\n\n```\n", "file_path": "README.md", "rank": 57, "score": 10.494601076016552 }, { "content": " this.default_public_key = Some(key_id);\n\n this\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn len(&self) -> usize {\n\n self.keys.len()\n\n }\n\n\n\n pub fn to_json(&self) -> JsonValue {\n\n JsonValue::Object(\n\n self.keys\n\n .iter()\n\n .map(|(_key_id, key_pair)| key_pair.to_string_pair())\n\n .collect::<JsonObject>(),\n\n )\n\n }\n\n\n\n #[allow(clippy::needless_pass_by_value)]\n\n pub fn from_json_obj(json_obj: JsonObject, error_handler: H) -> Result<Self, Error> {\n", "file_path": "src/secrets/mod.rs", "rank": 58, "score": 10.39344420675597 }, { "content": " \"_c\": \"c\",\n\n \"d\": {\n\n \"da\": 2,\n\n \"db\": \"b2\",\n\n \"_dc\": \"c2\",\n\n }\n\n };\n\n\n\n let unencrypted_secrets = PlainSecrets::from_json(data.clone());\n\n let encrypted_secrets = unencrypted_secrets.encrypt(&keyring).unwrap();\n\n let encrypted_data = encrypted_secrets.dump();\n\n\n\n assert_eq!(data[\"a\"], encrypted_data[\"a\"]);\n\n assert_eq!(data[\"_c\"], encrypted_data[\"_c\"]);\n\n assert_eq!(data[\"d\"][\"da\"], 
encrypted_data[\"d\"][\"da\"]);\n\n assert_eq!(data[\"d\"][\"_dc\"], encrypted_data[\"d\"][\"_dc\"]);\n\n\n\n match keyring.decrypt_str(&encrypted_data[\"b\"].to_string()) {\n\n Err(Error::MissingPrivateKey { key_id }) => {\n\n assert_eq!(key_id, keyring.default_public_key().unwrap());\n", "file_path": "src/secrets/mod.rs", "rank": 59, "score": 10.102364341390057 }, { "content": " f.debug_struct(\"Keyring\").field(\"keys\", keys_dbg).finish()\n\n }\n\n}\n\n\n\nimpl Default for Keyring<DefaultErrorHandler> {\n\n fn default() -> Self {\n\n Keyring::new(DefaultErrorHandler)\n\n }\n\n}\n\n\n\nimpl<H: HandleError + Debug + Default> FromIterator<Keyring<H>> for Keyring<H> {\n\n fn from_iter<I>(iterable: I) -> Self\n\n where\n\n I: IntoIterator<Item = Self>,\n\n {\n\n let mut result = Keyring::new(H::default());\n\n for keyring in iterable {\n\n result += keyring;\n\n }\n\n result\n", "file_path": "src/secrets/mod.rs", "rank": 60, "score": 10.09794204556961 }, { "content": " .unwrap()\n\n .dump();\n\n assert_eq!(data, decrypted_data);\n\n }\n\n\n\n #[test]\n\n fn from_json_with_one_secret_key() {\n\n let k1 = KeyPair::generate().to_string_pair();\n\n let k2 = KeyPair::generate().to_string_pair();\n\n let mut json_data = json::object! 
{};\n\n json_data.insert(&k1.0, JsonValue::Null).unwrap();\n\n json_data\n\n .insert(\n\n &k2.0,\n\n JsonValue::from(k2.1.as_ref().map(String::as_str).unwrap()),\n\n )\n\n .unwrap();\n\n\n\n // This is just to assert that the key with an empty private key comes first when iterated over\n\n if let JsonValue::Object(obj) = &json_data {\n", "file_path": "src/secrets/mod.rs", "rank": 61, "score": 9.883500628595101 }, { "content": " JsonValue::from(k1.1.as_ref().map(String::as_str).unwrap()),\n\n )\n\n .unwrap();\n\n Keyring::from_json(keyring_json, DefaultErrorHandler).unwrap()\n\n };\n\n\n\n assert_eq!(\n\n data[\"b\"].to_string(),\n\n keyring\n\n .decrypt_str(&encrypted_data[\"b\"].to_string())\n\n .unwrap()\n\n );\n\n assert_eq!(\n\n data[\"d\"][\"db\"].to_string(),\n\n keyring\n\n .decrypt_str(&encrypted_data[\"d\"][\"db\"].to_string())\n\n .unwrap()\n\n );\n\n }\n\n\n", "file_path": "src/secrets/mod.rs", "rank": 62, "score": 9.732595736738284 }, { "content": " help = \"Where to write the encrypted JSON data.\"\n\n )]\n\n output: OutputFile,\n\n #[structopt(\n\n long,\n\n help = \"Use this key to encrypt secrets, instead of the one being picked at random. This key doesn't have to be present in the keyring, though it would have to be present in the keyring (and with its corresponding secret key) when decrypting.\"\n\n )]\n\n key_id: Option<String>,\n\n #[structopt(\n\n long,\n\n help = \"Verify the keyring's keys, like their size and that public/secret pair do match.\"\n\n )]\n\n strict_keyring: bool,\n\n },\n\n\n\n Decrypt {\n\n #[structopt(\n\n short,\n\n long,\n\n env = \"PRIVIE_KEYRING\",\n", "file_path": "src/main.rs", "rank": 63, "score": 9.486988889150204 }, { "content": " help = \"Where to write the decrypted JSON data.\"\n\n )]\n\n output: OutputFile,\n\n #[structopt(short, long, help = \"Force overwriting existing values.\")]\n\n force: bool,\n\n #[structopt(\n\n long,\n\n help = \"Use this key to encrypt secrets, instead of the one being picked at random. 
This key doesn't have to be present in the keyring, though it would have to be present in the keyring (and with its corresponding secret key) when decrypting.\"\n\n )]\n\n key_id: Option<String>,\n\n #[structopt(\n\n long,\n\n help = \"Verify keyring's keys, like their size and that public/secret pair do match.\"\n\n )]\n\n strict_keyring: bool,\n\n #[structopt(\n\n long,\n\n help = \"Don't abort if there is no input file, create an empty JSON document and add new secrets to it.\"\n\n )]\n\n create: bool,\n", "file_path": "src/main.rs", "rank": 64, "score": 8.947969971528245 }, { "content": "# 🔐 Privie 🔐\n\n\n\nAn experimental (and opinionated, and probably over-engineered) CLI utility to manage secrets as part of code, managed with your source control system. This utility is heavily inspired by [`ejson`](https://github.com/Shopify/ejson), and so, it strives to keep all of its benefits, which are:\n\n\n\n> * Secrets can be safely stored in a git repo.\n\n> * Changes to secrets are auditable on a line-by-line basis with git blame.\n\n> * Anyone with git commit access has access to write new secrets.\n\n> * Decryption access can easily be locked down to production servers only.\n\n> * Secrets change synchronously with application source (as opposed to secrets provisioned by Configuration Management).\n\n\n\nThere are also a couple more (opinionated) improvements:\n\n\n\n* Keyrings are a single JSON file, instead of files named with random hashes. 
Now there's a single file with an explicit name to be added to `.gitignore`.\n\n* By default keyrings are looked up in the current directory, so no need to modify your own system using `sudo` on `/opt/` or whatever.\n\n* Full support for `stdin`/`stdout`, plus explicit CLI flags to specify the input and output files.\n\n* Handy subcommand to add and encrypt secrets to an existing secrets file; less hand editing files.\n\n* Public keys are embedded in the encrypted secrets themselves, instead of a single public key shared for all the secrets in the document. This allows for two things:\n\n * JSON documents can now have an arbitrary structure, now there's no requirement for them to be an object. We could have a JSON document containing an array of strings, the individual strings inside the array can be encrypted too.\n\n * We can construct an encrypted secrets file in which each secret can have independent public keys, so, when decoding, only the keys in the keyring with a corresponding secret key will be decrypted; the rest of the secrets (those that couldn't be decrypted) will be left as-is in the output.\n\n\n", "file_path": "README.md", "rank": 65, "score": 8.939195979457676 }, { "content": " where\n\n E: Debug,\n\n {\n\n Err(error)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DefaultErrorHandler;\n\nimpl HandleError for DefaultErrorHandler {}\n\n\n\n#[derive(Debug, BaseError)]\n\npub enum Error {\n\n #[error(\"This keyring has no keys\")]\n\n EmptyKeyring,\n\n\n\n #[error(\"Key `{key_id}` is missing in this keyring\")]\n\n MissingKeyId { key_id: String },\n\n\n\n #[error(\"Key `{key_id}` is present, but has no corresponding secret key in this keyring\")]\n", "file_path": "src/secrets/mod.rs", "rank": 66, "score": 8.814029490438863 }, { "content": " let sencrypted = bs_seal(&TEST_PAYLOAD[..], &sodium_nonce, &sbob_pkey, &salice_skey);\n\n\n\n assert_eq!(sencrypted, encrypted);\n\n }\n\n\n\n #[test]\n\n fn try_full() {\n\n use 
sodiumoxide::crypto::box_::{PublicKey as SodiumPKey, SecretKey as SodiumSKey};\n\n use sodiumoxide::crypto::sealedbox::{open as sopen, seal as sseal};\n\n\n\n let bob = {\n\n let sk = SecretKey::generate(&mut rand::thread_rng());\n\n (sk.public_key(), sk)\n\n };\n\n\n\n let sbob = (\n\n SodiumPKey::from_slice(bob.0.as_bytes()).unwrap(),\n\n SodiumSKey::from_slice(&bob.1.to_bytes()).unwrap(),\n\n );\n\n\n", "file_path": "src/sealed_box.rs", "rank": 67, "score": 8.770893991905764 }, { "content": " fn plain_secrets_path_assign() {\n\n let mut secrets = PlainSecrets::from_json(object! {\n\n a: \"1\",\n\n b: \"2\",\n\n d: \"100\",\n\n });\n\n\n\n assert!(!secrets.data.has_key(\"c\"));\n\n assert_eq!(secrets.data[\"d\"], JsonValue::from(\"100\"));\n\n\n\n secrets.path_assign(\"c\", Some(\"3\"), false).unwrap();\n\n secrets.path_assign(\"d.a\", Some(\"4\"), false).unwrap();\n\n\n\n assert_eq!(secrets.data[\"c\"], JsonValue::from(\"3\"));\n\n assert_eq!(secrets.data[\"d\"][\"a\"], JsonValue::from(\"4\"));\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/secrets/mod.rs", "rank": 68, "score": 8.528053278796015 }, { "content": " fn try_from((public_key, secret_key): (String, String)) -> Result<Self, Self::Error> {\n\n let public_key = Self::decode_public_key(public_key)?;\n\n let secret_key = Self::decode_secret_key(secret_key)?;\n\n\n\n if secret_key.public_key() != public_key {\n\n return Err(Self::Error::KeyPairMismatch {\n\n key: base64::encode(public_key),\n\n });\n\n }\n\n\n\n Ok(Self {\n\n public: public_key,\n\n private: Some(secret_key),\n\n })\n\n }\n\n}\n\n\n\nimpl From<SecretKey> for KeyPair {\n\n fn from(value: SecretKey) -> Self {\n\n Self {\n", "file_path": "src/secrets/keypair.rs", "rank": 69, "score": 8.516986045939948 }, { "content": "### Encrypting\n\n\n\nAssuming we have a file called `my-secrets.json` with this content:\n\n\n\n```json\n\n{\n\n \"mongodb\": \"mongodb://m-user:[email protected]:23022/someDb\",\n\n \"redis\": 
\"redis://r-user:[email protected]:12345/1\"\n\n}\n\n```\n\n\n\nWe can encrypt it using the keyring from the previous example:\n\n\n\n```\n\n$ privie encrypt --keyring=my-keyring.json --input=my-secrets.json --output=my-encrypted-secrets.json\n\n```\n\n\n\n<details>\n\n<summary>We can also use `stdin` and/or `stdout` instead of `--input` and/or `--ouput`</summary>\n\n\n\nFor example, this command is equivalent to the previous one:\n\n\n\n```\n\n$ privie encrypt --keyring=my-keyring.json < my-secrets.json > my-encrypted-secrets.json\n\n```\n\n\n\n</details>\n\n\n\nNow we have a `my-encrypted-secrets.json` file that looks something like this:\n\n\n\n```\n\n$ cat my-encrypted-secrets.json\n\n{\n\n \"mongodb\": \"17EV2Rohy...Th0tJF36qfAw=:qeGq09vNaBoDda8RWnmnT2zxF/B5jIeEtQ894Uy4...g9zBE2BPa08DVzt1sjHJxe\",\n\n \"redis\": \"17EV2Rohy...Th0tJF36qfAw=:cDbrdgVu446IA475Hq8HTDJb7FMKGXtlG8YAgGCbr1...7Cq0FXl+O41kKRSfEix3f70pJDK1K\"\n\n}\n\n```\n\n\n\nNow our secrets are encrypted and can be committed to our repository:\n\n\n\n```\n\n$ git add my-encrypted-secrets.json\n\n$ git commit -m 'Add encrypted secrets file'\n\n```\n\n\n\n### Decrypting\n\n\n\n```\n\n$ privie decrypt --keyring=my-keyring.json --input=my-encrypted-secrets.json --output=my-decrypted-secrets.json\n\n```\n\n\n\nSimilar to when decrypting; we use the `decrypt` subcommand instead of `encrypt` and reverse the `--input` and `--output` params. Now we have a `my-decrypted-secrets.json` file which is \"equivalent\" to the initial `my-secrets.json` file we had at the beginning. 
(They won't be exactly equal, since most formatting will be lost every time a JSON document is read and written).\n\n\n\n### Sharing the keyring\n\n\n", "file_path": "README.md", "rank": 70, "score": 8.478967899991973 }, { "content": "\n\n assert_eq!(data[\"d\"][\"da\"], encrypted_data[\"d\"][\"da\"]);\n\n assert_eq!(data[\"d\"][\"_dc\"], encrypted_data[\"d\"][\"_dc\"]);\n\n assert_eq!(\n\n data[\"d\"][\"db\"].to_string(),\n\n decrypt_str(&encrypted_data[\"d\"][\"db\"].to_string())\n\n );\n\n }\n\n\n\n #[test]\n\n fn encrypt_flat_arrays() {\n\n let data = json::array![\"a\", \"b\", \"c\", \"d\",];\n\n let keyring = Keyring::generate(DefaultErrorHandler);\n\n let plain_secrets = PlainSecrets::from_json(data.clone());\n\n let encrypted_secrets = plain_secrets.encrypt(&keyring).unwrap();\n\n let encrypted_data = encrypted_secrets.dump();\n\n\n\n assert!(encrypted_data.is_array());\n\n assert_eq!(encrypted_data.len(), 4);\n\n assert!(encrypted_data.members().all(|v| v.is_string()));\n", "file_path": "src/secrets/mod.rs", "rank": 71, "score": 8.442646441877722 }, { "content": " assert!(keyring.default_public_key.is_some());\n\n assert_eq!(keyring.keys.keys().count(), 3);\n\n\n\n let pub_key = keyring.default_public_key().unwrap();\n\n assert!(keyring.keys.contains_key(pub_key));\n\n }\n\n\n\n #[test]\n\n fn default_public_key_is_stable() {\n\n let keyring = Keyring::new(DefaultErrorHandler);\n\n assert!(keyring.default_public_key().is_err());\n\n assert!(keyring.default_public_key.is_none());\n\n\n\n let mut keyring = Keyring::generate(DefaultErrorHandler);\n\n assert!(keyring.default_public_key().is_ok());\n\n assert!(keyring.default_public_key.is_some());\n\n\n\n let key = keyring.default_public_key().unwrap().to_string();\n\n keyring += Keyring::generate(DefaultErrorHandler);\n\n assert_eq!(key, keyring.default_public_key().unwrap());\n", "file_path": "src/secrets/mod.rs", "rank": 72, "score": 8.331326303535556 }, { "content": "\n\n pub fn key_id(&self) -> String 
{\n\n base64::encode(&self.public)\n\n }\n\n\n\n pub fn public_key(&self) -> &PublicKey {\n\n &self.public\n\n }\n\n\n\n pub fn private_key(&self) -> Option<&SecretKey> {\n\n self.private.as_ref()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn checking_wrong_keys() {\n", "file_path": "src/secrets/keypair.rs", "rank": 73, "score": 8.33051993899884 }, { "content": " let k1 = KeyPair::generate();\n\n let k2 = KeyPair::generate();\n\n\n\n let (k1_public, k1_secret) = k1.to_string_pair();\n\n\n\n let (k2_public, k2_secret) = k2.to_string_pair();\n\n\n\n assert!(KeyPair::try_from((k1_public.clone(), k1_secret.clone().unwrap())).is_ok());\n\n assert!(KeyPair::try_from((k2_public.clone(), k2_secret.clone().unwrap())).is_ok());\n\n\n\n assert!(KeyPair::try_from((k1_public.clone(), k2_secret.clone().unwrap())).is_err());\n\n assert!(KeyPair::try_from((k2_public.clone(), k1_secret.clone().unwrap())).is_err());\n\n\n\n assert!(KeyPair::try_from((\n\n k1_public.clone(),\n\n k1_secret\n\n .as_ref()\n\n .map(|sk| (&sk[..(sk.len() - 2)]).into())\n\n .unwrap(),\n\n ))\n", "file_path": "src/secrets/keypair.rs", "rank": 74, "score": 8.314894682138096 }, { "content": " data[\"b\"].to_string(),\n\n decrypt_str(&encrypted_data[\"b\"].to_string())\n\n );\n\n\n\n assert_eq!(data[\"d\"][\"da\"], encrypted_data[\"d\"][\"da\"]);\n\n assert_eq!(data[\"d\"][\"_dc\"], encrypted_data[\"d\"][\"_dc\"]);\n\n assert_eq!(\n\n data[\"d\"][\"db\"].to_string(),\n\n decrypt_str(&encrypted_data[\"d\"][\"db\"].to_string())\n\n );\n\n }\n\n\n\n #[test]\n\n fn encrypt_json_associated_method_2() {\n\n let data = object! 
{\n\n \"a\": 1,\n\n \"b\": \"b\",\n\n \"_c\": \"c\",\n\n \"d\": {\n\n \"da\": 2,\n", "file_path": "src/secrets/mod.rs", "rank": 75, "score": 8.283503825764285 }, { "content": " let mut obj_iter = obj.iter();\n\n\n\n let k1_pair = obj_iter.next().unwrap();\n\n assert_eq!(k1_pair.0, k1.0);\n\n assert_eq!(k1_pair.1, &JsonValue::Null);\n\n\n\n let k2_pair = obj_iter.next().unwrap();\n\n assert_eq!(k2_pair.0, k2.0);\n\n assert_eq!(\n\n k2_pair.1,\n\n &JsonValue::from(k2.1.as_ref().map(String::as_str).unwrap())\n\n );\n\n } else {\n\n panic!(\"wat\");\n\n }\n\n\n\n let restored = Keyring::from_json(json_data, DefaultErrorHandler).unwrap();\n\n assert_eq!(restored.default_public_key().unwrap(), k2.0);\n\n }\n\n\n", "file_path": "src/secrets/mod.rs", "rank": 76, "score": 8.164755169123506 }, { "content": " #[test]\n\n fn from_json_with_no_secret_key() {\n\n let k1 = KeyPair::generate().to_string_pair();\n\n let k2 = KeyPair::generate().to_string_pair();\n\n\n\n let mut json_data = json::object! 
{};\n\n json_data.insert(&k1.0, JsonValue::Null).unwrap();\n\n json_data.insert(&k2.0, JsonValue::Null).unwrap();\n\n\n\n // This is just to assert that the key with an empty private key comes first when iterated over\n\n if let JsonValue::Object(obj) = &json_data {\n\n let mut obj_iter = obj.iter();\n\n\n\n let k1_pair = obj_iter.next().unwrap();\n\n assert_eq!(k1_pair.0, k1.0);\n\n assert_eq!(k1_pair.1, &JsonValue::Null);\n\n\n\n let k2_pair = obj_iter.next().unwrap();\n\n assert_eq!(k2_pair.0, k2.0);\n\n assert_eq!(k2_pair.1, &JsonValue::Null);\n", "file_path": "src/secrets/mod.rs", "rank": 77, "score": 7.742885701733382 }, { "content": " }\n\n\n\n fn key_load_error<E>(\n\n &self,\n\n #[allow(unused_variables)] public_key: &str,\n\n #[allow(unused_variables)] private_key: &str,\n\n error: E,\n\n ) -> Result<(), E>\n\n where\n\n E: Debug,\n\n {\n\n if self.strict_key_loading {\n\n return Err(error);\n\n }\n\n // TODO: Colors!\n\n eprintln!(\n\n \"[Strictness Off] Error loading keys `{}`: {:?}\",\n\n public_key, error\n\n );\n\n Ok(())\n", "file_path": "src/main.rs", "rank": 78, "score": 7.552120839812968 }, { "content": " // Seal and open local\n\n let encrypted = seal(&TEST_PAYLOAD[..], &bob.0);\n\n let decrypted = open(&encrypted.as_ref().unwrap(), &bob.1).unwrap();\n\n assert_eq!(&decrypted, &TEST_PAYLOAD);\n\n\n\n // Sodiumoxide open local seal\n\n let sopen_rust = sopen(&encrypted.unwrap(), &sbob.0, &sbob.1).unwrap();\n\n assert_eq!(&sopen_rust, &TEST_PAYLOAD);\n\n\n\n // local open sodiumoxide seal\n\n let sencrypted = sseal(&TEST_PAYLOAD[..], &sbob.0);\n\n let open_sodium = open(&sencrypted, &bob.1).unwrap();\n\n assert_eq!(&open_sodium, &TEST_PAYLOAD);\n\n }\n\n\n\n #[test]\n\n fn bad_ciphertext() {\n\n let key = SecretKey::generate(&mut rand::thread_rng());\n\n assert!(open(&[1, 2, 3], &key).is_err())\n\n }\n\n}\n", "file_path": "src/sealed_box.rs", "rank": 79, "score": 7.156892137368928 }, { "content": " \"_c\": \"c\",\n\n \"d\": {\n\n \"da\": 2,\n\n 
\"db\": \"b2\",\n\n \"_dc\": \"c2\",\n\n }\n\n };\n\n\n\n assert!(data.is_object());\n\n\n\n let keyring = Keyring::generate(DefaultErrorHandler);\n\n let unencrypted_secrets = PlainSecrets::from_json(data.clone());\n\n let encrypted_secrets = unencrypted_secrets.encrypt(&keyring).unwrap();\n\n let encrypted_data = encrypted_secrets.dump();\n\n\n\n let decrypt_str = |s: &str| keyring.decrypt_str(s).unwrap();\n\n\n\n assert_eq!(data[\"a\"], encrypted_data[\"a\"]);\n\n assert_eq!(data[\"_c\"], encrypted_data[\"_c\"]);\n\n assert_eq!(\n", "file_path": "src/secrets/mod.rs", "rank": 80, "score": 6.93992714421252 }, { "content": " Encrypt {\n\n #[source]\n\n source: SealedBoxError,\n\n secret: String,\n\n },\n\n\n\n // #[error(\"Encrypted secret `{secret}` does not have a key section\")]\n\n // MissingSecretKeyId { secret: String },\n\n // #[error(\"Encrypted secret `{secret}` does not have a data section\")]\n\n // MissingSecretData { secret: String },\n\n\n\n // #[error(\"Decoding `{secret}` as Base64\")]\n\n // Base64Decoding {\n\n // secret: String,\n\n // #[source]\n\n // source: base64::DecodeError,\n\n // },\n\n #[error(\"{0}\")]\n\n SecretValue(\n\n #[source]\n", "file_path": "src/secrets/mod.rs", "rank": 81, "score": 6.8331794386846365 }, { "content": " .is_err());\n\n assert!(KeyPair::try_from((\n\n k2_public.clone(),\n\n k2_secret\n\n .as_ref()\n\n .map(|sk| (&sk[..(sk.len() - 2)]).into())\n\n .unwrap(),\n\n ))\n\n .is_err());\n\n\n\n assert!(KeyPair::try_from((\n\n (&k1_public[..k1_public.len() - 2]).into(),\n\n k1_secret.clone().unwrap(),\n\n ))\n\n .is_err());\n\n assert!(KeyPair::try_from((\n\n (&k2_public[..k2_public.len() - 2]).into(),\n\n k2_secret.clone().unwrap(),\n\n ))\n\n .is_err());\n", "file_path": "src/secrets/keypair.rs", "rank": 82, "score": 6.78348219643391 }, { "content": " keyring += Keyring::generate(DefaultErrorHandler);\n\n assert_eq!(key, keyring.default_public_key().unwrap());\n\n\n\n let keyring2 = keyring + 
Keyring::generate(DefaultErrorHandler);\n\n assert_eq!(key, keyring2.default_public_key().unwrap());\n\n }\n\n\n\n #[test]\n\n fn encrypt_with_just_public_key() {\n\n let k1 = KeyPair::generate().to_string_pair();\n\n\n\n let keyring = {\n\n let mut keyring_json = json::object! {};\n\n keyring_json.insert(&k1.0, JsonValue::Null).unwrap();\n\n Keyring::from_json(keyring_json, DefaultErrorHandler).unwrap()\n\n };\n\n\n\n let data = object! {\n\n \"a\": 1,\n\n \"b\": \"b\",\n", "file_path": "src/secrets/mod.rs", "rank": 83, "score": 6.782845945952116 }, { "content": " #[from]\n\n SecretValueError,\n\n ),\n\n\n\n #[error(\"Could not decrypt `{secret}`\")]\n\n Decrypt {\n\n secret: String,\n\n #[source]\n\n source: SealedBoxError,\n\n },\n\n\n\n #[error(\"Decrypting suceeded, but the decrypted data is not valid UTF-8 text\")]\n\n InvalidUtf8Data {\n\n secret: String,\n\n #[source]\n\n source: FromUtf8Error,\n\n },\n\n}\n\n\n\n#[allow(clippy::module_name_repetitions)]\n", "file_path": "src/secrets/mod.rs", "rank": 84, "score": 6.495004902607873 }, { "content": "## Caveats (or, why you shouldn't use this)\n\nUnfortunately, at this moment, Privie can not be recommended for production environments, mainly because:\n\n* I'm not cryptography expert! (Help is welcomed)\n\n* The [cryptographic libraries used](https://github.com/RustCrypto) provide a good API, however they haven't been (yet) properly audited.\n\n\n\n## Usage\n\n\n\n### Keyrings\n\n\n\nA keyring is a JSON document with a mapping of public keys and (optionally) their corresponding secret keys. 
We only need a public key to encrypt a secret, but we do need its corresponding secret key to decrypt that encrypted secret.\n\n\n\nA keyring can be generated like this:\n\n```\n\n$ privie generate-keyring > my-keyring.json\n\n# or\n\n$ privie generate-keyring --output=my-keyring.json\n\n```\n\nBy default generated keyrings are dumped to `stdout`, so we can use shell redirections (`>`) or the explicit `--output` parameter.\n\n\n\nNow we have a keyring file that looks like this:\n\n```\n\ncat my-keyring.json\n\n{\"17EV2Rohy...Th0tJF36qfAw=\":\"hy9ZzHBn...8dpiqdIBxck8=\"}\n\n```\n\n\n\nKeyrings contain the secret keys required for decryption, so it's very important we keep these safe. *DO NOT commit these to any code repository!*.\n\n\n", "file_path": "README.md", "rank": 85, "score": 6.412740257226311 }, { "content": " help = \"Where to write the decrypted JSON data\"\n\n )]\n\n output: OutputFile,\n\n #[structopt(\n\n long,\n\n help = \"Verify keyring's keys, like their size and that public/secret pair do match.\"\n\n )]\n\n strict_keyring: bool,\n\n #[structopt(\n\n long,\n\n help = \"Abort the operation if at least one secret fails to be decrypted\"\n\n )]\n\n strict_decryption: bool,\n\n },\n\n\n\n AddSecret {\n\n #[structopt(\n\n short,\n\n long,\n\n env = \"PRIVIE_KEYRING\",\n", "file_path": "src/main.rs", "rank": 86, "score": 6.358896546054433 }, { "content": " assert_ne!(msg, encrypted_msg);\n\n assert_eq!(\n\n encrypted_msg\n\n .split(Keyring::<DefaultErrorHandler>::SEP)\n\n .map(base64::decode)\n\n .filter(Result::is_ok)\n\n .collect::<Vec<_>>()\n\n .len(),\n\n 2\n\n );\n\n\n\n let decrypted_msg = keyring.decrypt_str(encrypted_msg).unwrap();\n\n assert_eq!(decrypted_msg, msg);\n\n }\n\n\n\n #[test]\n\n fn encrypt_json_associated_method() {\n\n let data = object! 
{\n\n \"a\": 1,\n\n \"b\": \"b\",\n", "file_path": "src/secrets/mod.rs", "rank": 87, "score": 6.195237082072049 }, { "content": " // )\n\n // .map_err(|error| SecretsError::Base64Decoding {\n\n // secret: data.as_ref().into(),\n\n // source: error,\n\n // })?;\n\n let secret_value = data.as_ref().parse::<SecretValue>()?;\n\n let key_id = secret_value.get_key_id();\n\n\n\n let key_pair = self.keys.get(key_id).ok_or_else(|| Error::MissingKeyId {\n\n key_id: key_id.into(),\n\n })?;\n\n\n\n let secret_key = key_pair\n\n .private_key()\n\n .ok_or_else(|| Error::MissingPrivateKey {\n\n key_id: key_id.into(),\n\n })?;\n\n\n\n let decrypted =\n\n sealed_box::open(secret_value.get_encrypted(), secret_key).map_err(|error| {\n", "file_path": "src/secrets/mod.rs", "rank": 88, "score": 6.04620083070819 }, { "content": " (sk.public_key(), sk)\n\n };\n\n\n\n let nonce = get_nonce(&alice.0, &bob.0);\n\n let sodium_nonce = Nonce::from_slice(&nonce).unwrap();\n\n\n\n assert_eq!(&sodium_nonce[..], &nonce[..])\n\n }\n\n\n\n #[test]\n\n fn try_box() {\n\n use sodiumoxide::crypto::box_::{\n\n seal as bs_seal, Nonce, PublicKey as SodiumPKey, SecretKey as SodiumSKey,\n\n };\n\n\n\n // Ephemeral\n\n let alice = {\n\n let sk = SecretKey::generate(&mut rand::thread_rng());\n\n (sk.public_key(), sk)\n\n };\n", "file_path": "src/sealed_box.rs", "rank": 89, "score": 5.954633676801954 }, { "content": " // TODO: Should we also attempt decoding the key as Base64?\n\n let key_id = splitter.next().ok_or_else(|| Error::MissingSecretKeyId {\n\n secret: data.into(),\n\n })?;\n\n\n\n let encrypted =\n\n base64::decode(splitter.next().ok_or_else(|| Error::MissingSecretData {\n\n secret: data.into(),\n\n })?)\n\n .map_err(|error| Error::Base64Decoding {\n\n secret: data.into(),\n\n source: error,\n\n })?;\n\n\n\n Ok(Self {\n\n key_id: key_id.to_string(),\n\n encrypted,\n\n })\n\n }\n\n}\n", "file_path": "src/secrets/secret_value.rs", "rank": 90, "score": 5.934977922836276 }, { "content": "use 
blake2::{\n\n digest::{Update, VariableOutput},\n\n VarBlake2b,\n\n};\n\nuse thiserror::Error as BaseError;\n\n\n\nuse crypto_box::{\n\n aead::{generic_array::GenericArray, Aead, Error as AeadError},\n\n Box as CryptoBox,\n\n};\n\n\n\n// Re-export the main structs & constants\n\npub use crypto_box::{PublicKey, SecretKey, KEY_SIZE};\n\n\n\n#[derive(Debug, BaseError)]\n\npub enum Error {\n\n #[error(\"Encrypt/Decrypt error ({0})\")]\n\n CryptError(#[from] AeadError),\n\n #[error(\n\n \"Malformed ciphertext; Data length should be at least {}, got {0}\",\n", "file_path": "src/sealed_box.rs", "rank": 91, "score": 5.812969463768372 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crypto_box::{Box as CryptoBox, SecretKey};\n\n\n\n const TEST_PAYLOAD: &[u8; 15] = b\"sealed_box test\";\n\n\n\n #[test]\n\n fn try_nonce() {\n\n use sodiumoxide::crypto::box_::Nonce;\n\n\n\n //ephemeral\n\n let alice = {\n\n let sk = SecretKey::generate(&mut rand::thread_rng());\n\n (sk.public_key(), sk)\n\n };\n\n\n\n //target\n\n let bob = {\n\n let sk = SecretKey::generate(&mut rand::thread_rng());\n", "file_path": "src/sealed_box.rs", "rank": 92, "score": 5.806692099731124 }, { "content": " default_value = \"./.privie-keyring.json\",\n\n help = \"Keyring used for encryption.\"\n\n )]\n\n keyring: InputFile,\n\n #[structopt(\n\n long,\n\n help = \"Extra keyrings to use. 
These will all get merged with the primary one and used as one.\"\n\n )]\n\n extra_keyrings: Vec<InputFile>,\n\n #[structopt(\n\n short,\n\n long,\n\n default_value,\n\n help = \"The encrypted JSON data to add secrets to.\"\n\n )]\n\n input: InputFile,\n\n #[structopt(\n\n short,\n\n long,\n\n default_value,\n", "file_path": "src/main.rs", "rank": 93, "score": 5.710341590905193 }, { "content": "\n\n // let mut rng = rand::rngs::OsRng;\n\n // let mut buf = Vec::with_capacity(KEY_SIZE * 3);\n\n // rng.fill_bytes(&mut buf);\n\n // assert!(\n\n // Keyring::<ErrorHandlerStub>::check_key_pair(k1_public, &base64::encode(buf)).is_err()\n\n // );\n\n }\n\n}\n", "file_path": "src/secrets/keypair.rs", "rank": 94, "score": 5.693057482605033 }, { "content": " Encrypt {\n\n #[structopt(\n\n short,\n\n long,\n\n env = \"PRIVIE_KEYRING\",\n\n default_value = \"./.privie-keyring.json\",\n\n help = \"Keyring used for encryption.\"\n\n )]\n\n keyring: InputFile,\n\n #[structopt(\n\n long,\n\n help = \"Extra keyrings to use. These will all get merged with the primary one and used as one.\"\n\n )]\n\n extra_keyrings: Vec<InputFile>,\n\n #[structopt(short, long, default_value, help = \"The JSON data to encrypt.\")]\n\n input: InputFile,\n\n #[structopt(\n\n short,\n\n long,\n\n default_value,\n", "file_path": "src/main.rs", "rank": 95, "score": 5.573175507022892 }, { "content": " default_value = \"./.privie-keyring.json\",\n\n help = \"Keyring used for encryption. You will need to make sure all the referenced public keys have their corresponding secret keys to be able to decrypt with them.\"\n\n )]\n\n keyring: InputFile,\n\n #[structopt(\n\n long,\n\n help = \"Extra keyrings to use. 
These will all get merged with the primary one and used as one.\"\n\n )]\n\n extra_keyrings: Vec<InputFile>,\n\n #[structopt(\n\n short,\n\n long,\n\n default_value,\n\n help = \"The encrypted JSON data to decrypt\"\n\n )]\n\n input: InputFile,\n\n #[structopt(\n\n short,\n\n long,\n\n default_value,\n", "file_path": "src/main.rs", "rank": 96, "score": 5.468177601274613 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use rand::RngCore;\n\n\n\n #[test]\n\n fn parse_success() {\n\n let encrypted = {\n\n let mut buffer: [u8; 120] = [0; 120];\n\n rand::rngs::OsRng.fill_bytes(&mut buffer);\n\n buffer\n\n };\n\n\n\n let original_key_id = \"some-key-id-here\";\n\n let secret_value = format!(\"{}:{}\", original_key_id, base64::encode(encrypted))\n\n .parse::<SecretValue>()\n\n .unwrap();\n\n assert_eq!(secret_value.get_key_id(), original_key_id);\n\n assert_eq!(secret_value.get_encrypted(), encrypted);\n", "file_path": "src/secrets/secret_value.rs", "rank": 97, "score": 5.0976681478274415 }, { "content": " let decrypted_data = EncryptedSecrets::from_json(&keyring, encrypted_data)\n\n .decrypt()\n\n .unwrap()\n\n .dump();\n\n assert_eq!(data, decrypted_data);\n\n }\n\n\n\n #[test]\n\n fn encrypt_flat_arrays_nested_data() {\n\n let data = json::array![\"a\", {\"b\": \"b1\"}, \"c\", {\"_d\": \"d1\"},];\n\n let keyring = Keyring::generate(DefaultErrorHandler);\n\n let plain_secrets = PlainSecrets::from_json(data.clone());\n\n let encrypted_secrets = plain_secrets.encrypt(&keyring).unwrap();\n\n let encrypted_data = encrypted_secrets.dump();\n\n\n\n assert!(encrypted_data.is_array());\n\n assert_eq!(encrypted_data.len(), 4);\n\n\n\n let decrypted_data = EncryptedSecrets::from_json(&keyring, encrypted_data)\n\n .decrypt()\n", "file_path": "src/secrets/mod.rs", "rank": 98, "score": 4.685671622645387 }, { "content": " use json::object;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_encrypted_secrets_new() {\n\n let keyring = Keyring::default();\n\n let 
secrets = EncryptedSecrets::new(&keyring);\n\n assert!(secrets.data.is_object());\n\n assert_eq!(secrets.data.len(), 0);\n\n }\n\n\n\n #[test]\n\n fn keyring_add_op() {\n\n let keyring1 = Keyring::generate(DefaultErrorHandler);\n\n let keyring2 = Keyring::generate(DefaultErrorHandler);\n\n\n\n let key1 = keyring1\n\n .default_public_key()\n\n .map(ToOwned::to_owned)\n", "file_path": "src/secrets/mod.rs", "rank": 99, "score": 4.594391771396036 } ]
Rust
ivy-vulkan/src/descriptors/layout.rs
ten3roberts/ivy
fb5a7645c9f699c2aebf3d1b90c1d1f9e78355fa
use crate::Result; use std::{collections::HashMap, sync::Arc}; use ash::vk::DescriptorSetLayout; use ash::vk::DescriptorSetLayoutCreateInfo; use ash::Device; use parking_lot::RwLock; use smallvec::SmallVec; use std::hash::{Hash, Hasher}; use super::DescriptorSetBinding; use super::MAX_BINDINGS; #[derive(Clone, Debug)] pub struct DescriptorLayoutInfo { bindings: SmallVec<[DescriptorSetBinding; MAX_BINDINGS]>, } unsafe impl Send for DescriptorLayoutInfo {} unsafe impl Sync for DescriptorLayoutInfo {} impl DescriptorLayoutInfo { pub fn new(bindings: &[DescriptorSetBinding]) -> Self { let mut layout = Self { bindings: Default::default(), }; for binding in bindings { layout.insert(*binding); } layout } pub fn bindings(&self) -> &[DescriptorSetBinding] { &self.bindings } pub fn insert(&mut self, binding: DescriptorSetBinding) { let mut len = self.bindings.len(); let mut mid = (len / 2).min(1); loop { if len < 1 { if mid == self.bindings.len() || self.bindings[mid].binding > binding.binding { self.bindings.insert(mid, binding); } else { self.bindings.insert(mid + 1, binding); } break; } match self.bindings[mid].binding.cmp(&binding.binding) { std::cmp::Ordering::Less => { len /= 2; mid += (len as f32 / 2.0).floor() as usize; } std::cmp::Ordering::Equal => { self.bindings[mid] = binding; break; } std::cmp::Ordering::Greater => { len /= 2; mid -= (len as f32 / 2.0).ceil() as usize; } } } } pub fn extend<I: Iterator<Item = DescriptorSetBinding>>(&mut self, bindings: I) { self.bindings.extend(bindings); } pub fn get(&mut self, binding: u32) -> Option<DescriptorSetBinding> { let mut len = self.bindings.len(); let mut mid = (len / 2).max(1); loop { if mid >= self.bindings.len() || len == 0 { return None; } match self.bindings[mid].binding.cmp(&binding) { std::cmp::Ordering::Less => { len /= 2; mid += (len as f32 / 2.0).ceil() as usize; } std::cmp::Ordering::Equal => return Some(self.bindings[mid]), std::cmp::Ordering::Greater => { len /= 2; mid -= (len as f32 / 2.0).ceil() as 
usize; } } } } } impl Default for DescriptorLayoutInfo { fn default() -> Self { Self { bindings: Default::default(), } } } impl Hash for DescriptorLayoutInfo { fn hash<H: Hasher>(&self, state: &mut H) { for binding in &self.bindings { binding.binding.hash(state); } } } impl PartialEq for DescriptorLayoutInfo { fn eq(&self, other: &Self) -> bool { for (a, b) in self.bindings.iter().zip(&other.bindings) { if a.binding != b.binding || a.descriptor_type != b.descriptor_type || a.descriptor_count != b.descriptor_count || a.stage_flags != b.stage_flags { return false; } } true } } impl Eq for DescriptorLayoutInfo {} pub struct DescriptorLayoutCache { device: Arc<Device>, layouts: RwLock<HashMap<DescriptorLayoutInfo, DescriptorSetLayout>>, } impl DescriptorLayoutCache { pub fn new(device: Arc<Device>) -> Self { Self { device, layouts: RwLock::new(HashMap::new()), } } pub fn get(&self, info: &DescriptorLayoutInfo) -> Result<DescriptorSetLayout> { let guard = self.layouts.read(); if let Some(layout) = guard.get(info) { Ok(*layout) } else { drop(guard); let info = info.clone(); let layout = create(&self.device, &info)?; Ok(*self.layouts.write().entry(info).or_insert(layout)) } } pub fn clear(&mut self) { for (_, layout) in self.layouts.write().drain() { destroy(&self.device, layout); } } } impl Drop for DescriptorLayoutCache { fn drop(&mut self) { self.clear() } } pub fn create(device: &Device, info: &DescriptorLayoutInfo) -> Result<DescriptorSetLayout> { let create_info = DescriptorSetLayoutCreateInfo { binding_count: info.bindings.len() as u32, p_bindings: info.bindings.as_ptr(), ..Default::default() }; let layout = unsafe { device.create_descriptor_set_layout(&create_info, None)? 
}; Ok(layout) } pub fn destroy(device: &Device, layout: DescriptorSetLayout) { unsafe { device.destroy_descriptor_set_layout(layout, None) } } #[cfg(test)] mod tests { use ash::vk::DescriptorType; use super::*; #[test] fn layout_info_add() { let mut layout = DescriptorLayoutInfo::new(&[]); let mut bindings = [DescriptorSetBinding::default(); 6]; bindings[0] = DescriptorSetBinding { binding: 0, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[0]); eprintln!("Layout: {:?}", layout); bindings[2] = DescriptorSetBinding { binding: 2, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[2]); eprintln!("Layout: {:?}", layout); bindings[1] = DescriptorSetBinding { binding: 1, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[1]); eprintln!("Layout: {:?}", layout); bindings[3] = DescriptorSetBinding { binding: 3, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[3]); eprintln!("Layout: {:?}", layout); bindings[5] = DescriptorSetBinding { binding: 5, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[5]); eprintln!("Layout: {:?}", layout); bindings[4] = DescriptorSetBinding { binding: 4, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[4]); layout.insert(DescriptorSetBinding { binding: 4, descriptor_type: DescriptorType::COMBINED_IMAGE_SAMPLER, descriptor_count: 1, ..Default::default() }); eprintln!("Layout: {:?}", layout); assert!(layout .bindings() .iter() .map(|val| val.binding) .eq([0, 1, 2, 3, 4, 5].iter().cloned())); for binding in &bindings { assert_eq!( Some(binding.binding), layout.get(binding.binding).map(|val| val.binding) ) } assert_eq!(None, layout.get(9).map(|_| ())); 
} }
use crate::Result; use std::{collections::HashMap, sync::Arc}; use ash::vk::DescriptorSetLayout; use ash::vk::DescriptorSetLayoutCreateInfo; use ash::Device; use parking_lot::RwLock; use smallvec::SmallVec; use std::hash::{Hash, Hasher}; use super::DescriptorSetBinding; use super::MAX_BINDINGS; #[derive(Clone, Debug)] pub struct DescriptorLayoutInfo { bindings: SmallVec<[DescriptorSetBinding; MAX_BINDINGS]>, } unsafe impl Send for DescriptorLayoutInfo {} unsafe impl Sync for DescriptorLayoutInfo {} impl DescriptorLayoutInfo { pub fn new(bindings: &[DescriptorSetBinding]) -> Self { let mut layout = Self { bindings: Default::default(), }; for binding in bindings { layout.insert(*binding); } layout } pub fn bindings(&self) -> &[DescriptorSetBinding] { &self.bindings } pub fn insert(&mut self, binding: DescriptorSetBinding) { let mut len = self.bindings.len(); let mut mid = (len / 2).min(1); loop { if len < 1 { if mid == self.bindings.len() || self.bindings[mid].binding > binding.binding { self.bindings.insert(mid, binding); } else { self.bindings.insert(mid + 1, binding); } break; } match self.bindings[mid].binding.cmp(&binding.binding) { std::cmp::Ordering::Less => { len /= 2; mid += (len as f32 / 2.0).floor() as usize; } std::cmp::Ordering::Equal => { self.bindings[mid] = binding; break; } std::cmp::Ordering::Greater => { len /= 2; mid -= (len as f32 / 2.0).ceil() as usize; } } } } pub fn extend<I: Iterator<Item = DescriptorSetBinding>>(&mut self, bindings: I) { self.bindings.extend(bindings); } pub fn get(&mut self, binding: u32) -> Option<DescriptorSetBinding> { let mut len = self.bindings.len(); let mut mid = (len / 2).max(1); loop { if mid >= self.bindings.len() || len == 0 { return None; } match self.bindings[mid].binding.cmp(&binding) { std::cmp::Ordering::Less => { len /= 2; mid += (len as f32 / 2.0).ceil() as usize; } std::cmp::Ordering::Equal => return Some(self.bindings[mid]), std::cmp::Ordering::Greater => { len /= 2; mid -= (len as f32 / 2.0).ceil() as 
usize; } } } } } impl Default for DescriptorLayoutInfo { fn default() -> Self { Self { bindings: Default::default(), } } } impl Hash for DescriptorLayoutInfo { fn hash<H: Hasher>(&self, state: &mut H) { for binding in &self.bindings { binding.binding.hash(state); } } } impl PartialEq for DescriptorLayoutInfo {
} impl Eq for DescriptorLayoutInfo {} pub struct DescriptorLayoutCache { device: Arc<Device>, layouts: RwLock<HashMap<DescriptorLayoutInfo, DescriptorSetLayout>>, } impl DescriptorLayoutCache { pub fn new(device: Arc<Device>) -> Self { Self { device, layouts: RwLock::new(HashMap::new()), } } pub fn get(&self, info: &DescriptorLayoutInfo) -> Result<DescriptorSetLayout> { let guard = self.layouts.read(); if let Some(layout) = guard.get(info) { Ok(*layout) } else { drop(guard); let info = info.clone(); let layout = create(&self.device, &info)?; Ok(*self.layouts.write().entry(info).or_insert(layout)) } } pub fn clear(&mut self) { for (_, layout) in self.layouts.write().drain() { destroy(&self.device, layout); } } } impl Drop for DescriptorLayoutCache { fn drop(&mut self) { self.clear() } } pub fn create(device: &Device, info: &DescriptorLayoutInfo) -> Result<DescriptorSetLayout> { let create_info = DescriptorSetLayoutCreateInfo { binding_count: info.bindings.len() as u32, p_bindings: info.bindings.as_ptr(), ..Default::default() }; let layout = unsafe { device.create_descriptor_set_layout(&create_info, None)? 
}; Ok(layout) } pub fn destroy(device: &Device, layout: DescriptorSetLayout) { unsafe { device.destroy_descriptor_set_layout(layout, None) } } #[cfg(test)] mod tests { use ash::vk::DescriptorType; use super::*; #[test] fn layout_info_add() { let mut layout = DescriptorLayoutInfo::new(&[]); let mut bindings = [DescriptorSetBinding::default(); 6]; bindings[0] = DescriptorSetBinding { binding: 0, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[0]); eprintln!("Layout: {:?}", layout); bindings[2] = DescriptorSetBinding { binding: 2, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[2]); eprintln!("Layout: {:?}", layout); bindings[1] = DescriptorSetBinding { binding: 1, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[1]); eprintln!("Layout: {:?}", layout); bindings[3] = DescriptorSetBinding { binding: 3, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[3]); eprintln!("Layout: {:?}", layout); bindings[5] = DescriptorSetBinding { binding: 5, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[5]); eprintln!("Layout: {:?}", layout); bindings[4] = DescriptorSetBinding { binding: 4, descriptor_type: DescriptorType::UNIFORM_BUFFER, descriptor_count: 1, ..Default::default() }; layout.insert(bindings[4]); layout.insert(DescriptorSetBinding { binding: 4, descriptor_type: DescriptorType::COMBINED_IMAGE_SAMPLER, descriptor_count: 1, ..Default::default() }); eprintln!("Layout: {:?}", layout); assert!(layout .bindings() .iter() .map(|val| val.binding) .eq([0, 1, 2, 3, 4, 5].iter().cloned())); for binding in &bindings { assert_eq!( Some(binding.binding), layout.get(binding.binding).map(|val| val.binding) ) } assert_eq!(None, layout.get(9).map(|_| ())); 
} }
fn eq(&self, other: &Self) -> bool { for (a, b) in self.bindings.iter().zip(&other.bindings) { if a.binding != b.binding || a.descriptor_type != b.descriptor_type || a.descriptor_count != b.descriptor_count || a.stage_flags != b.stage_flags { return false; } } true }
function_block-full_function
[ { "content": "// Generates a random scalar between -1 and 1\n\npub fn one<R: Rng>(rng: &mut R) -> f32 {\n\n rng.gen_range(-1.0..=1.0)\n\n}\n", "file_path": "ivy-random/src/scalar.rs", "rank": 0, "score": 255513.0309084286 }, { "content": "// Generates a random scalar between 0 and 1\n\npub fn normalized<R: Rng>(rng: &mut R) -> f32 {\n\n rng.gen_range(0.0..=1.0)\n\n}\n\n\n", "file_path": "ivy-random/src/scalar.rs", "rank": 1, "score": 255513.0309084286 }, { "content": "/// Recursively draw the connection tree using gizmos\n\npub fn draw_connections(world: &impl GenericWorld, gizmos: &mut Gizmos) -> Result<()> {\n\n world\n\n .roots::<Connection>()?\n\n .into_iter()\n\n .try_for_each(|root| draw_subtree(world, root.0, gizmos))\n\n}\n\n\n", "file_path": "ivy-physics/src/connections/systems.rs", "rank": 2, "score": 239917.14265139596 }, { "content": "pub fn reactive_system<T: 'static + Copy + Send + Sync, I: Iterator<Item = WidgetEvent>>(\n\n world: &World,\n\n events: I,\n\n) -> Result<()> {\n\n events\n\n .filter_map(|event| ReactiveState::try_from_event(&event).map(|val| (event.entity(), val)))\n\n .try_for_each(|(entity, state)| -> Result<()> {\n\n eprintln!(\"Got: {:?}\", state);\n\n let mut query = world.try_query_one::<(&mut T, &Reactive<T>)>(entity)?;\n\n if let Ok((val, reactive)) = query.get() {\n\n reactive.update(val, state);\n\n }\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "ivy-ui/src/systems.rs", "rank": 3, "score": 232205.2413743696 }, { "content": "/// Represents a single pass containing the pipeline and other data. Since\n\n/// [Material](ivy-graphics::Material) does not\n\n/// contain a pipeline, a `ShaderPass` can be considered a master material.\n\npub trait ShaderPass: 'static + Send + Sync {\n\n /// Returns the pipeline used for this shaderpass.\n\n fn pipeline(&self) -> &crate::PipelineInfo;\n\n}\n\n\n\n/// Macro to create a strongly typed shaderpass.\n\n#[macro_export(local_inner_macros)]\n\nmacro_rules! 
new_shaderpass {\n\n ( $(#[$outer:meta])* $vis:vis struct $name:ident; $($rest:tt)* ) => {\n\n $(#[$outer])*\n\n #[repr(transparent)]\n\n $vis struct $name(pub $crate::PipelineInfo);\n\n\n\n impl $name {\n\n fn new(pipeline: $crate::PipelineInfo) -> Self {\n\n Self ( pipeline )\n\n }\n\n }\n\n\n\n impl $crate::ShaderPass for $name {\n", "file_path": "ivy-vulkan/src/shaderpass.rs", "rank": 5, "score": 210353.70861648553 }, { "content": "#[inline]\n\npub fn perspective_vk(vertical_fov: f32, aspect_ratio: f32, z_near: f32, z_far: f32) -> Mat4 {\n\n let t = (vertical_fov / 2.0).tan();\n\n let sy = 1.0 / t;\n\n let sx = sy / aspect_ratio;\n\n let nmf = z_near - z_far;\n\n\n\n Mat4::from_cols(\n\n Vec4::new(sx, 0.0, 0.0, 0.0),\n\n Vec4::new(0.0, -sy, 0.0, 0.0),\n\n Vec4::new(0.0, 0.0, z_far / nmf, -1.0),\n\n Vec4::new(0.0, 0.0, z_near * z_far / nmf, 0.0),\n\n )\n\n}\n\n\n\n/// Orthographic projection matrix for use with Vulkan.\n\n///\n\n/// This matrix is meant to be used when the source coordinate space is right-handed and y-up\n\n/// (the standard computer graphics coordinate space)and the destination space is right-handed\n\n/// and y-down, with Z (depth) clip extending from 0.0 (close) to 1.0 (far).\n", "file_path": "ivy-graphics/src/camera.rs", "rank": 6, "score": 210138.64532527304 }, { "content": "#[inline]\n\npub fn orthographic_vk(left: f32, right: f32, bottom: f32, top: f32, near: f32, far: f32) -> Mat4 {\n\n let rml = right - left;\n\n let rpl = right + left;\n\n let tmb = top - bottom;\n\n let tpb = top + bottom;\n\n let fmn = far - near;\n\n Mat4::from_cols(\n\n Vec4::new(2.0 / rml, 0.0, 0.0, 0.0),\n\n Vec4::new(0.0, -2.0 / tmb, 0.0, 0.0),\n\n Vec4::new(0.0, 0.0, -1.0 / fmn, 0.0),\n\n Vec4::new(-(rpl / rml), -(tpb / tmb), -(near / fmn), 1.0),\n\n )\n\n}\n", "file_path": "ivy-graphics/src/camera.rs", "rank": 7, "score": 207444.36168085466 }, { "content": "pub trait KeyQuery: Send + Sync + Query {\n\n type K: RendererKey;\n\n fn into_key(&self) -> 
Self::K;\n\n}\n\n\n", "file_path": "ivy-graphics/src/base_renderer/mod.rs", "rank": 8, "score": 205728.20157208038 }, { "content": "pub trait AnyEventSender: 'static + Send + Sync + Downcast {}\n\nimpl_downcast!(AnyEventSender);\n\n\n\n/// Handles event dispatching for a single type of event\n\npub struct EventDispatcher<T: Event> {\n\n subscribers: Mutex<Vec<Subscriber<T>>>,\n\n pub blocked: bool,\n\n}\n\n\n\nimpl<T> EventDispatcher<T>\n\nwhere\n\n T: Event,\n\n{\n\n pub fn new() -> Self {\n\n Self {\n\n subscribers: Mutex::new(Vec::new()),\n\n blocked: false,\n\n }\n\n }\n\n\n", "file_path": "ivy-base/src/events/dispatcher.rs", "rank": 9, "score": 200844.24269977657 }, { "content": "pub trait AnyEventDispatcher: 'static + Send + Sync + Downcast {}\n\nimpl_downcast!(AnyEventDispatcher);\n\n\n", "file_path": "ivy-base/src/events/dispatcher.rs", "rank": 10, "score": 200844.24269977657 }, { "content": "pub trait CollisionTreeNode: 'static + Sized + Send + Sync {\n\n /// Returns the objects contained in the node\n\n fn objects(&self) -> &[Object];\n\n\n\n fn insert(\n\n index: NodeIndex,\n\n nodes: &mut Nodes<Self>,\n\n object: Object,\n\n data: &SlotMap<ObjectIndex, ObjectData>,\n\n );\n\n /// Removes an object entity from the node\n\n fn remove(&mut self, object: Object) -> Option<Object>;\n\n\n\n /// Returns the node bounds\n\n fn bounds(&self) -> BoundingBox;\n\n\n\n fn locate(\n\n index: NodeIndex,\n\n nodes: &Nodes<Self>,\n\n object: Object,\n", "file_path": "ivy-collision/src/tree/traits.rs", "rank": 11, "score": 198650.7938853929 }, { "content": "/// Compute barycentric coordinates of p in relation to the triangle defined by (a, b, c).\n\npub fn barycentric_vector(point: Vec3, p1: Vec3, p2: Vec3, p3: Vec3) -> (f32, f32, f32) {\n\n let v0 = p2 - p1;\n\n let v1 = p3 - p1;\n\n let v2 = point - p1;\n\n let d00 = v0.dot(v0);\n\n let d01 = v0.dot(v1);\n\n let d11 = v1.dot(v1);\n\n let d20 = v2.dot(v0);\n\n let d21 = v2.dot(v1);\n\n let inv_denom = 1.0 / (d00 * d11 - 
d01 * d01);\n\n\n\n let v = (d11 * d20 - d01 * d21) * inv_denom;\n\n let w = (d00 * d21 - d01 * d20) * inv_denom;\n\n let u = 1.0 - v - w;\n\n (u, v, w)\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 13, "score": 197853.92008275512 }, { "content": "pub fn get_queue(device: &Device, family_index: u32, index: u32) -> vk::Queue {\n\n unsafe { device.get_device_queue(family_index, index) }\n\n}\n\n\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 14, "score": 197569.41028476798 }, { "content": "pub fn move_system(world: &mut World, input: &Input) {\n\n world\n\n .query::<(&Mover, &mut Velocity, &mut AngularVelocity, &Rotation)>()\n\n .iter()\n\n .for_each(|(_, (m, v, a, r))| {\n\n let movement = m.translate.get(&input);\n\n if m.local {\n\n *v = Velocity(**r * movement) * m.speed;\n\n } else {\n\n *v = Velocity(movement) * m.speed;\n\n }\n\n\n\n let ang = m.rotate.get(&input);\n\n *a = ang.into();\n\n })\n\n}\n", "file_path": "examples/vulkan/movement.rs", "rank": 15, "score": 196607.02465269054 }, { "content": "pub fn wrap_around_system(world: SubWorld<&mut Position>) {\n\n world.native_query().iter().for_each(|(_, pos)| {\n\n if pos.y < -100.0 {\n\n pos.y = 100.0\n\n }\n\n });\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 16, "score": 196357.85433542827 }, { "content": "pub trait AtlasKey: Hash + std::fmt::Debug + PartialEq + Eq + PartialOrd + Ord + Clone {}\n\n\n\nimpl<T> AtlasKey for T where T: Hash + std::fmt::Debug + PartialEq + Eq + PartialOrd + Ord + Clone {}\n\n\n\nimpl<K> TextureAtlas<K>\n\nwhere\n\n K: AtlasKey,\n\n{\n\n /// Creates a new texture atlas of `extent`. All images will attempt to be\n\n /// packed. 
All images are expected to have the same number of channels.\n\n pub fn new(\n\n context: SharedVulkanContext,\n\n resources: &Resources,\n\n texture_info: &TextureInfo,\n\n channels: u32,\n\n images: Vec<(K, Image)>,\n\n padding: u32,\n\n ) -> Result<Self> {\n\n let mut packer = GroupedRectsToPlace::<K, BinId>::new();\n\n let extent = texture_info.extent;\n", "file_path": "ivy-graphics/src/atlas.rs", "rank": 17, "score": 190541.051978258 }, { "content": "pub fn destroy(debug_utils: &DebugUtils, messenger: DebugUtilsMessengerEXT) {\n\n unsafe { debug_utils.destroy_debug_utils_messenger(messenger, None) };\n\n}\n\n\n\n// Debug callback\n\nunsafe extern \"system\" fn debug_callback(\n\n message_severity: vk::DebugUtilsMessageSeverityFlagsEXT,\n\n _message_types: vk::DebugUtilsMessageTypeFlagsEXT,\n\n p_callback_data: *const vk::DebugUtilsMessengerCallbackDataEXT,\n\n _p_user_data: *mut c_void,\n\n) -> vk::Bool32 {\n\n let msg = CStr::from_ptr((*p_callback_data).p_message)\n\n .to_str()\n\n .unwrap_or(\"Invalid UTF-8\");\n\n match message_severity {\n\n vk::DebugUtilsMessageSeverityFlagsEXT::ERROR => log::error!(\"{}\", msg),\n\n vk::DebugUtilsMessageSeverityFlagsEXT::WARNING => log::warn!(\"{}\", msg),\n\n vk::DebugUtilsMessageSeverityFlagsEXT::INFO => log::info!(\"{}\", msg),\n\n vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE => log::trace!(\"{}\", msg),\n\n _ => log::trace!(\"{}\", msg),\n\n };\n\n vk::FALSE\n\n}\n", "file_path": "ivy-vulkan/src/debug_utils.rs", "rank": 18, "score": 186112.33040564193 }, { "content": "// Calculates the heuristic distance of a face to a ray\n\npub fn ray_distance(p: SupportPoint, normal: Vec3, ray: &Ray) -> f32 {\n\n plane_intersect(p.support, normal, ray.dir()).dot(ray.dir()) * -normal.dot(ray.dir()).signum()\n\n}\n", "file_path": "ivy-collision/src/util.rs", "rank": 19, "score": 183061.4927138118 }, { "content": "pub fn gravity_system(world: SubWorld<&mut Velocity>, dt: Read<DeltaTime>) {\n\n world\n\n .native_query()\n\n 
.iter()\n\n .for_each(|(_, vel)| vel.y -= 1.0 * **dt)\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 20, "score": 178954.40125976963 }, { "content": "// Set directory to nth parent of current executable\n\npub fn normalize_dir(nth: usize) -> anyhow::Result<()> {\n\n let current_exe = env::current_exe()?\n\n .canonicalize()\n\n .context(\"Failed to canonicalize current exe\")?;\n\n\n\n let dir = (0..nth + 1)\n\n .fold(Some(current_exe.as_path()), |acc, _| {\n\n acc.and_then(|val| val.parent())\n\n })\n\n .context(\"Failed to get parent dir of executable\")?;\n\n\n\n env::set_current_dir(dir).context(\"Failed to set current directory\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "ivy-base/src/dir.rs", "rank": 21, "score": 175892.20622900187 }, { "content": "fn draw_subtree(world: &impl GenericWorld, root: Entity, gizmos: &mut Gizmos) -> Result<()> {\n\n let parent_pos = world.try_get::<Position>(root)?;\n\n\n\n world\n\n .children::<Connection>(root)\n\n .try_for_each(|child| -> Result<()> {\n\n let mut query = world.try_query_one::<(&Position, &ConnectionKind)>(child)?;\n\n let (pos, kind) = query\n\n .get()\n\n .expect(\"Failed to execute query in draw_connections\");\n\n\n\n let color = match kind {\n\n ConnectionKind::Rigid => Color::green(),\n\n ConnectionKind::Spring {\n\n strength: _,\n\n dampening: _,\n\n } => Color::red(),\n\n };\n\n\n\n gizmos.draw(ivy_base::Gizmo::Line {\n", "file_path": "ivy-physics/src/connections/systems.rs", "rank": 22, "score": 175821.7494061722 }, { "content": "/// Returns the root of the rigid system, along with its mass\n\npub fn get_rigid_root(world: &impl GenericWorld, child: Entity) -> Result<(Entity, Mass)> {\n\n let mut system_mass = match world.try_get::<Mass>(child) {\n\n Ok(mass) => *mass,\n\n Err(_) => {\n\n panic!(\"No mass in leaf\");\n\n }\n\n };\n\n\n\n let mut root = child;\n\n\n\n for val in world.ancestors::<Connection>(child) {\n\n root = val;\n\n system_mass += match world.try_get::<Mass>(val) {\n\n 
Ok(mass) => *mass,\n\n Err(_) => break,\n\n };\n\n\n\n match *world.try_get::<ConnectionKind>(child)? {\n\n ConnectionKind::Rigid => {}\n\n ConnectionKind::Spring {\n\n strength: _,\n\n dampening: _,\n\n } => break,\n\n };\n\n }\n\n\n\n Ok((root, system_mass))\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 23, "score": 174141.66934726303 }, { "content": "pub fn integrate_velocity(world: SubWorld<(&mut Position, &Velocity)>, dt: Read<DeltaTime>) {\n\n world\n\n .native_query()\n\n .iter()\n\n .for_each(|(_, (pos, vel))| *pos += Position(**vel * **dt));\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 24, "score": 174036.19842086194 }, { "content": "pub fn create(entry: &Entry, instance: &Instance) -> Result<(DebugUtils, DebugUtilsMessengerEXT)> {\n\n let debug_utils = DebugUtils::new(entry, instance);\n\n\n\n let create_info = vk::DebugUtilsMessengerCreateInfoEXT::builder()\n\n .message_severity(\n\n vk::DebugUtilsMessageSeverityFlagsEXT::ERROR\n\n | vk::DebugUtilsMessageSeverityFlagsEXT::INFO\n\n | vk::DebugUtilsMessageSeverityFlagsEXT::WARNING\n\n | vk::DebugUtilsMessageSeverityFlagsEXT::VERBOSE,\n\n )\n\n .message_type(\n\n vk::DebugUtilsMessageTypeFlagsEXT::GENERAL\n\n | vk::DebugUtilsMessageTypeFlagsEXT::VALIDATION\n\n | vk::DebugUtilsMessageTypeFlagsEXT::PERFORMANCE,\n\n )\n\n .pfn_user_callback(Some(debug_callback));\n\n\n\n let messenger = unsafe { debug_utils.create_debug_utils_messenger(&create_info, None)? 
};\n\n Ok((debug_utils, messenger))\n\n}\n\n\n", "file_path": "ivy-vulkan/src/debug_utils.rs", "rank": 25, "score": 172063.60326495575 }, { "content": "pub fn gravity(world: SubWorld<(&GravityInfluence, &Mass, &mut Effector)>, gravity: Read<Gravity>) {\n\n if gravity.length_squared() < TOLERANCE {\n\n return;\n\n }\n\n\n\n world\n\n .native_query()\n\n .iter()\n\n .for_each(|(_, (influence, mass, effector))| {\n\n effector.apply_force(**gravity * **influence * **mass)\n\n })\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 26, "score": 171315.68330251193 }, { "content": "fn nearest_power_2(val: u32) -> u32 {\n\n let mut result = 1;\n\n while result < val {\n\n result *= 2;\n\n }\n\n result\n\n}\n\n\n\nimpl LoadResource for Font {\n\n type Info = FontInfo;\n\n\n\n type Error = Error;\n\n\n\n fn load(resources: &Resources, info: &Self::Info) -> Result<Self> {\n\n let context = resources.get_default::<SharedVulkanContext>()?;\n\n\n\n let sampler = resources.load(SamplerInfo {\n\n address_mode: AddressMode::CLAMP_TO_BORDER,\n\n mag_filter: FilterMode::LINEAR,\n\n min_filter: FilterMode::LINEAR,\n\n unnormalized_coordinates: false,\n\n anisotropy: 0,\n\n mip_levels: 1,\n\n })??;\n\n\n\n Self::new(context.clone(), resources, sampler, info)\n\n }\n\n}\n", "file_path": "ivy-ui/src/font.rs", "rank": 27, "score": 165178.40680578345 }, { "content": "fn nearest_power_2(val: u32) -> u32 {\n\n let mut result = 1;\n\n while result < val {\n\n result *= 2;\n\n }\n\n result\n\n}\n", "file_path": "ivy-graphics/src/base_renderer/pass.rs", "rank": 28, "score": 162341.37333850504 }, { "content": "/// Creates a vulkan instance with the appropriate extensions and layers\n\npub fn create(\n\n entry: &Entry,\n\n extensions: &[String],\n\n name: &str,\n\n engine_name: &str,\n\n) -> Result<Instance> {\n\n let name = CString::new(name).unwrap();\n\n let engine_name = CString::new(engine_name).unwrap();\n\n\n\n let app_info = vk::ApplicationInfo::builder()\n\n 
.application_name(&name)\n\n .engine_name(&engine_name);\n\n\n\n let extensions = extensions\n\n .iter()\n\n .cloned()\n\n .map(CString::new)\n\n .chain(INSTANCE_EXTENSIONS.iter().map(|s| CString::new(*s)))\n\n .collect::<std::result::Result<Vec<_>, _>>()\n\n .unwrap();\n", "file_path": "ivy-vulkan/src/instance.rs", "rank": 29, "score": 156363.0156697362 }, { "content": "/// Copies the contents of one buffer to another\n\n/// `commandpool`: pool to allocate transfer command buffer\n\n/// Does not wait for operation to complete\n\npub fn copy(\n\n commandpool: &CommandPool,\n\n queue: vk::Queue,\n\n src_buffer: vk::Buffer,\n\n dst_buffer: vk::Buffer,\n\n size: DeviceSize,\n\n offset: DeviceSize,\n\n) -> Result<()> {\n\n let region = vk::BufferCopy {\n\n src_offset: 0,\n\n dst_offset: offset,\n\n size,\n\n };\n\n\n\n commandpool.single_time_command(queue, |commandbuffer| {\n\n commandbuffer.copy_buffer(src_buffer, dst_buffer, &[region]);\n\n })\n\n}\n\n\n", "file_path": "ivy-vulkan/src/buffer.rs", "rank": 30, "score": 156363.0156697362 }, { "content": "/// Creates a logical device by choosing the best appropriate physical device\n\npub fn create(\n\n instance: &Instance,\n\n surface: Option<(&Surface, SurfaceKHR)>,\n\n layers: &[&str],\n\n) -> Result<(Arc<Device>, PhysicalDeviceInfo)> {\n\n let extensions = DEVICE_EXTENSIONS\n\n .iter()\n\n .map(|s| CString::new(*s))\n\n .collect::<std::result::Result<Vec<_>, _>>()\n\n .unwrap();\n\n\n\n let pdevice_info = pick_physical_device(instance, surface, &extensions)?;\n\n\n\n let mut unique_queue_families = HashSet::new();\n\n unique_queue_families.insert(pdevice_info.queue_families.graphics().unwrap());\n\n unique_queue_families.insert(pdevice_info.queue_families.present().unwrap());\n\n\n\n let queue_create_infos: Vec<_> = unique_queue_families\n\n .iter()\n\n .map(|index| {\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 31, "score": 156363.0156697362 }, { "content": "pub fn copy_to_image(\n\n commandpool: 
&CommandPool,\n\n queue: vk::Queue,\n\n buffer: vk::Buffer,\n\n image: vk::Image,\n\n layout: vk::ImageLayout,\n\n extent: Extent,\n\n) -> Result<()> {\n\n let region = vk::BufferImageCopy {\n\n buffer_offset: 0,\n\n buffer_row_length: 0,\n\n buffer_image_height: 0,\n\n image_subresource: vk::ImageSubresourceLayers {\n\n aspect_mask: vk::ImageAspectFlags::COLOR,\n\n mip_level: 0,\n\n base_array_layer: 0,\n\n layer_count: 1,\n\n },\n\n image_offset: vk::Offset3D { x: 0, y: 0, z: 0 },\n\n image_extent: vk::Extent3D {\n", "file_path": "ivy-vulkan/src/buffer.rs", "rank": 32, "score": 154553.31762878323 }, { "content": "/// Applies effectors to their respective entities and clears the effects.\n\npub fn apply_effectors(\n\n world: SubWorld<(RbQueryMut, &mut Position, &Rotation, &mut Effector)>,\n\n dt: Read<DeltaTime>,\n\n) {\n\n world\n\n .native_query()\n\n .without::<Static>()\n\n .iter()\n\n .for_each(|(_, (rb, pos, rot, effector))| {\n\n *rb.vel += effector.net_velocity_change(*rb.mass, **dt);\n\n *pos += effector.net_translation(rot);\n\n\n\n *rb.ang_vel += effector.net_angular_velocity_change(*rb.ang_mass, **dt);\n\n\n\n effector.clear()\n\n })\n\n}\n", "file_path": "ivy-physics/src/systems.rs", "rank": 33, "score": 154553.31762878323 }, { "content": "pub fn get_limits(\n\n instance: &Instance,\n\n physical_device: vk::PhysicalDevice,\n\n) -> vk::PhysicalDeviceLimits {\n\n let properties = unsafe { instance.get_physical_device_properties(physical_device) };\n\n\n\n properties.limits\n\n}\n\n\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 34, "score": 154553.31762878323 }, { "content": "pub fn handle_events(\n\n world: Write<World>,\n\n mut events: Write<Events>,\n\n mut state: Write<InteractiveState>,\n\n cursor_pos: Read<Position2D>,\n\n intercepted_events: impl Iterator<Item = InputEvent>,\n\n control_events: impl Iterator<Item = UIControl>,\n\n) {\n\n control_events.for_each(|event| match event {\n\n UIControl::Focus(widget) => state.set_focus(widget, 
true, &mut events),\n\n });\n\n\n\n let hovered = intersect_widget(&*world, *cursor_pos);\n\n\n\n let sticky = hovered\n\n .map(|val| world.get::<Sticky>(val).is_ok())\n\n .unwrap_or_default();\n\n\n\n for event in intercepted_events {\n\n let event = InputEvent::from(event);\n", "file_path": "ivy-ui/src/systems.rs", "rank": 35, "score": 154553.31762878323 }, { "content": "pub fn update_connection(\n\n kind: &ConnectionKind,\n\n offset_pos: &PositionOffset,\n\n offset_rot: &RotationOffset,\n\n child_trans: TransformQueryMut,\n\n rb: RbQueryMut,\n\n parent: &TransformBundle,\n\n parent_rb: &mut RbBundle,\n\n effector: &mut Effector,\n\n) {\n\n // The desired postion\n\n let pos = Position(parent.into_matrix().transform_point3(**offset_pos));\n\n let displacement = pos - *child_trans.pos;\n\n match kind {\n\n ConnectionKind::Rigid => {\n\n // The desired velocity\n\n let vel = point_vel(pos - parent.pos, parent_rb.ang_vel) + parent_rb.vel;\n\n\n\n let total_mass = *rb.mass + parent_rb.mass;\n\n\n", "file_path": "ivy-physics/src/connections/mod.rs", "rank": 36, "score": 152811.70992178126 }, { "content": "pub fn integrate_angular_velocity(\n\n world: SubWorld<(&mut Rotation, &AngularVelocity)>,\n\n dt: Read<DeltaTime>,\n\n) {\n\n world.native_query().into_iter().for_each(|(_, (rot, w))| {\n\n let mag = w.length();\n\n if mag > 0.0 {\n\n let w = Quat::from_axis_angle(w.0 / mag, mag * **dt);\n\n *rot = Rotation(w * rot.0);\n\n }\n\n });\n\n}\n\n\n", "file_path": "ivy-physics/src/systems.rs", "rank": 37, "score": 152811.70992178126 }, { "content": "pub fn update_connections(\n\n world: SubWorld<(\n\n &ConnectionKind,\n\n &PositionOffset,\n\n &RotationOffset,\n\n &mut Effector,\n\n HierarchyQuery<Connection>,\n\n RbQueryMut,\n\n TransformQueryMut,\n\n )>,\n\n) -> Result<()> {\n\n world\n\n .roots::<Connection>()?\n\n .into_iter()\n\n .try_for_each(|root| update_subtree(&world, root.0))\n\n}\n\n\n", "file_path": "ivy-physics/src/connections/systems.rs", "rank": 38, 
"score": 152811.70992178126 }, { "content": "/// Updates a connection that has no rigidbody\n\npub fn update_fixed(\n\n offset_pos: &PositionOffset,\n\n offset_rot: &RotationOffset,\n\n parent: &TransformBundle,\n\n child: TransformQueryMut,\n\n) {\n\n let pos = Position(parent.into_matrix().transform_point3(**offset_pos));\n\n *child.pos = pos;\n\n *child.rot = parent.rot * **offset_rot;\n\n}\n\n\n", "file_path": "ivy-physics/src/connections/mod.rs", "rank": 39, "score": 152811.70992178126 }, { "content": "/// Import the buffer data referenced by a glTF document.\n\npub fn import_buffer_data(\n\n document: &gltf::Document,\n\n mut blob: Option<Vec<u8>>,\n\n base: &Path,\n\n) -> Result<Vec<buffer::Data>> {\n\n document\n\n .buffers()\n\n .map(|buffer| {\n\n let mut data = match buffer.source() {\n\n buffer::Source::Uri(uri) => Scheme::read(base, uri),\n\n buffer::Source::Bin => blob.take().ok_or(Error::GltfImport(\n\n gltf::Error::MissingBlob,\n\n Some(base.to_owned()),\n\n )),\n\n }?;\n\n if data.len() < buffer.length() {\n\n return Err(Error::GltfImport(\n\n gltf::Error::BufferLength {\n\n buffer: buffer.index(),\n\n expected: buffer.length(),\n", "file_path": "ivy-graphics/src/document/util.rs", "rank": 40, "score": 151134.42063497554 }, { "content": "/// Import the image data referenced by a glTF document.\n\npub fn import_image_data(\n\n document: &gltf::Document,\n\n base: &Path,\n\n buffer_data: &[buffer::Data],\n\n resources: &Resources,\n\n) -> Result<Vec<Handle<Texture>>> {\n\n let context = resources.get_default::<SharedVulkanContext>()?;\n\n\n\n document\n\n .textures()\n\n .map(|tex| -> Result<Handle<Texture>> {\n\n match tex.source().source() {\n\n image::Source::Uri { uri, mime_type: _ } => {\n\n let data = Scheme::read(base, uri)?;\n\n\n\n let texture = Texture::from_memory(context.clone(), &data)?;\n\n Ok(resources.insert(texture)?)\n\n }\n\n image::Source::View { view, mime_type: _ } => {\n\n let parent_buffer_data = 
&buffer_data[view.buffer().index()].0;\n", "file_path": "ivy-graphics/src/document/util.rs", "rank": 41, "score": 151134.42063497554 }, { "content": "pub fn input_field_system(\n\n world: SubWorld<(&mut InputField, &mut Text)>,\n\n state: Read<InteractiveState>,\n\n reader: impl Iterator<Item = WidgetEvent>,\n\n mut events: Write<Events>,\n\n) -> Result<()> {\n\n let focused = match state.focused() {\n\n Some(val) => val,\n\n None => return Ok(()),\n\n };\n\n\n\n let mut query = world.query_one::<(&mut InputField, &mut Text)>(focused)?;\n\n let (field, text) = match query.get().ok() {\n\n Some(val) => val,\n\n None => return Ok(()),\n\n };\n\n\n\n reader.for_each(|event| match event.kind {\n\n WidgetEventKind::Focus(false)\n\n | WidgetEventKind::Key {\n", "file_path": "ivy-ui/src/input_field.rs", "rank": 42, "score": 151134.42063497554 }, { "content": "pub fn epa_ray<F: Fn(Vec3) -> SupportPoint>(\n\n support_func: F,\n\n simplex: Simplex,\n\n ray: &Ray,\n\n) -> Contact {\n\n let mut polytype =\n\n Polytype::from_simplex(&simplex, |a, b| Face::new_ray(a, b, ray, Vec3::ZERO));\n\n\n\n let mut iterations = 0;\n\n loop {\n\n // Find the face closest to the ray\n\n let (_index, max_face) = match polytype.find_furthest_face() {\n\n Some(val) => val,\n\n None => {\n\n unreachable!(\"No intersecting faces\");\n\n }\n\n };\n\n\n\n // Search in the normal of the face pointing against the ray\n\n\n", "file_path": "ivy-collision/src/epa/epa_ray.rs", "rank": 43, "score": 148763.8061763372 }, { "content": "pub fn write<B>(\n\n device: &Device,\n\n descriptor_set: vk::DescriptorSet,\n\n buffer: B,\n\n texture: &Texture,\n\n sampler: &Sampler,\n\n) where\n\n B: AsRef<vk::Buffer>,\n\n{\n\n let buffer_info = vk::DescriptorBufferInfo {\n\n buffer: *buffer.as_ref(),\n\n offset: 0,\n\n range: vk::WHOLE_SIZE,\n\n };\n\n\n\n let image_info = vk::DescriptorImageInfo {\n\n sampler: sampler.sampler(),\n\n image_view: texture.image_view(),\n\n image_layout: 
vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL,\n\n };\n", "file_path": "ivy-vulkan/src/descriptors/mod.rs", "rank": 44, "score": 148045.99945193686 }, { "content": "pub trait Storage: 'static + Send {\n\n fn as_any(&self) -> &dyn std::any::Any;\n\n\n\n fn as_any_mut(&mut self) -> &mut dyn std::any::Any;\n\n}\n\n\n\nimpl<T> Storage for T\n\nwhere\n\n T: 'static + Sized + Send,\n\n{\n\n #[inline]\n\n fn as_any(&self) -> &dyn std::any::Any {\n\n self as &dyn std::any::Any\n\n }\n\n\n\n #[inline]\n\n fn as_any_mut(&mut self) -> &mut dyn std::any::Any {\n\n self as &mut dyn std::any::Any\n\n }\n\n}\n", "file_path": "ivy-resources/src/cell.rs", "rank": 45, "score": 146442.632375423 }, { "content": "/// Represents a node in the renderpass.\n\npub trait Node: 'static + Send {\n\n /// Returns the color attachments for this node. Should not be execution heavy function\n\n fn color_attachments(&self) -> &[AttachmentInfo] {\n\n &[]\n\n }\n\n /// Returns the read attachments for this node. Should not be execution heavy function\n\n fn read_attachments(&self) -> &[Handle<Texture>] {\n\n &[]\n\n }\n\n /// Partially sampled input attachments. Read from the same pixel coord we write to\n\n fn input_attachments(&self) -> &[Handle<Texture>] {\n\n &[]\n\n }\n\n /// Returns the optional depth attachment for this node. 
Should not be execution heavy function\n\n fn depth_attachment(&self) -> Option<&AttachmentInfo> {\n\n None\n\n }\n\n\n\n fn buffer_reads(&self) -> &[Buffer] {\n\n &[]\n", "file_path": "ivy-rendergraph/src/node.rs", "rank": 46, "score": 146442.632375423 }, { "content": "pub fn destroy(device: &Device) {\n\n unsafe { device.destroy_device(None) };\n\n}\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 47, "score": 143960.87491347129 }, { "content": "pub fn destroy(instance: &Instance) {\n\n unsafe { instance.destroy_instance(None) };\n\n}\n\n\n", "file_path": "ivy-vulkan/src/instance.rs", "rank": 48, "score": 143960.87491347129 }, { "content": "pub fn create() -> Result<Entry> {\n\n unsafe { Entry::load().map_err(|_| Error::LibLoading) }\n\n}\n", "file_path": "ivy-vulkan/src/entry.rs", "rank": 49, "score": 143960.87491347129 }, { "content": "fn calculate_mip_levels(extent: Extent) -> u32 {\n\n (extent.width.max(extent.height) as f32).log2().floor() as u32 + 1\n\n}\n\n\n", "file_path": "ivy-vulkan/src/texture.rs", "rank": 50, "score": 143081.86025208852 }, { "content": "#[inline]\n\npub fn support<T: CollisionPrimitive>(\n\n transform: &Mat4,\n\n transform_inv: &Mat4,\n\n coll: &T,\n\n dir: Vec3,\n\n) -> Position {\n\n transform\n\n .transform_point3(coll.support(transform_inv.transform_vector3(dir).normalize()))\n\n .into()\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 51, "score": 142283.58562666556 }, { "content": "pub trait RendererKey: std::hash::Hash + std::cmp::Eq + Copy {}\n\n\n\nimpl<T> RendererKey for T where T: std::hash::Hash + std::cmp::Eq + Copy {}\n\n\n", "file_path": "ivy-graphics/src/base_renderer/mod.rs", "rank": 52, "score": 141745.34398098727 }, { "content": "/// Updates the view matrix from camera [ `Position` ] and optional [ `Rotation` ]\n\npub fn update_view_matrices(world: &World) {\n\n world\n\n .query::<(&mut Camera, &Position, Option<&Rotation>)>()\n\n .into_iter()\n\n .for_each(|(_, (camera, position, rotation))| {\n\n 
let view = match rotation {\n\n Some(rotation) => (Mat4::from_translation(**position)\n\n * rotation.into_matrix()\n\n * Mat4::from_rotation_y(DEG_180))\n\n .inverse(),\n\n\n\n None => Mat4::from_translation(-**position) * Mat4::from_rotation_y(DEG_180),\n\n };\n\n\n\n camera.set_view(view);\n\n })\n\n}\n", "file_path": "ivy-graphics/src/systems.rs", "rank": 53, "score": 140667.1163924266 }, { "content": "/// Updates all UI trees and applies constraints.\n\n/// Also updates canvas cameras.\n\npub fn update(world: &World) -> Result<()> {\n\n world.roots::<Widget>()?.iter().try_for_each(|(root, _)| {\n\n apply_constraints(\n\n world,\n\n root,\n\n Position2D::default(),\n\n Size2D::new(1.0, 1.0),\n\n true,\n\n )?;\n\n\n\n if world.get::<Canvas>(root).is_ok() {\n\n update_canvas(world, root)?;\n\n }\n\n\n\n update_from(world, root, 1)\n\n })\n\n}\n\n\n\npub(crate) fn update_from(world: &impl GenericWorld, parent: Entity, depth: u32) -> Result<()> {\n\n let mut query =\n", "file_path": "ivy-ui/src/systems.rs", "rank": 54, "score": 138742.97272512165 }, { "content": "/// Describes a type which can send events. Implemented for mpsc::channel and crossbeam channel.\n\npub trait EventSender<T>: 'static + Send {\n\n /// Send an event. 
Returns true if receiver is still alive.\n\n fn send(&self, event: T) -> bool;\n\n}\n\n\n\nimpl<T: Event> EventSender<T> for mpsc::Sender<T> {\n\n fn send(&self, event: T) -> bool {\n\n self.send(event).is_ok()\n\n }\n\n}\n\n\n\n#[cfg(feature = \"crossbeam-channel\")]\n\nimpl<T: Event> EventSender<T> for crossbeam_channel::Sender<T> {\n\n fn send(&self, event: T) -> bool {\n\n self.send(event).is_ok()\n\n }\n\n}\n\n\n\nimpl<T: Event> EventSender<T> for flume::Sender<T> {\n\n fn send(&self, event: T) -> bool {\n\n self.send(event).is_ok()\n\n }\n\n}\n\n\n", "file_path": "ivy-base/src/events/dispatcher.rs", "rank": 55, "score": 137895.0033766065 }, { "content": "// Returns the currently enabled instance layers\n\npub fn get_layers() -> &'static [&'static str] {\n\n if ENABLE_VALIDATION_LAYERS {\n\n VALIDATION_LAYERS\n\n } else {\n\n &[]\n\n }\n\n}\n\n\n", "file_path": "ivy-vulkan/src/instance.rs", "rank": 56, "score": 137131.74438317528 }, { "content": "pub fn wait_idle(device: &Device) -> Result<()> {\n\n // log::debug!(\"Device wait idle\");\n\n unsafe { device.device_wait_idle()? 
}\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 57, "score": 137126.50349088272 }, { "content": "pub fn max_axis(val: Vec3) -> Vec3 {\n\n if val.x > val.y {\n\n if val.x > val.z {\n\n Vec3::new(1.0, 0.0, 0.0)\n\n } else {\n\n Vec3::new(0.0, 0.0, 1.0)\n\n }\n\n } else if val.y > val.z {\n\n Vec3::new(0.0, 1.0, 0.0)\n\n } else {\n\n Vec3::new(0.0, 0.0, 1.0)\n\n }\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 58, "score": 137126.50349088272 }, { "content": "pub fn max_axis_abs(val: Vec3) -> Vec3 {\n\n if val.x.abs() > val.y.abs() {\n\n if val.x > val.z {\n\n Vec3::new(1.0, 0.0, 0.0)\n\n } else {\n\n Vec3::new(0.0, 0.0, 1.0)\n\n }\n\n } else if val.y.abs() > val.z.abs() {\n\n Vec3::new(0.0, 1.0, 0.0)\n\n } else {\n\n Vec3::new(0.0, 0.0, 1.0)\n\n }\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 59, "score": 135567.60513170686 }, { "content": "pub fn destroy(device: &Device, fence: Fence) {\n\n unsafe { device.destroy_fence(fence, None) }\n\n}\n", "file_path": "ivy-vulkan/src/fence.rs", "rank": 60, "score": 134028.33645561745 }, { "content": "pub fn resolve_collisions<I: Iterator<Item = Collision>>(\n\n world: SubWorld<(\n\n RbQuery,\n\n &Position,\n\n &mut Effector,\n\n HierarchyQuery<Connection>,\n\n &ConnectionKind,\n\n )>,\n\n mut collisions: I,\n\n dt: Read<DeltaTime>,\n\n _events: Read<Events>, // Wait for events\n\n) -> Result<()> {\n\n collisions.try_for_each(|coll| -> Result<()> {\n\n // Ignore triggers\n\n if coll.a.is_trigger || coll.b.is_trigger {\n\n return Ok(());\n\n }\n\n // Check for static collision\n\n else if coll.a.is_static {\n\n return resolve_static(\n", "file_path": "ivy-physics/src/systems.rs", "rank": 61, "score": 134028.33645561745 }, { "content": "pub fn epa<F: Fn(Vec3) -> SupportPoint>(support_func: F, simplex: Simplex) -> Contact {\n\n assert_eq!(simplex.points().len(), 4);\n\n let mut polytype = Polytype::new(\n\n simplex.points(),\n\n &[0, 1, 2, 0, 3, 1, 0, 2, 3, 1, 3, 2],\n\n 
Face::new,\n\n );\n\n\n\n let mut iterations = 0;\n\n loop {\n\n let (_, min) = match polytype.find_closest_face() {\n\n Some(val) => val,\n\n None => {\n\n // eprintln!(\"The two shapes are the same\");\n\n let p = support_func(Vec3::X);\n\n return Contact {\n\n points: ContactPoints::double(p.a, p.b),\n\n depth: p.support.length(),\n\n normal: p.support.normalize(),\n\n };\n", "file_path": "ivy-collision/src/epa/epa3d.rs", "rank": 62, "score": 132668.0307881278 }, { "content": "#[derive(Debug, Clone)]\n\nstruct DebugReport {\n\n framerate: f32,\n\n min_frametime: Duration,\n\n avg_frametime: Duration,\n\n max_frametime: Duration,\n\n elapsed: Duration,\n\n position: Position,\n\n}\n\n\n\nimpl Default for DebugReport {\n\n fn default() -> Self {\n\n Self {\n\n framerate: 0.0,\n\n min_frametime: Duration::from_secs(u64::MAX),\n\n avg_frametime: Duration::from_secs(0),\n\n max_frametime: Duration::from_secs(u64::MIN),\n\n elapsed: Duration::from_secs(0),\n\n position: Default::default(),\n\n }\n\n }\n", "file_path": "examples/vulkan/main.rs", "rank": 63, "score": 132618.76436969306 }, { "content": "struct DebugLayer {\n\n elapsed: Clock,\n\n last_status: Clock,\n\n frequency: Duration,\n\n\n\n min: Duration,\n\n max: Duration,\n\n\n\n framecount: usize,\n\n}\n\n\n\nimpl DebugLayer {\n\n fn new(\n\n _world: &mut World,\n\n _resources: &Resources,\n\n _events: &mut Events,\n\n frequency: Duration,\n\n ) -> anyhow::Result<Self> {\n\n log::debug!(\"Created debug layer\");\n\n Ok(Self {\n", "file_path": "examples/vulkan/main.rs", "rank": 64, "score": 132613.48481936118 }, { "content": "/// Performs a gjk intersection test.\n\n/// Returns true if the shapes intersect.\n\npub fn gjk<A: CollisionPrimitive, B: CollisionPrimitive>(\n\n a_transform: &Mat4,\n\n b_transform: &Mat4,\n\n a_transform_inv: &Mat4,\n\n b_transform_inv: &Mat4,\n\n a_coll: &A,\n\n b_coll: &B,\n\n) -> (bool, Simplex) {\n\n // Get first support function in direction of separation\n\n // let dir = (a_pos - 
b_pos).normalized();\n\n let dir = Vec3::X;\n\n let a = minkowski_diff(\n\n a_transform,\n\n b_transform,\n\n a_transform_inv,\n\n b_transform_inv,\n\n a_coll,\n\n b_coll,\n\n dir,\n\n );\n", "file_path": "ivy-collision/src/gjk.rs", "rank": 65, "score": 132474.5460607707 }, { "content": "pub fn init() -> Result<Arc<RwLock<Glfw>>> {\n\n Ok(Arc::new(RwLock::new(glfw::init(glfw::FAIL_ON_ERRORS)?)))\n\n}\n\n\n\nimpl Window {\n\n pub fn new(\n\n glfw: Arc<RwLock<Glfw>>,\n\n info: WindowInfo,\n\n ) -> Result<(Window, Receiver<(f64, WindowEvent)>)> {\n\n let mut glfw_mut = glfw.write();\n\n glfw_mut.window_hint(WindowHint::ClientApi(ClientApiHint::NoApi));\n\n\n\n glfw_mut.window_hint(WindowHint::Resizable(info.resizable));\n\n\n\n let (mut window, events) = match info.mode {\n\n WindowMode::Windowed(extent) => glfw_mut\n\n .create_window(\n\n extent.width,\n\n extent.height,\n\n info.title.as_ref(),\n", "file_path": "ivy-window/src/lib.rs", "rank": 66, "score": 132469.43809644156 }, { "content": "pub fn intersect<A: CollisionPrimitive, B: CollisionPrimitive>(\n\n a_transform: &Mat4,\n\n b_transform: &Mat4,\n\n a: &A,\n\n b: &B,\n\n) -> Option<Contact> {\n\n let a_transform_inv = a_transform.inverse();\n\n let b_transform_inv = b_transform.inverse();\n\n\n\n let (intersect, simplex) = gjk(\n\n a_transform,\n\n b_transform,\n\n &a_transform_inv,\n\n &b_transform_inv,\n\n a,\n\n b,\n\n );\n\n\n\n if intersect {\n\n Some(epa(\n", "file_path": "ivy-collision/src/collision.rs", "rank": 67, "score": 132469.43809644156 }, { "content": "pub fn destroy(surface_loader: &Surface, surface: SurfaceKHR) {\n\n unsafe { surface_loader.destroy_surface(surface, None) };\n\n}\n", "file_path": "ivy-vulkan/src/surface.rs", "rank": 68, "score": 130965.08882575147 }, { "content": "#[inline]\n\npub fn minkowski_diff<A: CollisionPrimitive, B: CollisionPrimitive>(\n\n a_transform: &Mat4,\n\n b_transform: &Mat4,\n\n a_transform_inv: &Mat4,\n\n b_transform_inv: &Mat4,\n\n a_coll: &A,\n\n b_coll: 
&B,\n\n dir: Vec3,\n\n) -> SupportPoint {\n\n let a = support(a_transform, a_transform_inv, a_coll, dir);\n\n let b = support(b_transform, b_transform_inv, b_coll, -dir);\n\n\n\n SupportPoint {\n\n support: *a - *b,\n\n a,\n\n b,\n\n }\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 69, "score": 130965.08882575147 }, { "content": "struct DisplayDebugReport;\n\n\n", "file_path": "examples/vulkan/main.rs", "rank": 70, "score": 130716.20494373926 }, { "content": "/// Calculates the perpendicular velocity of a point rotating around origin.\n\npub fn point_vel(p: Position, w: AngularVelocity) -> Velocity {\n\n if w.length_squared() < std::f32::EPSILON {\n\n Velocity::default()\n\n } else {\n\n Velocity(-p.cross(*w))\n\n }\n\n}\n", "file_path": "ivy-physics/src/util.rs", "rank": 71, "score": 129735.65995479986 }, { "content": "pub fn destroy(device: &Device, semaphore: vk::Semaphore) {\n\n unsafe { device.destroy_semaphore(semaphore, None) }\n\n}\n", "file_path": "ivy-vulkan/src/semaphore.rs", "rank": 72, "score": 129735.65995479986 }, { "content": "pub fn reset(device: &Device, fences: &[Fence]) -> Result<()> {\n\n unsafe { device.reset_fences(fences)? }\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/fence.rs", "rank": 73, "score": 129735.65995479986 }, { "content": "pub fn project_plane(a: Vec3, normal: Vec3) -> Vec3 {\n\n a - normal * a.dot(normal)\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 74, "score": 129735.65995479986 }, { "content": "pub fn create(device: &Device) -> Result<vk::Semaphore> {\n\n let create_info = vk::SemaphoreCreateInfo {\n\n s_type: vk::StructureType::SEMAPHORE_CREATE_INFO,\n\n p_next: std::ptr::null(),\n\n flags: vk::SemaphoreCreateFlags::default(),\n\n };\n\n\n\n let semaphore = unsafe { device.create_semaphore(&create_info, None)? 
};\n\n Ok(semaphore)\n\n}\n\n\n", "file_path": "ivy-vulkan/src/semaphore.rs", "rank": 75, "score": 129735.65995479986 }, { "content": "/// Creates a pipeline layout from shader reflection.\n\npub fn reflect<S: AsRef<spirv_reflect::ShaderModule>>(\n\n context: &VulkanContext,\n\n modules: &[S],\n\n override_sets: &[DescriptorLayoutInfo],\n\n) -> Result<vk::PipelineLayout> {\n\n let mut sets: [DescriptorLayoutInfo; MAX_SETS] = Default::default();\n\n\n\n let mut push_constant_ranges: SmallVec<[PushConstantRange; MAX_PUSH_CONSTANTS]> =\n\n Default::default();\n\n\n\n for module in modules {\n\n let module = module.as_ref();\n\n\n\n let stage_flags = vk::ShaderStageFlags::from_raw(module.get_shader_stage().bits());\n\n let bindings = module\n\n .enumerate_descriptor_bindings(None)\n\n .map_err(|msg| Error::SpirvReflection(msg))?;\n\n\n\n for binding in bindings {\n\n sets[binding.set as usize].insert(descriptors::DescriptorSetBinding {\n", "file_path": "ivy-vulkan/src/pipeline/shader.rs", "rank": 76, "score": 129517.7005897061 }, { "content": "#[derive(Default, Debug, Clone, PartialEq)]\n\nstruct AnimationState {\n\n animation: Handle<Animation>,\n\n states: BTreeMap<ChannelIndex, Frame>,\n\n repeat: bool,\n\n time: f32,\n\n playing: bool,\n\n influence: f32,\n\n}\n\n\n\nimpl AnimationState {\n\n pub fn new(animation: Handle<Animation>, repeat: bool, influence: f32) -> Self {\n\n Self {\n\n animation,\n\n states: BTreeMap::new(),\n\n repeat,\n\n playing: true,\n\n time: 0.0,\n\n influence,\n\n }\n\n }\n", "file_path": "ivy-graphics/src/animation/animator.rs", "rank": 77, "score": 128918.67438024143 }, { "content": "// Transitions image layout from one layout to another using a pipeline barrier\n\nfn transition_layout(\n\n commandpool: &CommandPool,\n\n queue: vk::Queue,\n\n image: vk::Image,\n\n mip_levels: u32,\n\n old_layout: vk::ImageLayout,\n\n new_layout: vk::ImageLayout,\n\n) -> Result<()> {\n\n let (src_access_mask, dst_access_mask, src_stage_mask, 
dst_stage_mask) =\n\n match (old_layout, new_layout) {\n\n (vk::ImageLayout::UNDEFINED, vk::ImageLayout::TRANSFER_DST_OPTIMAL) => (\n\n vk::AccessFlags::default(),\n\n vk::AccessFlags::TRANSFER_WRITE,\n\n vk::PipelineStageFlags::TOP_OF_PIPE,\n\n vk::PipelineStageFlags::TRANSFER,\n\n ),\n\n\n\n (vk::ImageLayout::TRANSFER_DST_OPTIMAL, vk::ImageLayout::SHADER_READ_ONLY_OPTIMAL) => (\n\n vk::AccessFlags::TRANSFER_WRITE,\n\n vk::AccessFlags::SHADER_READ,\n", "file_path": "ivy-vulkan/src/texture.rs", "rank": 78, "score": 128540.04627034413 }, { "content": "/// Updates the canvas view and projection\n\npub fn update_canvas(world: &World, canvas: Entity) -> Result<()> {\n\n let mut camera_query = world.try_query_one::<(&mut Camera, &Size2D, &Position2D)>(canvas)?;\n\n\n\n let (camera, size, position) = camera_query.get()?;\n\n\n\n camera.set_orthographic(size.x * 2.0, size.y * 2.0, 0.0, 100.0);\n\n camera.set_view(Mat4::from_translation(-position.extend(0.0)));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-ui/src/systems.rs", "rank": 79, "score": 128231.31068410978 }, { "content": "pub fn create_loader(entry: &Entry, instance: &Instance) -> Surface {\n\n Surface::new(entry, instance)\n\n}\n\n\n", "file_path": "ivy-vulkan/src/surface.rs", "rank": 80, "score": 128231.31068410978 }, { "content": "fn update_subtree(world: &impl GenericWorld, root: Entity) -> Result<()> {\n\n let mut query = world.try_query_one::<(TransformQuery, Option<RbQuery>)>(root)?;\n\n\n\n if let Ok((parent_trans, rb)) = query.get() {\n\n let parent_trans = parent_trans.into_owned();\n\n let mut parent_rb = rb.map(|val| RbBundle {\n\n vel: *val.vel,\n\n mass: *val.mass,\n\n ang_mass: *val.ang_mass,\n\n ang_vel: *val.ang_vel,\n\n resitution: *val.resitution,\n\n effector: Effector::new(),\n\n });\n\n\n\n drop(query);\n\n\n\n world\n\n .children::<Connection>(root)\n\n .try_for_each(|child| -> Result<_> {\n\n let mut fixed = world\n", "file_path": "ivy-physics/src/connections/systems.rs", "rank": 81, 
"score": 127427.2265414035 }, { "content": "/// Returns an optional intersection between a triangle and a ray\n\n/// Assumes the points are relative to the ray origin\n\npub fn check_triangle_intersect(points: &[Vec3], dir: Vec3) -> bool {\n\n let [a, b, c] = [points[0], points[1], points[2]];\n\n\n\n let ab = b - a;\n\n let ac = c - a;\n\n let a0 = -a;\n\n\n\n let ab = project_plane(ab, dir);\n\n let ac = project_plane(ac, dir);\n\n let a0 = project_plane(a0, dir);\n\n\n\n let perp = triple_prod(ac, ab, ab);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return false;\n\n }\n\n let perp = triple_prod(ab, ac, ac);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return false;\n\n }\n\n\n\n true\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 82, "score": 126783.71958564845 }, { "content": "pub fn create(device: &Device, signaled: bool) -> Result<Fence> {\n\n let create_info = vk::FenceCreateInfo {\n\n s_type: vk::StructureType::FENCE_CREATE_INFO,\n\n p_next: std::ptr::null(),\n\n flags: if signaled {\n\n vk::FenceCreateFlags::SIGNALED\n\n } else {\n\n vk::FenceCreateFlags::default()\n\n },\n\n };\n\n\n\n let fence = unsafe { device.create_fence(&create_info, None)? };\n\n Ok(fence)\n\n}\n\n\n", "file_path": "ivy-vulkan/src/fence.rs", "rank": 83, "score": 125801.20590221451 }, { "content": "fn nearest_power_2(val: usize) -> usize {\n\n let mut result = 1;\n\n while result < val {\n\n result *= 2;\n\n }\n\n result\n\n}\n\n\n", "file_path": "ivy-ui/src/text_renderer.rs", "rank": 84, "score": 125477.13595021484 }, { "content": "/// Gets the normal of a direction vector with a reference point. 
Normal will\n\n/// face the same direciton as reference\n\npub fn triple_prod(a: Vec3, b: Vec3, c: Vec3) -> Vec3 {\n\n a.cross(b).cross(c).normalize()\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 85, "score": 125131.18725448531 }, { "content": "/// Returns an optional intersection between a triangle and a ray\n\npub fn triangle_ray(points: &[Vec3], ray: &Ray) -> Option<Vec3> {\n\n let [a, b, c] = [points[0], points[1], points[2]];\n\n\n\n let ab = b - a;\n\n let ac = c - a;\n\n let a0 = -a;\n\n\n\n let ab = project_plane(ab, ray.dir());\n\n let ac = project_plane(ac, ray.dir());\n\n let a0 = project_plane(a0, ray.dir());\n\n\n\n let perp = triple_prod(ac, ab, ab);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return None;\n\n }\n\n let perp = triple_prod(ab, ac, ac);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return None;\n\n }\n\n\n\n let normal = (b - a).cross(c - a).normalize();\n\n Some(plane_ray(a, normal, ray))\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 86, "score": 124353.83266650133 }, { "content": "/// Returns an optional intersection between a triangle and a ray\n\n/// Assumes the points are relative to the ray origin\n\npub fn triangle_intersect(points: &[Vec3], dir: Vec3) -> Option<Vec3> {\n\n let [a, b, c] = [points[0], points[1], points[2]];\n\n\n\n let ab = b - a;\n\n let ac = c - a;\n\n let a0 = -a;\n\n\n\n let ab = project_plane(ab, dir);\n\n let ac = project_plane(ac, dir);\n\n let a0 = project_plane(a0, dir);\n\n\n\n let perp = triple_prod(ac, ab, ab);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return None;\n\n }\n\n let perp = triple_prod(ab, ac, ac);\n\n\n\n if perp.dot(a0) > 0.0 {\n\n return None;\n\n }\n\n\n\n let normal = (b - a).cross(c - a).normalize();\n\n Some(plane_intersect(a, normal, dir))\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 87, "score": 124353.6148037532 }, { "content": "/// Installs PBR rendering for the specified camera. Returns a list of nodes suitable for\n\n/// rendergraph insertions. 
Configures gpu camera data, light management and\n\n/// environment manager and attaches them to the camera.\n\npub fn create_pbr_pipeline<GeometryPass, PostProcessingPass, EnvData, R>(\n\n context: SharedVulkanContext,\n\n world: &mut World,\n\n resources: &Resources,\n\n camera: Entity,\n\n renderer: R,\n\n extent: Extent,\n\n frames_in_flight: usize,\n\n read_attachments: &[Handle<Texture>],\n\n color_attachments: &[AttachmentInfo],\n\n bindables: &[&dyn MultiDescriptorBindable],\n\n info: PBRInfo<EnvData>,\n\n) -> ivy_rendergraph::Result<[Box<dyn Node>; 2]>\n\nwhere\n\n GeometryPass: ShaderPass,\n\n PostProcessingPass: ShaderPass,\n\n R: Renderer + Storage,\n\n R::Error: Storage + Into<anyhow::Error>,\n\n EnvData: Copy + Component,\n\n{\n", "file_path": "ivy-postprocessing/src/pbr/mod.rs", "rank": 88, "score": 124174.03098769975 }, { "content": "pub fn new_event_dispatcher<T: Event>() -> Box<dyn AnyEventDispatcher> {\n\n let dispatcher: EventDispatcher<T> = EventDispatcher::new();\n\n Box::new(dispatcher)\n\n}\n\n\n\npub struct ConcreteSender<T> {\n\n inner: Mutex<Box<dyn EventSender<T>>>,\n\n}\n\n\n\nimpl<T> ConcreteSender<T> {\n\n pub fn new<S: EventSender<T>>(sender: S) -> Self {\n\n Self {\n\n inner: Mutex::new(Box::new(sender)),\n\n }\n\n }\n\n}\n\n\n\nimpl<T: Event> EventSender<T> for ConcreteSender<T> {\n\n fn send(&self, event: T) -> bool {\n\n self.inner.lock().send(event)\n\n }\n\n}\n\n\n\nimpl<T: Event> AnyEventSender for ConcreteSender<T> {}\n", "file_path": "ivy-base/src/events/dispatcher.rs", "rank": 89, "score": 124018.35349757626 }, { "content": "fn setup_graphics(world: &mut World, resources: &Resources) -> anyhow::Result<Assets> {\n\n let pbr = presets::PBRRendering::setup(\n\n world,\n\n resources,\n\n PBRInfo {\n\n max_lights: 5,\n\n env_data: DefaultEnvData {\n\n ambient_radiance: Vec3::ONE * 0.01,\n\n fog_density: 0.05,\n\n fog_color: Vec3::new(0.0, 0.0, 0.0),\n\n fog_gradient: 2.0,\n\n },\n\n },\n\n FRAMES_IN_FLIGHT,\n\n )?;\n\n\n\n 
pbr.setup_pipelines(resources, presets::PipelinesInfo::default())?;\n\n\n\n Ok(Assets {\n\n geometry_pass: resources.default()?,\n\n text_pass: resources.default()?,\n\n ui_pass: resources.default()?,\n\n })\n\n}\n\n\n", "file_path": "examples/vulkan/main.rs", "rank": 90, "score": 123954.02209417563 }, { "content": "/// Returns the first widget that intersects the postiion\n\nfn intersect_widget(world: &impl GenericWorld, point: Position2D) -> Option<Entity> {\n\n world\n\n .try_query::<(&Position2D, &Size2D, &WidgetDepth, &Visible)>()\n\n .unwrap()\n\n .with::<Interactive>()\n\n .iter()\n\n .filter_map(|(e, (pos, size, depth, visible))| {\n\n if visible.is_visible() && box_intersection(*pos, *size, *point) {\n\n Some((e, depth))\n\n } else {\n\n None\n\n }\n\n })\n\n .max_by_key(|(_, depth)| *depth)\n\n .map(|(a, _)| a)\n\n}\n\n\n", "file_path": "ivy-ui/src/systems.rs", "rank": 91, "score": 123547.3358654755 }, { "content": "pub fn queue_wait_idle(device: &Device, queue: vk::Queue) -> Result<()> {\n\n // log::debug!(\"Queue wait idle\");\n\n unsafe { device.queue_wait_idle(queue)? 
}\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/device.rs", "rank": 92, "score": 122945.08911662566 }, { "content": "pub fn plane_ray(p: Vec3, normal: Vec3, ray: &Ray) -> Vec3 {\n\n plane_intersect(p - *ray.origin, normal, ray.dir()) + *ray.origin\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 93, "score": 122174.22385578947 }, { "content": "pub fn edge_intersect(p: Vec3, tangent: Vec3, ray: &Ray) -> Vec3 {\n\n // Path of the edge point in in tangent plane\n\n let projected = project_plane(p, tangent);\n\n\n\n ray.dir() * (p.length() / (projected.dot(ray.dir())))\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 94, "score": 122174.22385578947 }, { "content": "pub fn plane_intersect(p: Vec3, normal: Vec3, dir: Vec3) -> Vec3 {\n\n let rel = p;\n\n let along = -rel.dot(normal);\n\n let t = -dir.dot(normal);\n\n\n\n along * (dir / t)\n\n}\n\n\n", "file_path": "ivy-collision/src/util.rs", "rank": 95, "score": 122174.22385578947 }, { "content": "pub fn wait(device: &Device, fences: &[Fence], wait_all: bool) -> Result<()> {\n\n unsafe { device.wait_for_fences(fences, wait_all, std::u64::MAX)? 
}\n\n Ok(())\n\n}\n\n\n", "file_path": "ivy-vulkan/src/fence.rs", "rank": 96, "score": 122174.22385578947 }, { "content": "fn setup_ui(world: &mut World, resources: &Resources, assets: &Assets) -> anyhow::Result<()> {\n\n let canvas = world\n\n .query::<&Canvas>()\n\n .iter()\n\n .next()\n\n .ok_or(anyhow!(\"Missing canvas\"))?\n\n .0;\n\n\n\n let heart: Handle<Image> = resources.load(ImageInfo {\n\n texture: \"./res/textures/heart.png\".into(),\n\n sampler: SamplerInfo::pixelated(),\n\n })??;\n\n\n\n let input_field: Handle<Image> = resources.load(ImageInfo {\n\n texture: \"./res/textures/field.png\".into(),\n\n sampler: SamplerInfo::pixelated(),\n\n })??;\n\n\n\n let font: Handle<Font> = resources.load(FontInfo {\n\n size: 48.0,\n", "file_path": "examples/vulkan/main.rs", "rank": 97, "score": 120517.35702168639 }, { "content": "fn remove_or_add_edge<T: Array<Item = Edge>>(edges: &mut SmallVec<T>, edge: Edge) {\n\n if let Some((index, _)) = edges.iter().enumerate().find(|(_, val)| {\n\n // assert_ne!(**val, edge);\n\n (val.0, val.1) == (edge.1, edge.0)\n\n }) {\n\n edges.remove(index);\n\n } else {\n\n edges.push(edge);\n\n }\n\n}\n", "file_path": "ivy-collision/src/epa/polytype.rs", "rank": 98, "score": 112292.86119187322 }, { "content": "struct InfoCache<I, T>(HashMap<I, Handle<T>>);\n", "file_path": "ivy-resources/src/manager.rs", "rank": 99, "score": 109713.0256676956 } ]
Rust
third-party/fs-mistrust/src/imp.rs
capyloon/api-daemon
ab4e4b60aa9bb617734c64655c0b8940fff098bc
use std::{ fs::{FileType, Metadata}, path::Path, }; #[cfg(target_family = "unix")] use std::os::unix::prelude::MetadataExt; use crate::{ walk::{PathType, ResolvePath}, Error, Result, Type, }; #[cfg(target_family = "unix")] pub(crate) const STICKY_BIT: u32 = 0o1000; fn boxed<'a, I: Iterator<Item = Error> + 'a>(iter: I) -> Box<dyn Iterator<Item = Error> + 'a> { Box::new(iter) } impl<'a> super::Verifier<'a> { pub(crate) fn check_errors(&self, path: &Path) -> impl Iterator<Item = Error> + '_ { if self.mistrust.dangerously_trust_everyone { let meta = match path.metadata() { Ok(meta) => meta, Err(e) => return boxed(vec![Error::inspecting(e, path)].into_iter()), }; let mut errors = Vec::new(); self.check_type(path, PathType::Final, &meta, &mut errors); return boxed(errors.into_iter()); } let rp = match ResolvePath::new(path) { Ok(rp) => rp, Err(e) => return boxed(vec![e].into_iter()), }; let should_retain = move |r: &Result<_>| match (r, &self.mistrust.ignore_prefix) { (Ok((p, _, _)), Some(ignore_prefix)) => !ignore_prefix.starts_with(p), (_, _) => true, }; boxed( rp.filter(should_retain) .flat_map(move |r| match r { Ok((path, path_type, metadata)) => { self.check_one(path.as_path(), path_type, &metadata) } Err(e) => vec![e], }), ) } #[cfg(feature = "walkdir")] pub(crate) fn check_content_errors(&self, path: &Path) -> impl Iterator<Item = Error> + '_ { use std::sync::Arc; if !self.check_contents || self.mistrust.dangerously_trust_everyone { return boxed(std::iter::empty()); } boxed( walkdir::WalkDir::new(path) .follow_links(false) .min_depth(1) .into_iter() .flat_map(move |ent| match ent { Err(err) => vec![Error::Listing(Arc::new(err))], Ok(ent) => match ent.metadata() { Ok(meta) => self .check_one(ent.path(), PathType::Content, &meta) .into_iter() .map(|e| Error::Content(Box::new(e))) .collect(), Err(err) => vec![Error::Listing(Arc::new(err))], }, }), ) } #[cfg(not(feature = "walkdir"))] pub(crate) fn check_content_errors(&self, _path: &Path) -> impl Iterator<Item = 
Error> + '_ { std::iter::empty() } #[must_use] pub(crate) fn check_one( &self, path: &Path, path_type: PathType, meta: &Metadata, ) -> Vec<Error> { let mut errors = Vec::new(); self.check_type(path, path_type, meta, &mut errors); #[cfg(target_family = "unix")] self.check_permissions(path, path_type, meta, &mut errors); errors } fn check_type( &self, path: &Path, path_type: PathType, meta: &Metadata, errors: &mut Vec<Error>, ) { let want_type = match path_type { PathType::Symlink => { return; } PathType::Intermediate => Type::Dir, PathType::Final => self.enforce_type, PathType::Content => Type::DirOrFile, }; if !want_type.matches(meta.file_type()) { errors.push(Error::BadType(path.into())); } } #[cfg(target_family = "unix")] fn check_permissions( &self, path: &Path, path_type: PathType, meta: &Metadata, errors: &mut Vec<Error>, ) { let uid = meta.uid(); if uid != 0 && Some(uid) != self.mistrust.trust_user { errors.push(Error::BadOwner(path.into(), uid)); } if path_type == PathType::Symlink { return; } let mut forbidden_bits = if !self.readable_okay && path_type == PathType::Final { 0o077 } else { if meta.is_dir() && meta.mode() & STICKY_BIT != 0 && path_type == PathType::Intermediate { 0o000 } else { 0o022 } }; if self.mistrust.trust_group == Some(meta.gid()) { forbidden_bits &= !0o070; } let bad_bits = meta.mode() & forbidden_bits; if bad_bits != 0 { errors.push(Error::BadPermission( path.into(), meta.mode() & 0o777, bad_bits, )); } } } impl super::Type { fn matches(&self, have_type: FileType) -> bool { match self { Type::Dir => have_type.is_dir(), Type::File => have_type.is_file(), Type::DirOrFile => have_type.is_dir() || have_type.is_file(), Type::Anything => true, } } }
use std::{ fs::{FileType, Metadata}, path::Path, }; #[cfg(target_family = "unix")] use std::os::unix::prelude::MetadataExt; use crate::{ walk::{PathType, ResolvePath}, Error, Result, Type, }; #[cfg(target_family = "unix")] pub(crate) const STICKY_BIT: u32 = 0o1000; fn boxed<'a, I: Iterator<Item = Error> + 'a>(iter: I) -> Box<dyn Iterator<Item = Error> + 'a> { Box::new(iter) } impl<'a> super::Verifier<'a> { pub(crate) fn check_errors(&self, path: &Path) -> impl Iterator<Item = Error> + '_ { if self.mistrust.dangerously_trust_everyone { let meta = match path.metadata() { Ok(meta) => meta, Err(e) => return boxed(vec![Error::inspecting(e, path)].into_iter()), }; let mut errors = Vec::new(); self.check_type(path, PathType::Final, &meta, &mut errors); return boxed(errors.into_iter()); } let rp = match ResolvePath::new(path) { Ok(rp) => rp, Err(e) => return boxed(vec![e].into_iter()), }; let should_retain = move |r: &Result<_>| match (r, &self.mistrust.ignore_prefix) { (Ok((p, _, _)), Some(ignore_prefix)) => !ignore_prefix.starts_with(p), (_, _) => true, }; boxed( rp.filter(should_retain) .flat_map(move |r| match r { Ok((path, path_type, metadata)) => { self.check_one(path.as_path(), path_type, &metadata) } Err(e) => vec![e], }), ) } #[cfg(feature = "walkdir")] pub(crate) fn check_content_errors(&self, path: &Path) -> impl Iterator<Item = Error> + '_ { use std::sync::Arc; if !self.check_contents || self.mistrust.dangerously_trust_everyone { return boxed(std::iter::empty()); } boxed( walkdir::WalkDir::new(path) .follow_links(false) .min_depth(1) .into_iter() .flat_map(move |ent| match ent { Err(err) => vec![Error::Listing(Arc::new(err))], Ok(ent) => match ent.metadata() { Ok(meta) => self .check_one(ent.path(), PathType::Content, &meta) .into_iter() .map(|e| Error::Content(Box::new(e))) .collect(), Err(err) => vec![Error::Listing(Arc::new(err))], }, }), ) } #[cfg(not(feature = "walkdir"))] pub(crate) fn check_content_errors(&self, _path: &Path) -> impl Iterator<Item = 
Error> + '_ { std::iter::empty() } #[must_use] pub(crate) fn check_one( &self, path: &Path,
fg(target_family = "unix")] self.check_permissions(path, path_type, meta, &mut errors); errors } fn check_type( &self, path: &Path, path_type: PathType, meta: &Metadata, errors: &mut Vec<Error>, ) { let want_type = match path_type { PathType::Symlink => { return; } PathType::Intermediate => Type::Dir, PathType::Final => self.enforce_type, PathType::Content => Type::DirOrFile, }; if !want_type.matches(meta.file_type()) { errors.push(Error::BadType(path.into())); } } #[cfg(target_family = "unix")] fn check_permissions( &self, path: &Path, path_type: PathType, meta: &Metadata, errors: &mut Vec<Error>, ) { let uid = meta.uid(); if uid != 0 && Some(uid) != self.mistrust.trust_user { errors.push(Error::BadOwner(path.into(), uid)); } if path_type == PathType::Symlink { return; } let mut forbidden_bits = if !self.readable_okay && path_type == PathType::Final { 0o077 } else { if meta.is_dir() && meta.mode() & STICKY_BIT != 0 && path_type == PathType::Intermediate { 0o000 } else { 0o022 } }; if self.mistrust.trust_group == Some(meta.gid()) { forbidden_bits &= !0o070; } let bad_bits = meta.mode() & forbidden_bits; if bad_bits != 0 { errors.push(Error::BadPermission( path.into(), meta.mode() & 0o777, bad_bits, )); } } } impl super::Type { fn matches(&self, have_type: FileType) -> bool { match self { Type::Dir => have_type.is_dir(), Type::File => have_type.is_file(), Type::DirOrFile => have_type.is_dir() || have_type.is_file(), Type::Anything => true, } } }
path_type: PathType, meta: &Metadata, ) -> Vec<Error> { let mut errors = Vec::new(); self.check_type(path, path_type, meta, &mut errors); #[c
function_block-random_span
[]
Rust
x86asm/src/encode/encoding.rs
project-ela/ela
b59cae869ca4258954583a87725b090a586601c1
use crate::{ common::{modrm::ModRM, rex::Rex, sib::Sib}, instruction::operand::{ immediate::Immediate, memory::{Displacement, Memory}, offset::Offset, register::{self, Register}, }, }; use super::inst::EncodedInst; pub enum RM<'a> { Register(&'a Register), Memory(&'a Memory), } pub fn encode_m(opcode: &[u8], opr1: RM) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr1), None); enc.modrm = Some(encode_modrm(&opr1)); enc.sib = encode_sib(&opr1); enc.disp = encode_disp(&opr1); enc } pub fn encode_o(opcode: u8, opr1: &Register) -> EncodedInst { let mut enc = EncodedInst::new(&[opcode + opr1.number()]); if opr1.only_in_64bit() { enc.rex = Some(Rex::new(false, false, false, opr1.only_in_64bit())); } enc } pub fn encode_i(opcode: &[u8], opr1: &Immediate) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.imm = Some(opr1.clone()); enc } pub fn encode_d(opcode: &[u8], opr1: &Offset) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.imm = match opr1 { Offset::Off8(value) => Some(Immediate::Imm8(*value)), Offset::Off32(value) => Some(Immediate::Imm32(*value)), }; enc } pub fn encode_mi(opcode: &[u8], opr1: RM, opr2: &Immediate) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr1), None); enc.modrm = Some(encode_modrm(&opr1)); enc.sib = encode_sib(&opr1); enc.disp = encode_disp(&opr1); enc.imm = Some(opr2.clone()); enc } pub fn encode_mr(opcode: &[u8], opr1: RM, opr2: &Register) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr1), Some(opr2)); enc.modrm = Some({ let mut modrm = encode_modrm(&opr1); modrm.reg = opr2.number(); modrm }); enc.sib = encode_sib(&opr1); enc.disp = encode_disp(&opr1); enc } pub fn encode_rm(opcode: &[u8], opr1: &Register, opr2: RM) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr2), Some(opr1)); enc.modrm = Some({ let mut modrm = encode_modrm(&opr2); modrm.reg = opr1.number(); 
modrm }); enc.sib = encode_sib(&opr2); enc.disp = encode_disp(&opr2); enc } pub fn encode_rmi(opcode: &[u8], opr1: &Register, opr2: RM, opr3: &Immediate) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr2), Some(opr1)); enc.modrm = Some({ let mut modrm = encode_modrm(&opr2); modrm.reg = opr1.number(); modrm }); enc.sib = encode_sib(&opr2); enc.disp = encode_disp(&opr2); enc.imm = Some(opr3.clone()); enc } pub fn encode_set(opcode: &[u8], opr1: RM) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = match opr1 { RM::Register(reg) => { if reg.only_in_64bit() { Some(Rex::new(false, false, false, reg.only_in_64bit())) } else { None } } RM::Memory(_) => encode_rex(Some(&opr1), None), }; enc.modrm = Some(encode_modrm(&opr1)); enc.sib = encode_sib(&opr1); enc.disp = encode_disp(&opr1); enc } fn encode_rex(rm: Option<&RM>, reg: Option<&Register>) -> Option<Rex> { let reg_rm = match rm { Some(RM::Register(reg)) => reg, Some(RM::Memory(Memory { base: Some(base), .. 
})) => base, _ => &Register::Al, }; let reg_reg = reg.unwrap_or(&Register::Al); if reg_rm.size() != register::Size::QWord && reg_reg.size() != register::Size::QWord && !reg_rm.only_in_64bit() && !reg_reg.only_in_64bit() { return None; } Some(Rex::new( true, reg_reg.only_in_64bit(), false, reg_rm.only_in_64bit(), )) } fn encode_modrm(rm: &RM) -> ModRM { match rm { RM::Memory(mem) => match &mem.base { Some(base) => match &mem.disp { None => match base { Register::R12 => ModRM::new(0b00, 0, 0b100), Register::R13 => ModRM::new(0b01, 0, 0b101), _ => ModRM::new(0b00, 0, base.number()), }, Some(Displacement::Disp8(_)) => ModRM::new(0b01, 0, base.number()), Some(Displacement::Disp32(_)) => match base { Register::Rip => ModRM::new(0b00, 0, base.number()), _ => ModRM::new(0b10, 0, base.number()), }, }, None => match mem.disp { None => panic!(), Some(Displacement::Disp8(_)) => panic!(), Some(Displacement::Disp32(_)) => ModRM::new(0b00, 0, 0b100), }, }, RM::Register(reg) => ModRM::new(0b11, 0, reg.number()), } } fn encode_sib(rm: &RM) -> Option<Sib> { match rm { RM::Memory(Memory { base: Some(Register::R12), disp: None, }) => Some(Sib::new(0, 0b100, 0b100)), RM::Memory(Memory { base: None, disp: Some(disp), }) => match disp { Displacement::Disp8(_) => panic!(), Displacement::Disp32(_) => Some(Sib::new(0, 0b100, 0b101)), }, _ => None, } } fn encode_disp(rm: &RM) -> Option<Displacement> { match rm { RM::Memory(Memory { base: Some(Register::R13), disp: None, }) => Some(Displacement::Disp8(0)), RM::Memory(Memory { disp, .. }) => disp.clone(), _ => None, } }
use crate::{ common::{modrm::ModRM, rex::Rex, sib::Sib}, instruction::operand::{ immediate::Immediate, memory::{Displacement, Memory}, offset::Offset, register::{self, Register}, }, }; use super::inst::EncodedInst; pub enum RM<'a> { Register(&'a Register), Memory(&'a Memory), } pub fn encode_m(opcode: &[u8], opr1: RM) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr1), None); enc.modrm = Some(encode_modrm(&opr1)); enc.sib = encode_sib(&opr1); enc.disp = encode_disp(&opr1); enc } pub fn encode_o(opcode: u8, opr1: &Register) -> EncodedInst { let mut enc = EncodedInst::new(&[opcode + opr1.number()]); if opr1.only_in_64bit() { enc.rex = Some(Rex::new(false, false, false, opr1.only_in_64bit())); } enc } pub fn encode_i(opcode: &[u8], opr1: &Immediate) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.imm = Some(opr1.clone()); enc } pub fn encode_d(opcode: &[u8], opr1: &Offset) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.imm = match opr1 { Offset::Off8(value) => Some(Immediate::Imm8(*value)), Offset::Off32(value) => Some(Immediate::Imm32(*value)), }; enc } pub fn encode_mi(opcode: &[u8], opr1: RM, opr2: &Immediate) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr1), None); enc.modrm = Some(encode_modrm(&opr1)); enc.sib = encode_sib(&opr1); enc.disp = encode_disp(&opr1); enc.imm = Some(opr2.clone()); enc } pub fn encode_mr(opcode: &[u8], opr1: RM, opr2: &Register) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr1), Some(opr2)); enc.modrm = Some({ let mut modrm = encode_modrm(&opr1); modrm.reg = opr2.number(); modrm }); enc.sib = encode_sib(&opr1); enc.disp = encode_disp(&opr1); enc } pub fn encode_rm(opcode: &[u8], opr1: &Register, opr2: RM) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr2), Some(opr1)); enc.modrm = Some({ let mut modrm = encode_modrm(&opr2); modrm.reg = opr1.number(); 
modrm }); enc.sib = encode_sib(&opr2); enc.disp = encode_disp(&opr2); enc } pub fn encode_rmi(opcode: &[u8], opr1: &Register, opr2: RM, opr3: &Immediate) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = encode_rex(Some(&opr2), Some(opr1)); enc.modrm = Some({ let mut modrm = encode_modrm(&opr2); modrm.reg = opr1.number(); modrm }); enc.sib = encode_sib(&opr2); enc.disp = encode_disp(&opr2); enc.imm = Some(opr3.clone()); enc } pub fn encode_set(opcode: &[u8], opr1: RM) -> EncodedInst { let mut enc = EncodedInst::new(opcode); enc.rex = match opr1 { RM::Register(reg) => { if reg.only_in_64bit() { Some(Rex::new(false, false, false, reg.only_in_64bit())) } else { None } } RM::Memory(_) => encode_rex(Some(&opr1), None), }; enc.modrm = Some(encode_modrm(&opr1)); enc.sib = encode_sib(&opr1); enc.disp = encode_disp(&opr1); enc } fn encode_rex(rm: Option<&RM>, reg: Option<&Register>) -> Option<Rex> {
let reg_reg = reg.unwrap_or(&Register::Al); if reg_rm.size() != register::Size::QWord && reg_reg.size() != register::Size::QWord && !reg_rm.only_in_64bit() && !reg_reg.only_in_64bit() { return None; } Some(Rex::new( true, reg_reg.only_in_64bit(), false, reg_rm.only_in_64bit(), )) } fn encode_modrm(rm: &RM) -> ModRM { match rm { RM::Memory(mem) => match &mem.base { Some(base) => match &mem.disp { None => match base { Register::R12 => ModRM::new(0b00, 0, 0b100), Register::R13 => ModRM::new(0b01, 0, 0b101), _ => ModRM::new(0b00, 0, base.number()), }, Some(Displacement::Disp8(_)) => ModRM::new(0b01, 0, base.number()), Some(Displacement::Disp32(_)) => match base { Register::Rip => ModRM::new(0b00, 0, base.number()), _ => ModRM::new(0b10, 0, base.number()), }, }, None => match mem.disp { None => panic!(), Some(Displacement::Disp8(_)) => panic!(), Some(Displacement::Disp32(_)) => ModRM::new(0b00, 0, 0b100), }, }, RM::Register(reg) => ModRM::new(0b11, 0, reg.number()), } } fn encode_sib(rm: &RM) -> Option<Sib> { match rm { RM::Memory(Memory { base: Some(Register::R12), disp: None, }) => Some(Sib::new(0, 0b100, 0b100)), RM::Memory(Memory { base: None, disp: Some(disp), }) => match disp { Displacement::Disp8(_) => panic!(), Displacement::Disp32(_) => Some(Sib::new(0, 0b100, 0b101)), }, _ => None, } } fn encode_disp(rm: &RM) -> Option<Displacement> { match rm { RM::Memory(Memory { base: Some(Register::R13), disp: None, }) => Some(Displacement::Disp8(0)), RM::Memory(Memory { disp, .. }) => disp.clone(), _ => None, } }
let reg_rm = match rm { Some(RM::Register(reg)) => reg, Some(RM::Memory(Memory { base: Some(base), .. })) => base, _ => &Register::Al, };
assignment_statement
[ { "content": "fn add_padding(v: &mut Vec<u8>, offset: usize) {\n\n if offset < v.len() {\n\n return;\n\n }\n\n let padding_len = offset - v.len();\n\n v.extend(&vec![0; padding_len as usize]);\n\n}\n", "file_path": "elfen/src/elf.rs", "rank": 9, "score": 228514.40608285688 }, { "content": "pub fn allocate(assembly: &mut Assembly) {\n\n let mut allocator = DummyRegisterAllocator::new();\n\n allocator.allocate(assembly);\n\n}\n\n\n", "file_path": "siderow/src/arch/x86/regalloc.rs", "rank": 12, "score": 177702.81063233363 }, { "content": "pub fn apply(module: &mut Module) {\n\n ConstantFolding::new().apply(module);\n\n}\n\n\n", "file_path": "siderow/src/ssa/pass/cf.rs", "rank": 13, "score": 177702.81063233363 }, { "content": "pub fn apply(module: &mut Module) {\n\n DeadCodeElimination::new().apply(module);\n\n}\n\n\n", "file_path": "siderow/src/ssa/pass/dce.rs", "rank": 14, "score": 177702.81063233363 }, { "content": "pub fn decode(code: &[u8]) -> Vec<Instruction> {\n\n let mut decoder = Decoder::new(code);\n\n decoder.decode()\n\n}\n\n\n\nimpl Decoder {\n\n pub fn new(code: &[u8]) -> Self {\n\n Self {\n\n code: code.to_vec(),\n\n pos: 0,\n\n rex: None,\n\n }\n\n }\n\n\n\n pub fn decode(&mut self) -> Vec<Instruction> {\n\n let mut inst = Vec::new();\n\n\n\n while self.pos < self.code.len() {\n\n inst.push(self.decode_inst());\n\n }\n", "file_path": "x86asm/src/decode.rs", "rank": 15, "score": 175585.05680284026 }, { "content": "pub fn encode(inst: &Instruction) -> Vec<u8> {\n\n let typ = inst.mnenomic.typ();\n\n let enc = match typ {\n\n mnemonic::Type::Nullary => encode_nullary_op(inst),\n\n mnemonic::Type::Unary => encode_unary_op(inst),\n\n mnemonic::Type::Binary => encode_binary_op(inst),\n\n };\n\n\n\n enc.to_bytes()\n\n}\n\n\n", "file_path": "x86asm/src/encode.rs", "rank": 16, "score": 175585.05680284026 }, { "content": "fn reg_num(reg: &Register) -> usize {\n\n let extend = reg.only_in_64bit();\n\n reg.number() as usize + if extend { 8 } else { 0 }\n\n}\n", 
"file_path": "eir/src/emulator/cpu.rs", "rank": 17, "score": 170939.0410663258 }, { "content": "pub fn assemble(source: SourceFile) -> Result<Vec<u8>, Error> {\n\n lexer::tokenize(source)\n\n .and_then(parser::parse)\n\n .and_then(gen_code::generate)\n\n .and_then(gen_elf::generate)\n\n .map(|elf| elf.to_bytes())\n\n}\n\n\n", "file_path": "rota/src/assembler.rs", "rank": 18, "score": 161237.66921465262 }, { "content": "pub fn assemble_raw(source: SourceFile) -> Result<Vec<u8>, Error> {\n\n let obj = lexer::tokenize(source)\n\n .and_then(parser::parse)\n\n .and_then(gen_code::generate)?;\n\n\n\n let text_section = obj\n\n .sections\n\n .into_iter()\n\n .find(|section| section.name == SectionName::Text)\n\n .unwrap();\n\n\n\n Ok(text_section.data)\n\n}\n", "file_path": "rota/src/assembler.rs", "rank": 19, "score": 159062.95378398837 }, { "content": "pub fn translate(module: ast::Module, symtab: &mut SymbolTable) -> ssa::Module {\n\n SsaGen::new(symtab).translate(module)\n\n}\n\n\n", "file_path": "sigrun/src/middleend/ssagen.rs", "rank": 20, "score": 149397.85074516182 }, { "content": "pub fn register_size(typ: &ssa::Type) -> asm::RegisterSize {\n\n use asm::RegisterSize::*;\n\n use ssa::Type::*;\n\n\n\n match typ {\n\n I1 | I8 => Byte,\n\n I32 => QWord,\n\n\n\n Pointer(_) | Array(_, _) => QWord,\n\n Structure(_) => QWord, // TODO\n\n\n\n x => panic!(\"{:?}\", x),\n\n }\n\n}\n\n\n", "file_path": "siderow/src/arch/x86/instsel/layout.rs", "rank": 21, "score": 148453.30354453556 }, { "content": "pub fn resolve_symbol(symbols: &Symbols, code: &mut Code) -> Vec<Rela> {\n\n let mut relas = Vec::new();\n\n for unresolved_symbol in &code.unresolved_symbols {\n\n let symbol = symbols.get(&unresolved_symbol.symbol_name).unwrap();\n\n match unresolved_symbol.typ {\n\n SymbolType::Addr => {\n\n let item_index = unresolved_symbol.item_index + 1;\n\n let rela_offset = calc_offset(&code.items, 0, item_index) as u32 - 4;\n\n\n\n relas.push(Rela {\n\n name: 
symbol.name.to_string(),\n\n typ: RelaType::Pc32,\n\n offset: rela_offset,\n\n });\n\n }\n\n SymbolType::Jump => {\n\n let is_global = symbol.is_global || symbol.addr.is_none();\n\n\n\n let offset = if is_global {\n\n 0\n", "file_path": "rota/src/backend/gen_code/resolve_symbol.rs", "rank": 22, "score": 148329.71492275354 }, { "content": "pub fn assemble_to_file(\n\n input_file: String,\n\n output_file: String,\n\n) -> Result<(), Box<dyn error::Error>> {\n\n let source = SourceFile {\n\n filename: input_file.clone(),\n\n content: fs::read_to_string(input_file)?,\n\n };\n\n let output = assemble(source)?;\n\n fs::write(output_file, output)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rota/src/assembler.rs", "rank": 25, "score": 135106.32958998962 }, { "content": "pub fn member_offset_in_bits(typ: &ssa::Type, index: usize) -> usize {\n\n use ssa::Type::*;\n\n\n\n match typ {\n\n Pointer(elm_typ) => type_size_in_bits(elm_typ) * index,\n\n Array(elm_typ, _) => type_size_in_bits(elm_typ) * index,\n\n Structure(s) => {\n\n let mut total_offet = 0;\n\n for i in 0..index {\n\n let align = register_size(&s.members[i + 1]).size_in_bits();\n\n let member_size = type_size_in_bits(&s.members[i]);\n\n total_offet = align_to(total_offet + member_size, align);\n\n }\n\n total_offet\n\n }\n\n\n\n x => panic!(\"{:?}\", x),\n\n }\n\n}\n\n\n", "file_path": "siderow/src/arch/x86/instsel/layout.rs", "rank": 26, "score": 133602.45495261197 }, { "content": "pub fn assemble_raw_to_file(\n\n input_file: String,\n\n output_file: String,\n\n) -> Result<(), Box<dyn error::Error>> {\n\n let source = SourceFile {\n\n filename: input_file.clone(),\n\n content: fs::read_to_string(input_file)?,\n\n };\n\n let output = assemble_raw(source)?;\n\n fs::write(output_file, output)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "rota/src/assembler.rs", "rank": 27, "score": 133225.7778819027 }, { "content": "fn bytes_to_str(bytes: &[u8]) -> String {\n\n bytes\n\n .iter()\n\n .map(|byte| format!(\"{:02x}\", byte))\n\n 
.collect::<Vec<String>>()\n\n .join(\" \")\n\n}\n", "file_path": "rota/tests/code.rs", "rank": 28, "score": 123941.50532467914 }, { "content": "/// 1になっているビットが偶数個の場合にtrueを返す\n\nfn check_parity(value: u8) -> bool {\n\n value.count_ones() % 2 == 0\n\n}\n", "file_path": "eir/src/emulator/flags.rs", "rank": 29, "score": 122222.65161754945 }, { "content": "pub fn parse_arguments() -> Result<CompilerConfig, ()> {\n\n let mut config = CompilerConfig::default();\n\n let args: Vec<String> = env::args().skip(1).collect();\n\n for (i, arg) in args.iter().enumerate() {\n\n match arg.as_str() {\n\n \"--optimize\" => config.optimize = true,\n\n \"--tse\" => config.tse = true,\n\n \"--dump-token\" => config.dump_token = true,\n\n \"--dump-ast\" => config.dump_ast = true,\n\n \"--dump-ir\" => config.dump_ir = true,\n\n _ => {\n\n if args.len() != i + 2 {\n\n return Err(());\n\n }\n\n config.input_file = args[i].to_owned();\n\n config.output_file = args[i + 1].to_owned();\n\n return Ok(config);\n\n }\n\n }\n\n }\n\n Err(())\n\n}\n", "file_path": "sigrun/src/common/cli.rs", "rank": 30, "score": 119817.10327405567 }, { "content": "pub fn compile_to_file(config: CompilerConfig) -> Result<()> {\n\n let source = SourceFile {\n\n filename: config.input_file.to_owned(),\n\n content: fs::read_to_string(&config.input_file)?,\n\n };\n\n let output = compile(source, &config)?;\n\n fs::write(&config.output_file, output)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "sigrun/src/compiler.rs", "rank": 31, "score": 116557.7597079491 }, { "content": "pub fn apply(module: &Module) -> Result<()> {\n\n let mut pass = SemaCheck::new();\n\n pass.apply(module);\n\n match pass.issues.0.len() {\n\n 0 => Ok(()),\n\n _ => Err(pass.issues.into()),\n\n }\n\n}\n\n\n", "file_path": "sigrun/src/frontend/pass/sema_check.rs", "rank": 32, "score": 114988.23046911319 }, { "content": "fn encode_item(item: &CodeItem) -> Vec<u8> {\n\n match item {\n\n CodeItem::Raw(data) => data.clone(),\n\n CodeItem::Inst(inst) => 
encode::encode(inst),\n\n }\n\n}\n\n\n", "file_path": "rota/src/backend/gen_code.rs", "rank": 33, "score": 114197.92395625939 }, { "content": "pub fn parse(input: &str) -> ssa::Module {\n\n translate(ssa_parser::module(input).unwrap())\n\n}\n\n\n", "file_path": "siderow/src/ssa/parser.rs", "rank": 34, "score": 113726.394301717 }, { "content": "#[test]\n\nfn binary_rm() {\n\n do_test(Instruction::new_binary(\n\n Mnemonic::Add,\n\n Operand::Register(Register::Rax),\n\n Operand::Memory(Memory::new(Register::Rax, None)),\n\n ));\n\n do_test(Instruction::new_binary(\n\n Mnemonic::Add,\n\n Operand::Register(Register::Rax),\n\n Operand::Memory(Memory::new(Register::R8, None)),\n\n ));\n\n do_test(Instruction::new_binary(\n\n Mnemonic::Add,\n\n Operand::Register(Register::Rax),\n\n Operand::Memory(Memory::new(Register::R12, Some(Displacement::Disp8(0)))),\n\n ));\n\n do_test(Instruction::new_binary(\n\n Mnemonic::Add,\n\n Operand::Register(Register::Rax),\n\n Operand::Memory(Memory::new(Register::R13, Some(Displacement::Disp8(0)))),\n", "file_path": "x86asm/tests/lib.rs", "rank": 35, "score": 110544.50973380501 }, { "content": "fn trans_inst(i: Instruction, sm: &ssa::Module, ctx: &mut Context, fb: &mut ssa::FunctionBuilder) {\n\n match i {\n\n Instruction::O { op, src } => match op.as_str() {\n\n \"store\" => {\n\n let dst = trans_value(&src[0], ctx);\n\n let src = trans_value(&src[1], ctx);\n\n fb.store(dst, src);\n\n }\n\n \"ret\" => match src.len() {\n\n 0 => fb.ret_void(),\n\n 1 => {\n\n let v = trans_value(&src[0], ctx);\n\n fb.ret(v);\n\n }\n\n _ => panic!(),\n\n },\n\n \"br\" => match src.len() {\n\n 1 => {\n\n let dst = trans_label(&src[0], ctx);\n\n fb.br(dst);\n", "file_path": "siderow/src/ssa/parser.rs", "rank": 36, "score": 110319.18321308697 }, { "content": "pub fn parse(tokens: Vec<Token>) -> Result<Module> {\n\n let mut parser = Parser::new(tokens);\n\n parser.parse()\n\n}\n\n\n", "file_path": "sigrun/src/frontend/parser.rs", "rank": 37, "score": 
109674.37570164047 }, { "content": "pub fn apply(module: &Module) -> Result<SymbolTable> {\n\n let mut pass = TypeCheck::new();\n\n let table = pass.apply(module);\n\n match pass.issues.0.len() {\n\n 0 => Ok(table),\n\n _ => Err(pass.issues.into()),\n\n }\n\n}\n\n\n", "file_path": "sigrun/src/frontend/pass/type_check.rs", "rank": 38, "score": 109210.83627897702 }, { "content": "pub fn tokenize(source: SourceFile) -> Result<Vec<Token>> {\n\n let mut tokenizer = Tokenizer::new(source);\n\n tokenizer.tokenize()\n\n}\n\n\n\nimpl Tokenizer {\n\n fn new(source: SourceFile) -> Tokenizer {\n\n let pos = Pos {\n\n filename: source.filename.to_owned(),\n\n line: 1,\n\n column: 1,\n\n };\n\n\n\n Tokenizer {\n\n source_index: 0,\n\n source,\n\n pos,\n\n }\n\n }\n\n\n", "file_path": "sigrun/src/frontend/lexer.rs", "rank": 39, "score": 108170.53617693015 }, { "content": "pub fn generate(obj: Object) -> Result<Elf, Error> {\n\n let elfgen = ElfGen::new(obj);\n\n elfgen.generate()\n\n}\n\n\n\nimpl ElfGen {\n\n fn new(obj: Object) -> Self {\n\n Self {\n\n elf: Elf::default(),\n\n obj,\n\n symbols: HashMap::new(),\n\n }\n\n }\n\n\n\n fn generate(mut self) -> Result<Elf, Error> {\n\n self.gen_header();\n\n self.gen_sections();\n\n self.elf.update_section_headers();\n\n self.elf.update_header();\n\n Ok(self.elf)\n", "file_path": "rota/src/backend/gen_elf.rs", "rank": 40, "score": 108170.53617693015 }, { "content": "pub fn translate(module: ssa::Module) -> asm::Assembly {\n\n let selector = InstructionSelector::new();\n\n selector.translate(module)\n\n}\n\n\n", "file_path": "siderow/src/arch/x86/instsel.rs", "rank": 41, "score": 108170.53617693015 }, { "content": "pub fn generate(program: Program) -> Result<Object, Error> {\n\n let collector = SymbolCollector::new();\n\n let (symbols, tses) = collector.collect_symbols(&program);\n\n\n\n let generator = CodeGen::new();\n\n let mut codes = generator.gen_program(program);\n\n\n\n Ok(Object {\n\n sections: gen_sections(&symbols, &mut 
codes),\n\n global_symbols: list_global_symbols(symbols, &codes),\n\n tses,\n\n })\n\n}\n\n\n", "file_path": "rota/src/backend/gen_code.rs", "rank": 42, "score": 108170.53617693015 }, { "content": "pub fn type_size_in_bits(typ: &ssa::Type) -> usize {\n\n use ssa::Type::*;\n\n\n\n match typ {\n\n Void => 0,\n\n I1 | I8 => 1,\n\n I32 => 8,\n\n\n\n Pointer(_) => 8,\n\n Array(elm_typ, len) => type_size_in_bits(elm_typ) * len,\n\n Structure(typ) => struct_size_in_bits(typ),\n\n }\n\n}\n\n\n", "file_path": "siderow/src/arch/x86/instsel/layout.rs", "rank": 43, "score": 107826.58238065944 }, { "content": "pub fn struct_size_in_bits(typ: &ssa::StructType) -> usize {\n\n let mut total_size: usize = 0;\n\n for member in &typ.members {\n\n let align = register_size(member).size_in_bits();\n\n let member_size = type_size_in_bits(member);\n\n total_size = align_to(total_size, align) + member_size;\n\n }\n\n total_size\n\n}\n\n\n", "file_path": "siderow/src/arch/x86/instsel/layout.rs", "rank": 44, "score": 106496.84153053063 }, { "content": "pub fn link(input_elfs: Vec<Elf>) -> Result<Elf, String> {\n\n let linker = Linker::new(input_elfs);\n\n let output_elf = linker.link()?;\n\n Ok(output_elf)\n\n}\n\n\n", "file_path": "herja/src/linker.rs", "rank": 45, "score": 105976.18311289561 }, { "content": "pub fn parse(tokens: Vec<Token>) -> Result<Program, Error> {\n\n let mut parser = Parser::new(tokens);\n\n parser.parse()\n\n}\n\n\n\nimpl Parser {\n\n fn new(tokens: Vec<Token>) -> Self {\n\n Self { pos: 0, tokens }\n\n }\n\n\n\n fn parse(&mut self) -> Result<Program, Error> {\n\n let mut insts = Vec::new();\n\n loop {\n\n if self.is_eof() {\n\n break;\n\n }\n\n\n\n if matches!(self.peek().kind, TokenKind::Comment(_)) {\n\n self.consume();\n\n continue;\n", "file_path": "rota/src/frontend/parser.rs", "rank": 46, "score": 105976.18311289561 }, { "content": "fn relocate_symbol(mut symbol: Symbol, codes: &Codes) -> Symbol {\n\n let code = codes.get(&symbol.section).unwrap();\n\n\n\n 
symbol.addr = symbol\n\n .addr\n\n .map(|addr| calc_offset(&code.items, 0, addr) as usize);\n\n\n\n symbol\n\n}\n\n\n", "file_path": "rota/src/backend/gen_code/resolve_symbol.rs", "rank": 47, "score": 104834.03996040451 }, { "content": "pub fn compile(source: SourceFile, config: &CompilerConfig) -> Result<String> {\n\n let tokens = frontend::lexer::tokenize(source)?;\n\n if config.dump_token {\n\n println!(\"{:?}\", tokens);\n\n }\n\n\n\n let module = frontend::parser::parse(tokens)?;\n\n if config.dump_ast {\n\n println!(\"{:?}\", module);\n\n }\n\n\n\n let mut symtab = frontend::type_check::apply(&module)?;\n\n frontend::sema_check::apply(&module)?;\n\n\n\n let mut module = middleend::ssagen::translate(module, &mut symtab);\n\n if config.optimize {\n\n siderow::ssa::pass::cf::apply(&mut module);\n\n siderow::ssa::pass::dce::apply(&mut module);\n\n }\n\n if config.dump_ir {\n\n println!(\"{}\", module.dump());\n\n }\n\n\n\n let mut asm = x86::instsel::translate(module);\n\n x86::regalloc::allocate(&mut asm);\n\n Ok(asm.stringify())\n\n}\n", "file_path": "sigrun/src/compiler.rs", "rank": 48, "score": 104533.99385370186 }, { "content": "pub fn tokenize(source: SourceFile) -> Result<Vec<Token>, Error> {\n\n let mut lexer = Lexer::new(source);\n\n lexer.tokenize()\n\n}\n\n\n\nimpl Lexer {\n\n fn new(source: SourceFile) -> Self {\n\n let pos = Pos {\n\n filename: source.filename.clone(),\n\n line: 1,\n\n column: 1,\n\n };\n\n\n\n Self {\n\n source,\n\n source_index: 0,\n\n pos,\n\n }\n\n }\n\n\n", "file_path": "rota/src/frontend/lexer.rs", "rank": 49, "score": 104533.99385370186 }, { "content": "fn gen_sections(symbols: &Symbols, codes: &mut Codes) -> Vec<Section> {\n\n let mut sections = Vec::new();\n\n for (section_name, code) in codes.iter_mut() {\n\n let section_relas = resolve_symbol(&symbols, code);\n\n\n\n let section_data: Vec<u8> = code\n\n .items\n\n .iter()\n\n .flat_map(|item| encode_item(&item))\n\n .collect();\n\n\n\n sections.push(Section {\n\n name: 
section_name.clone(),\n\n data: section_data,\n\n relas: section_relas,\n\n })\n\n }\n\n sections\n\n}\n\n\n", "file_path": "rota/src/backend/gen_code.rs", "rank": 50, "score": 103938.86741252613 }, { "content": "pub fn link_to_files(input_files: Vec<String>, output_file: String) -> Result<(), String> {\n\n let input_elfs = input_files\n\n .into_iter()\n\n .map(|path| Elf::read_from_file(&path))\n\n .collect();\n\n\n\n let output_elf = link(input_elfs)?;\n\n\n\n let elf_bytes = output_elf.to_bytes();\n\n fs::write(output_file, elf_bytes).unwrap();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "herja/src/linker.rs", "rank": 51, "score": 99866.36902720702 }, { "content": "pub fn list_global_symbols(symbols: Symbols, codes: &Codes) -> Vec<Symbol> {\n\n let mut global_symbols: Vec<Symbol> = symbols\n\n .into_iter()\n\n .map(|(_, v)| v)\n\n .filter(|symbol| symbol.is_global | symbol.addr.is_none())\n\n .map(|symbol| relocate_symbol(symbol, &codes))\n\n .collect();\n\n\n\n global_symbols.sort_by_key(|symbol| symbol.addr);\n\n\n\n global_symbols\n\n}\n\n\n", "file_path": "rota/src/backend/gen_code/resolve_symbol.rs", "rank": 52, "score": 99311.66396952856 }, { "content": "fn trans_func(f: Function, sm: &ssa::Module, ctx: &mut Context) -> ssa::Function {\n\n let (ret_typ, param_typ) = {\n\n let ret_typ = trans_typ(f.typ, ctx);\n\n let param_typ = f\n\n .params\n\n .into_iter()\n\n .map(|param| trans_typ(param, ctx))\n\n .collect();\n\n\n\n (ret_typ, param_typ)\n\n };\n\n\n\n let mut sf = ssa::Function::new(f.name, ret_typ, param_typ);\n\n for i in 0..sf.param_typ.len() {\n\n ctx.registers.insert(i, ssa::Value::new_param(&sf, i));\n\n }\n\n\n\n let mut fb = ssa::FunctionBuilder::new(&mut sf);\n\n\n\n for inst in &f.body {\n", "file_path": "siderow/src/ssa/parser.rs", "rank": 53, "score": 97239.16923642639 }, { "content": "fn encode_unary_op(inst: &Instruction) -> EncodedInst {\n\n let opr1 = inst.operand1.as_ref().expect(\"first operand is required\");\n\n if 
inst.operand2.is_some() {\n\n panic!(\"number of operands mismatched\");\n\n }\n\n\n\n match inst.mnenomic {\n\n Mnemonic::Call => match opr1 {\n\n Operand::Offset(off) => match off {\n\n Offset::Off8(_) => panic!(),\n\n Offset::Off32(_) => encoding::encode_d(&[0xe8], off),\n\n },\n\n Operand::Register(_) | Operand::Memory(_) => {\n\n encoding::encode_m(&[0xff], opr1.to_rm()).set_reg(2)\n\n }\n\n _ => panic!(),\n\n },\n\n Mnemonic::IDiv => match opr1 {\n\n Operand::Register(_) | Operand::Memory(_) => {\n\n encoding::encode_m(&[0xf7], opr1.to_rm()).set_reg(7)\n", "file_path": "x86asm/src/encode.rs", "rank": 54, "score": 83397.39015236816 }, { "content": "fn encode_nullary_op(inst: &Instruction) -> EncodedInst {\n\n if inst.operand1.is_some() || inst.operand2.is_some() {\n\n panic!(\"number of operands mismatched\");\n\n }\n\n\n\n match inst.mnenomic {\n\n Mnemonic::Hlt => EncodedInst::new(&[0xf4]),\n\n Mnemonic::Ret => EncodedInst::new(&[0xc3]),\n\n Mnemonic::Syscall => EncodedInst::new(&[0x0f, 0x05]),\n\n _ => panic!(),\n\n }\n\n}\n\n\n", "file_path": "x86asm/src/encode.rs", "rank": 55, "score": 83397.39015236816 }, { "content": "// todo サイズの比較\n\nfn encode_binary_op(inst: &Instruction) -> EncodedInst {\n\n let opr1 = inst.operand1.as_ref().expect(\"first operand is required\");\n\n let opr2 = inst.operand2.as_ref().expect(\"second operand is required\");\n\n\n\n match inst.mnenomic {\n\n Mnemonic::Add => match (opr1, opr2) {\n\n (Operand::Register(_), Operand::Immediate(imm))\n\n | (Operand::Memory(_), Operand::Immediate(imm)) => match imm {\n\n Immediate::Imm8(_) => encoding::encode_mi(&[0x83], opr1.to_rm(), imm).set_reg(0),\n\n Immediate::Imm32(_) => encoding::encode_mi(&[0x81], opr1.to_rm(), imm).set_reg(0),\n\n },\n\n (Operand::Register(_), Operand::Register(reg))\n\n | (Operand::Memory(_), Operand::Register(reg)) => {\n\n encoding::encode_mr(&[0x01], opr1.to_rm(), reg)\n\n }\n\n (Operand::Register(reg), Operand::Memory(_)) => {\n\n 
encoding::encode_rm(&[0x03], reg, opr2.to_rm())\n\n }\n\n _ => panic!(),\n\n },\n", "file_path": "x86asm/src/encode.rs", "rank": 56, "score": 83397.39015236816 }, { "content": "fn calc_offset(items: &[CodeItem], from: usize, to: usize) -> i32 {\n\n // make from <= to\n\n let sign = if from < to { 1 } else { -1 };\n\n let (from, to) = if from < to { (from, to) } else { (to, from) };\n\n\n\n items[from..to]\n\n .iter()\n\n .map(|item| encode_item(item).len() as i32)\n\n .sum::<i32>()\n\n * sign\n\n}\n", "file_path": "rota/src/backend/gen_code/resolve_symbol.rs", "rank": 57, "score": 82991.17653024633 }, { "content": "#[derive(Debug)]\n\nenum CodeItem {\n\n Raw(Vec<u8>),\n\n Inst(Instruction),\n\n}\n\n\n", "file_path": "rota/src/backend/gen_code.rs", "rank": 58, "score": 73369.4384798468 }, { "content": "#[derive(Debug)]\n\nenum SymbolType {\n\n Addr,\n\n Jump,\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Tse {\n\n pub symbol_name: String,\n\n pub offset: i64,\n\n pub size: u64,\n\n pub align: u64,\n\n}\n", "file_path": "rota/src/backend/gen_code.rs", "rank": 59, "score": 73369.4384798468 }, { "content": "#[test]\n\nfn and() {\n\n do_test(\"and eax,1\", \"83 e0 01\");\n\n do_test(\"and rax,1\", \"48 83 e0 01\");\n\n do_test(\"and r9,1\", \"49 83 e1 01\");\n\n do_test(\"and eax,eax\", \"21 c0\");\n\n do_test(\"and rax,rax\", \"48 21 c0\");\n\n do_test(\"and rax,r9\", \"4c 21 c8\");\n\n do_test(\"and r9,rax\", \"49 21 c1\");\n\n do_test(\"and r9,r9\", \"4d 21 c9\");\n\n do_test(\"and rax,[rax]\", \"48 23 00\");\n\n do_test(\"and rax,[r9]\", \"49 23 01\");\n\n do_test(\"and rax,[r12]\", \"49 23 04 24\");\n\n do_test(\"and rax,[r13]\", \"49 23 45 00\");\n\n do_test(\"and [rax],rax\", \"48 21 00\");\n\n do_test(\"and [r9],rax\", \"49 21 01\");\n\n do_test(\"and rax,[rax+8]\", \"48 23 40 08\");\n\n do_test(\"and rax,[r9-8]\", \"49 23 41 f8\");\n\n do_test(\"and rax,[r9-129]\", \"49 23 81 7f ff ff ff\");\n\n do_test(\"and [rax+8],rax\", \"48 21 40 08\");\n\n do_test(\"and 
[r9-8],rax\", \"49 21 41 f8\");\n\n do_test(\"and [r9-129],rax\", \"49 21 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 60, "score": 70722.9002918743 }, { "content": "#[test]\n\nfn or() {\n\n do_test(\"or eax,1\", \"83 c8 01\");\n\n do_test(\"or rax,1\", \"48 83 c8 01\");\n\n do_test(\"or r9,1\", \"49 83 c9 01\");\n\n do_test(\"or eax,eax\", \"09 c0\");\n\n do_test(\"or rax,rax\", \"48 09 c0\");\n\n do_test(\"or rax,r9\", \"4c 09 c8\");\n\n do_test(\"or r9,rax\", \"49 09 c1\");\n\n do_test(\"or r9,r9\", \"4d 09 c9\");\n\n do_test(\"or rax,[rax]\", \"48 0b 00\");\n\n do_test(\"or rax,[r9]\", \"49 0b 01\");\n\n do_test(\"or rax,[r12]\", \"49 0b 04 24\");\n\n do_test(\"or rax,[r13]\", \"49 0b 45 00\");\n\n do_test(\"or [rax],rax\", \"48 09 00\");\n\n do_test(\"or [r9],rax\", \"49 09 01\");\n\n do_test(\"or rax,[rax+8]\", \"48 0b 40 08\");\n\n do_test(\"or rax,[r9-8]\", \"49 0b 41 f8\");\n\n do_test(\"or rax,[r9-129]\", \"49 0b 81 7f ff ff ff\");\n\n do_test(\"or [rax+8],rax\", \"48 09 40 08\");\n\n do_test(\"or [r9-8],rax\", \"49 09 41 f8\");\n\n do_test(\"or [r9-129],rax\", \"49 09 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 61, "score": 70722.9002918743 }, { "content": "#[test]\n\nfn syscall() {\n\n do_test(\"syscall\", \"0f 05\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 62, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn hlt() {\n\n do_test(\"hlt\", \"f4\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 63, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn ascii() {\n\n do_test(r#\".ascii \"\"\"#, \"\"); // skip\n\n do_test(r#\".ascii \"Hi!\"\"#, \"48 69 21\"); // skip\n\n do_test(r#\".ascii \"\\r\\n\"\"#, \"0d 0a\"); // skip\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 64, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn lea() {\n\n do_test(\"lea rax,[rax]\", \"48 8d 00\");\n\n do_test(\"lea rax,[r9]\", \"49 8d 01\");\n\n do_test(\"lea 
rax,[r12]\", \"49 8d 04 24\");\n\n do_test(\"lea rax,[r13]\", \"49 8d 45 00\");\n\n do_test(\"lea rax,[rax+8]\", \"48 8d 40 08\");\n\n do_test(\"lea rax,[r9-8]\", \"49 8d 41 f8\");\n\n do_test(\"lea rax,[r9-129]\", \"49 8d 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 65, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn unary_m() {\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Memory(Memory::new(Register::Rax, None)),\n\n ));\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Memory(Memory::new(Register::R8, None)),\n\n ));\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Memory(Memory::new(Register::Rax, Some(Displacement::Disp8(2)))),\n\n ));\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Memory(Memory::new(Register::R8, Some(Displacement::Disp8(2)))),\n\n ));\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Memory(Memory::new_disp(Displacement::Disp32(2))),\n", "file_path": "x86asm/tests/lib.rs", "rank": 66, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn unary_o() {\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Register(Register::Rax),\n\n ));\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Register(Register::R8),\n\n ));\n\n}\n\n\n", "file_path": "x86asm/tests/lib.rs", "rank": 67, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn sete() {\n\n do_test(\"sete al\", \"0f 94 c0\");\n\n do_test(\"sete r9b\", \"41 0f 94 c1\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 68, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn setg() {\n\n do_test(\"setg al\", \"0f 9f c0\");\n\n do_test(\"setg r9b\", \"41 0f 9f c1\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 69, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn simple() {\n\n let filename = \"tests/testcases/simple.s\";\n\n let source = SourceFile {\n\n filename: 
filename.to_string(),\n\n content: fs::read_to_string(filename).unwrap(),\n\n };\n\n let actual_output = assembler::assemble(source).unwrap();\n\n let expected_output = fs::read(\"tests/testcases/simple.o\").unwrap();\n\n\n\n assert_eq!(actual_output, expected_output);\n\n}\n", "file_path": "rota/tests/elf.rs", "rank": 70, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn cmp() {\n\n do_test(\"cmp eax,1\", \"83 f8 01\");\n\n do_test(\"cmp rax,1\", \"48 83 f8 01\");\n\n do_test(\"cmp r9,1\", \"49 83 f9 01\");\n\n do_test(\"cmp eax,eax\", \"39 c0\");\n\n do_test(\"cmp rax,rax\", \"48 39 c0\");\n\n do_test(\"cmp rax,r9\", \"4c 39 c8\");\n\n do_test(\"cmp r9,rax\", \"49 39 c1\");\n\n do_test(\"cmp r9,r9\", \"4d 39 c9\");\n\n do_test(\"cmp rax,[rax]\", \"48 3b 00\");\n\n do_test(\"cmp rax,[r9]\", \"49 3b 01\");\n\n do_test(\"cmp rax,[r12]\", \"49 3b 04 24\");\n\n do_test(\"cmp rax,[r13]\", \"49 3b 45 00\");\n\n do_test(\"cmp [rax],rax\", \"48 39 00\");\n\n do_test(\"cmp [r9],rax\", \"49 39 01\");\n\n do_test(\"cmp rax,[rax+8]\", \"48 3b 40 08\");\n\n do_test(\"cmp rax,[r9-8]\", \"49 3b 41 f8\");\n\n do_test(\"cmp rax,[r9-129]\", \"49 3b 81 7f ff ff ff\");\n\n do_test(\"cmp [rax+8],rax\", \"48 39 40 08\");\n\n do_test(\"cmp [r9-8],rax\", \"49 39 41 f8\");\n\n do_test(\"cmp [r9-129],rax\", \"49 39 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 71, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn call() {\n\n do_test(\"label: call label\", \"e8 fb ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 72, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn nullary() {\n\n do_test(Instruction::new_nullary(Mnemonic::Hlt));\n\n do_test(Instruction::new_nullary(Mnemonic::Ret));\n\n do_test(Instruction::new_nullary(Mnemonic::Syscall));\n\n}\n\n\n", "file_path": "x86asm/tests/lib.rs", "rank": 73, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn setne() {\n\n do_test(\"setne al\", \"0f 95 c0\");\n\n 
do_test(\"setne r9b\", \"41 0f 95 c1\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 74, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn setle() {\n\n do_test(\"setle al\", \"0f 9e c0\");\n\n do_test(\"setle r9b\", \"41 0f 9e c1\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 75, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn unary_d() {\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Call,\n\n Operand::Offset(Offset::Off32(12)),\n\n ));\n\n}\n\n\n", "file_path": "x86asm/tests/lib.rs", "rank": 76, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn zero() {\n\n do_test(\".zero 0\", \"\"); // skip\n\n do_test(\".zero 4\", \"00 00 00 00\"); // skip\n\n do_test(\".zero 8\", \"00 00 00 00 00 00 00 00\"); // skip\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 77, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn mov() {\n\n do_test(\"mov eax,1\", \"c7 c0 01 00 00 00\"); // skip\n\n do_test(\"mov rax,1\", \"48 c7 c0 01 00 00 00\");\n\n do_test(\"mov r9,1\", \"49 c7 c1 01 00 00 00\");\n\n do_test(\"mov eax,eax\", \"89 c0\");\n\n do_test(\"mov rax,rax\", \"48 89 c0\");\n\n do_test(\"mov rax,r9\", \"4c 89 c8\");\n\n do_test(\"mov r9,rax\", \"49 89 c1\");\n\n do_test(\"mov r9,r9\", \"4d 89 c9\");\n\n do_test(\"mov rax,[rax]\", \"48 8b 00\");\n\n do_test(\"mov rax,[r9]\", \"49 8b 01\");\n\n do_test(\"mov rax,[r12]\", \"49 8b 04 24\");\n\n do_test(\"mov rax,[r13]\", \"49 8b 45 00\");\n\n do_test(\"mov [rax],rax\", \"48 89 00\");\n\n do_test(\"mov [r9],rax\", \"49 89 01\");\n\n do_test(\"mov rax,[rax+8]\", \"48 8b 40 08\");\n\n do_test(\"mov rax,[r9-8]\", \"49 8b 41 f8\");\n\n do_test(\"mov rax,[r9-129]\", \"49 8b 81 7f ff ff ff\");\n\n do_test(\"mov [rax+8],rax\", \"48 89 40 08\");\n\n do_test(\"mov [r9-8],rax\", \"49 89 41 f8\");\n\n do_test(\"mov [r9-129],rax\", \"49 89 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 78, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn 
xor() {\n\n do_test(\"xor eax,1\", \"83 f0 01\");\n\n do_test(\"xor rax,1\", \"48 83 f0 01\");\n\n do_test(\"xor r9,1\", \"49 83 f1 01\");\n\n do_test(\"xor eax,eax\", \"31 c0\");\n\n do_test(\"xor rax,rax\", \"48 31 c0\");\n\n do_test(\"xor rax,r9\", \"4c 31 c8\");\n\n do_test(\"xor r9,rax\", \"49 31 c1\");\n\n do_test(\"xor r9,r9\", \"4d 31 c9\");\n\n do_test(\"xor rax, [rax]\", \"48 33 00\");\n\n do_test(\"xor rax, [r9]\", \"49 33 01\");\n\n do_test(\"xor rax, [r12]\", \"49 33 04 24\");\n\n do_test(\"xor rax, [r13]\", \"49 33 45 00\");\n\n do_test(\"xor [rax], rax\", \"48 31 00\");\n\n do_test(\"xor [r9], rax\", \"49 31 01\");\n\n do_test(\"xor rax,[rax+8]\", \"48 33 40 08\");\n\n do_test(\"xor rax,[r9-8]\", \"49 33 41 f8\");\n\n do_test(\"xor rax,[r9-129]\", \"49 33 81 7f ff ff ff\");\n\n do_test(\"xor [rax+8],rax\", \"48 31 40 08\");\n\n do_test(\"xor [r9-8],rax\", \"49 31 41 f8\");\n\n do_test(\"xor [r9-129],rax\", \"49 31 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 79, "score": 69645.26495609788 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n if !matches!(args.len(), 3 | 4) {\n\n show_usage();\n\n }\n\n\n\n let input_file = args[1].to_string();\n\n let output_file = args[2].to_string();\n\n let output_raw = args.get(3).map_or(false, |arg| arg == \"--raw\");\n\n\n\n let err = if output_raw {\n\n assembler::assemble_raw_to_file(input_file, output_file)\n\n } else {\n\n assembler::assemble_to_file(input_file, output_file)\n\n };\n\n\n\n if let Err(err) = err {\n\n println!(\"Failed to assemble:\\n{}\", err);\n\n std::process::exit(1);\n\n }\n\n}\n\n\n", "file_path": "rota/src/main.rs", "rank": 80, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn imul() {\n\n do_test(\"imul eax,1\", \"6b c0 01\");\n\n do_test(\"imul rax,1\", \"48 6b c0 01\");\n\n do_test(\"imul r9,1\", \"4d 6b c9 01\");\n\n do_test(\"imul eax,eax\", \"0f af c0\");\n\n do_test(\"imul rax,rax\", \"48 0f af c0\");\n\n 
do_test(\"imul rax,r9\", \"49 0f af c1\");\n\n do_test(\"imul r9,rax\", \"4c 0f af c8\");\n\n do_test(\"imul r9,r9\", \"4d 0f af c9\");\n\n do_test(\"imul rax, [rax]\", \"48 0f af 00\");\n\n do_test(\"imul rax, [r9]\", \"49 0f af 01\");\n\n do_test(\"imul rax, [r12]\", \"49 0f af 04 24\");\n\n do_test(\"imul rax, [r13]\", \"49 0f af 45 00\");\n\n do_test(\"imul rax,[rax+8]\", \"48 0f af 40 08\");\n\n do_test(\"imul rax,[r9-8]\", \"49 0f af 41 f8\");\n\n do_test(\"imul rax,[r9-129]\", \"49 0f af 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 81, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn setl() {\n\n do_test(\"setl al\", \"0f 9c c0\");\n\n do_test(\"setl r9b\", \"41 0f 9c c1\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 82, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn add() {\n\n do_test(\"add eax,1\", \"83 c0 01\");\n\n do_test(\"add rax,1\", \"48 83 c0 01\");\n\n do_test(\"add r9,1\", \"49 83 c1 01\");\n\n do_test(\"add eax,eax\", \"01 c0\");\n\n do_test(\"add rax,rax\", \"48 01 c0\");\n\n do_test(\"add rax,r9\", \"4c 01 c8\");\n\n do_test(\"add r9,rax\", \"49 01 c1\");\n\n do_test(\"add r9,r9\", \"4d 01 c9\");\n\n do_test(\"add rax, [rax]\", \"48 03 00\");\n\n do_test(\"add rax, [r9]\", \"49 03 01\");\n\n do_test(\"add rax, [r12]\", \"49 03 04 24\");\n\n do_test(\"add rax, [r13]\", \"49 03 45 00\");\n\n do_test(\"add [rax], rax\", \"48 01 00\");\n\n do_test(\"add [r9], rax\", \"49 01 01\");\n\n do_test(\"add rax,[rax+8]\", \"48 03 40 08\");\n\n do_test(\"add rax,[r9-8]\", \"49 03 41 f8\");\n\n do_test(\"add rax,[r9-129]\", \"49 03 81 7f ff ff ff\");\n\n do_test(\"add [rax+8],rax\", \"48 01 40 08\");\n\n do_test(\"add [r9-8],rax\", \"49 01 41 f8\");\n\n do_test(\"add [r9-129],rax\", \"49 01 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 83, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn idiv() {\n\n do_test(\"idiv eax\", \"f7 f8\");\n\n do_test(\"idiv 
rax\", \"48 f7 f8\");\n\n do_test(\"idiv r8\", \"49 f7 f8\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 84, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn unary_i() {\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Immediate(Immediate::Imm8(2)),\n\n ));\n\n do_test(Instruction::new_unary(\n\n Mnemonic::Push,\n\n Operand::Immediate(Immediate::Imm32(-2)),\n\n ));\n\n}\n\n\n", "file_path": "x86asm/tests/lib.rs", "rank": 85, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn link() {\n\n let input_files = vec![\"tests/testcases/file1.o\", \"tests/testcases/file2.o\"];\n\n let input_elfs = input_files\n\n .into_iter()\n\n .map(|path| Elf::read_from_file(path))\n\n .collect();\n\n let output_elf = linker::link(input_elfs).unwrap();\n\n let actual_bytes = output_elf.to_bytes();\n\n let expected_bytes = fs::read(\"tests/testcases/file\").unwrap();\n\n assert_eq!(actual_bytes, expected_bytes);\n\n}\n", "file_path": "herja/tests/lib.rs", "rank": 86, "score": 69645.26495609788 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n if !matches!(args.len(), 2 | 3) {\n\n println!(\"Usage: eir <file>\");\n\n return;\n\n }\n\n\n\n let file = args[1].as_str();\n\n let dump = args.get(2).map_or(false, |arg| arg == \"--dump\");\n\n\n\n let mut emu = Emulator::new(0x7C00, 0x7c00);\n\n emu.dump_state = dump;\n\n emu.load_elf(file);\n\n emu.run()\n\n}\n", "file_path": "eir/src/main.rs", "rank": 87, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn ret() {\n\n do_test(\"ret\", \"c3\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 88, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn jmp() {\n\n do_test(\"label: jmp label\", \"e9 fb ff ff ff\"); // skip\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 89, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn sub() {\n\n do_test(\"sub eax,1\", \"83 e8 01\");\n\n do_test(\"sub rax,1\", \"48 83 e8 01\");\n\n do_test(\"sub 
r9,1\", \"49 83 e9 01\");\n\n do_test(\"sub eax,eax\", \"29 c0\");\n\n do_test(\"sub rax,rax\", \"48 29 c0\");\n\n do_test(\"sub rax,r9\", \"4c 29 c8\");\n\n do_test(\"sub r9,rax\", \"49 29 c1\");\n\n do_test(\"sub r9,r9\", \"4d 29 c9\");\n\n do_test(\"sub rax, [rax]\", \"48 2b 00\");\n\n do_test(\"sub rax, [r9]\", \"49 2b 01\");\n\n do_test(\"sub rax, [r12]\", \"49 2b 04 24\");\n\n do_test(\"sub rax, [r13]\", \"49 2b 45 00\");\n\n do_test(\"sub [rax], rax\", \"48 29 00\");\n\n do_test(\"sub [r9], rax\", \"49 29 01\");\n\n do_test(\"sub rax,[rax+8]\", \"48 2b 40 08\");\n\n do_test(\"sub rax,[r9-8]\", \"49 2b 41 f8\");\n\n do_test(\"sub rax,[r9-129]\", \"49 2b 81 7f ff ff ff\");\n\n do_test(\"sub [rax+8],rax\", \"48 29 40 08\");\n\n do_test(\"sub [r9-8],rax\", \"49 29 41 f8\");\n\n do_test(\"sub [r9-129],rax\", \"49 29 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 90, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn byte() {\n\n do_test(\".byte 0\", \"00\"); // skip\n\n do_test(\".byte 42\", \"2a\"); // skip\n\n do_test(\".byte 255\", \"ff\"); // skip\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 91, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn je() {\n\n do_test(\"label: je label\", \"0f 84 fa ff ff ff\"); // skip\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 92, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn pop() {\n\n do_test(\"pop rax\", \"58\");\n\n do_test(\"pop r8\", \"41 58\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 93, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn long() {\n\n do_test(\".long 0\", \"00 00 00 00\"); // skip\n\n do_test(\".long 305419896\", \"78 56 34 12\"); // skip\n\n do_test(\".long -1\", \"ff ff ff ff\"); // skip\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 94, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn movsx() {\n\n do_test(\"movsx rax, byte ptr [rax]\", \"48 0f be 00\");\n\n do_test(\"movsx rax, byte 
ptr [r9]\", \"49 0f be 01\");\n\n do_test(\"movsx rax, byte ptr [r12]\", \"49 0f be 04 24\");\n\n do_test(\"movsx rax, byte ptr [r13]\", \"49 0f be 45 00\");\n\n do_test(\"movsx rax, byte ptr [rax+8]\", \"48 0f be 40 08\");\n\n do_test(\"movsx rax, byte ptr [r9-8]\", \"49 0f be 41 f8\");\n\n do_test(\"movsx rax, byte ptr [r9-129]\", \"49 0f be 81 7f ff ff ff\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 95, "score": 69645.26495609788 }, { "content": "fn main() {\n\n let args: Vec<String> = env::args().collect();\n\n if args.len() < 3 {\n\n println!(\"usage: herja <input_file>... <output_file>\");\n\n std::process::exit(0);\n\n }\n\n\n\n let args_num = args.len() - 1;\n\n let input_files = args[1..args_num].to_vec();\n\n let output_file = args[args_num].clone();\n\n\n\n if let Err(err) = linker::link_to_files(input_files, output_file) {\n\n println!(\"Failed to link: {}\", err);\n\n std::process::exit(1);\n\n }\n\n}\n", "file_path": "herja/src/main.rs", "rank": 96, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn setge() {\n\n do_test(\"setge al\", \"0f 9d c0\");\n\n do_test(\"setge r9b\", \"41 0f 9d c1\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 97, "score": 69645.26495609788 }, { "content": "#[test]\n\nfn push() {\n\n do_test(\"push 1\", \"6a 01\");\n\n do_test(\"push rax\", \"50\");\n\n do_test(\"push r8\", \"41 50\");\n\n}\n\n\n", "file_path": "rota/tests/code.rs", "rank": 98, "score": 69645.26495609788 }, { "content": "fn main() {\n\n match cli::parse_arguments() {\n\n Ok(config) => {\n\n if let Err(err) = compile_to_file(config) {\n\n println!(\"failed to compile:\\n{}\", err);\n\n std::process::exit(1);\n\n }\n\n }\n\n Err(_) => println!(\"Usage: sigrun <input_file> <output_file>\"),\n\n }\n\n}\n", "file_path": "sigrun/src/main.rs", "rank": 99, "score": 69645.26495609788 } ]
Rust
yash-semantics/src/lib.rs
magicant/yash-rs
c80497794c126f3d0c378df8a625c69bee0f8567
pub mod assign; mod command_impl; pub mod command_search; pub mod expansion; mod handle_impl; pub mod redir; mod runner; pub mod trap; use annotate_snippets::display_list::DisplayList; use annotate_snippets::snippet::Snippet; use async_trait::async_trait; use std::borrow::Cow; use yash_env::io::Fd; use yash_env::Env; use yash_syntax::source::pretty::Annotation; use yash_syntax::source::pretty::AnnotationType; use yash_syntax::source::pretty::Message; use yash_syntax::source::Location; #[doc(no_inline)] pub use yash_env::semantics::*; #[async_trait(?Send)] pub trait Command { async fn execute(&self, env: &mut Env) -> Result; } #[async_trait(?Send)] pub trait Handle { async fn handle(&self, env: &mut Env) -> Result; } pub async fn print_error( env: &mut Env, title: Cow<'_, str>, label: Cow<'_, str>, location: &Location, ) { let mut a = vec![Annotation { r#type: AnnotationType::Error, label, location: location.clone(), }]; location.line.source.complement_annotations(&mut a); let message = Message { r#type: AnnotationType::Error, title, annotations: a, }; let mut snippet = Snippet::from(&message); snippet.opt.color = true; let s = format!("{}\n", DisplayList::from(snippet)); let _ = env.system.write_all(Fd::STDERR, s.as_bytes()).await; } pub use runner::read_eval_loop; pub use runner::read_eval_loop_boxed; #[cfg(test)] pub(crate) mod tests { use futures_executor::LocalSpawner; use futures_util::task::LocalSpawnExt; use itertools::Itertools; use std::cell::Cell; use std::cell::RefCell; use std::future::ready; use std::future::Future; use std::ops::ControlFlow::{Break, Continue}; use std::pin::Pin; use std::rc::Rc; use yash_env::builtin::Builtin; use yash_env::builtin::Type::{Intrinsic, Special}; use yash_env::io::Fd; use yash_env::job::Pid; use yash_env::semantics::Divert; use yash_env::semantics::ExitStatus; use yash_env::semantics::Field; use yash_env::system::r#virtual::SystemState; use yash_env::system::Errno; use yash_env::variable::Scalar; use 
yash_env::variable::Scope; use yash_env::variable::Variable; use yash_env::Env; use yash_env::VirtualSystem; #[derive(Clone, Debug)] pub struct LocalExecutor(pub LocalSpawner); impl yash_env::system::r#virtual::Executor for LocalExecutor { fn spawn( &self, task: Pin<Box<dyn Future<Output = ()>>>, ) -> Result<(), Box<dyn std::error::Error>> { self.0 .spawn_local(task) .map_err(|e| Box::new(e) as Box<dyn std::error::Error>) } } pub fn in_virtual_system<F, Fut>(f: F) where F: FnOnce(Env, Pid, Rc<RefCell<SystemState>>) -> Fut, Fut: Future<Output = ()> + 'static, { let system = VirtualSystem::new(); let pid = system.process_id; let state = Rc::clone(&system.state); let mut executor = futures_executor::LocalPool::new(); state.borrow_mut().executor = Some(Rc::new(LocalExecutor(executor.spawner()))); let env = Env::with_system(Box::new(system)); let shared_system = env.system.clone(); let task = f(env, pid, Rc::clone(&state)); let done = Rc::new(Cell::new(false)); let done_2 = Rc::clone(&done); executor .spawner() .spawn_local(async move { task.await; done.set(true); }) .unwrap(); while !done_2.get() { executor.run_until_stalled(); shared_system.select(false).unwrap(); SystemState::select_all(&state); } } fn return_builtin_main( _env: &mut Env, mut args: Vec<Field>, ) -> Pin<Box<dyn Future<Output = yash_env::builtin::Result>>> { let divert = match args.get(1) { Some(field) if field.value == "-n" => { args.remove(1); Continue(()) } _ => Break(Divert::Return), }; let exit_status = match args.get(1) { Some(field) => field.value.parse().unwrap_or(2), None => 0, }; Box::pin(ready((ExitStatus(exit_status), divert))) } pub fn return_builtin() -> Builtin { Builtin { r#type: Special, execute: return_builtin_main, } } fn local_builtin_main( env: &mut Env, args: Vec<Field>, ) -> Pin<Box<dyn Future<Output = yash_env::builtin::Result> + '_>> { Box::pin(async move { for Field { value, origin } in args.into_iter().skip(1) { if let Some(eq_index) = value.find('=') { let name = 
value[..eq_index].to_owned(); let value = value[eq_index + 1..].to_owned(); let value = Variable { value: Scalar(value), last_assigned_location: Some(origin), is_exported: false, read_only_location: None, }; if let Err(error) = env.variables.assign(Scope::Local, name, value) { unimplemented!("assignment error: {:?}", error); } } else { let name = value; if let Some(var) = env.variables.get(&name) { if let Scalar(value) = &var.value { let line = format!("{}={}\n", name, value); if let Err(errno) = env.system.write_all(Fd::STDOUT, line.as_bytes()).await { unimplemented!("write error: {:?}", errno); } } } } } (ExitStatus::SUCCESS, Continue(())) }) } pub fn local_builtin() -> Builtin { Builtin { r#type: Intrinsic, execute: local_builtin_main, } } fn echo_builtin_main( env: &mut Env, args: Vec<Field>, ) -> Pin<Box<dyn Future<Output = yash_env::builtin::Result> + '_>> { Box::pin(async move { let fields = (&args[1..]).iter().map(|f| &f.value).format(" "); let message = format!("{}\n", fields); let result = match env.system.write_all(Fd::STDOUT, message.as_bytes()).await { Ok(_) => ExitStatus::SUCCESS, Err(_) => ExitStatus::FAILURE, }; (result, Continue(())) }) } pub fn echo_builtin() -> Builtin { Builtin { r#type: Intrinsic, execute: echo_builtin_main, } } fn cat_builtin_main( env: &mut Env, _args: Vec<Field>, ) -> Pin<Box<dyn Future<Output = yash_env::builtin::Result> + '_>> { async fn inner(env: &mut Env) -> std::result::Result<(), Errno> { let mut buffer = [0; 1024]; loop { let count = env.system.read_async(Fd::STDIN, &mut buffer).await?; if count == 0 { break Ok(()); } env.system.write_all(Fd::STDOUT, &buffer[..count]).await?; } } Box::pin(async move { let result = match inner(env).await { Ok(_) => ExitStatus::SUCCESS, Err(_) => ExitStatus::FAILURE, }; (result, Continue(())) }) } pub fn cat_builtin() -> Builtin { Builtin { r#type: Intrinsic, execute: cat_builtin_main, } } }
pub mod assign; mod command_impl; pub mod command_search; pub mod expansion; mod handle_impl; pub mod redir; mod runner; pub mod trap; use annotate_snippets::display_list::DisplayList; use annotate_snippets::snippet::Snippet; use async_trait::async_trait; use std::borrow::Cow; use yash_env::io::Fd; use yash_env::Env; use yash_syntax::source::pretty::Annotation; use yash_syntax::source::pretty::AnnotationType; use yash_syntax::source::pretty::Message; use yash_syntax::source::Location; #[doc(no_inline)] pub use yash_env::semantics::*; #[async_trait(?Send)] pub trait Command { async fn execute(&self, env: &mut Env) -> Result; } #[async_trait(?Send)] pub trait Handle { async fn handle(&self, env: &mut Env) -> Result; } pub async fn print_error( env: &mut Env, title: Cow<'_, str>, label: Cow<'_, str>, location: &Location, ) { let mut a = vec![Annotation { r#type: AnnotationType::Error, label, location: location.clone(), }]; location.line.source.complement_annotations(&mut a); let message = Message { r#type: AnnotationType::Error, title, annotations: a, }; let mut snippet = Snippet::from(&message); snippet.opt.color = true; let s = format!("{}\n", DisplayList::from(snippet)); let _ = env.system.write_all(Fd::STDERR, s.as_bytes()).await; } pub use runner::read_eval_loop; pub use runner::read_eval_loop_boxed; #[cfg(test)] pub(crate) mod tests { use futures_executor::LocalSpawner; use futures_util::task::LocalSpawnExt; use itertools::Itertools; use std::cell::Cell; use std::cell::RefCell; use std::future::ready; use std::future::Future; use std::ops::ControlFlow::{Break, Continue}; use std::pin::Pin; use std::rc::Rc; use yash_env::builtin::Builtin; use yash_env::builtin::Type::{Intrinsic, Special}; use yash_env::io::Fd; use yash_env::job::Pid; use yash_env::semantics::Divert; use yash_env::semantics::ExitStatus; use yash_env::semantics::Field; use yash_env::system::r#virtual::SystemState; use yash_env::system::Errno; use yash_env::variable::Scalar; use 
yash_env::variable::Scope; use yash_env::variable::Variable; use yash_env::Env; use yash_env::VirtualSystem; #[derive(Clone, Debug)] pub struct LocalExecutor(pub LocalSpawner); impl yash_env::system::r#virtual::Executor for LocalExecutor { fn spawn( &self, task: Pin<Box<dyn Future<Output = ()>>>, ) -> Result<(), Box<dyn std::error::Error>> { self.0 .spawn_local(task) .map_err(|e| Box::new(e) as Box<dyn std::error::Error>) } } pub fn in_virtual_system<F, Fut>(f: F) where F: FnOnce(Env, Pid, Rc<RefCell<SystemState>>) -> Fut, Fut: Future<Output = ()> + 'static, { let system = VirtualSystem::new(); let pid = system.process_id; let state = Rc::clone(&system.state); let mut executor = futures_executor::LocalPool::new(); state.borrow_mut().executor = Some(Rc::new(LocalExecutor(executor.spawner()))); let env = Env::with_system(Box::new(system)); let shared_system = env.system.clone(); let task = f(env, pid, Rc::clone(&state)); let done = Rc::new(Cell::new(false)); let done_2 = Rc::clone(&done); executor .spawner() .spawn_local(async move { task.await; done.set(true); }) .unwrap(); while !done_2.get() { executor.run_until_stalled(); shared_system.select(false).unwrap(); SystemState::select_all(&state); } } fn return_builtin_main( _env: &mut Env, mut args: Vec<Field>, ) -> Pin<Box<dyn Future<Output = yash_env::builtin::Result>>> { let divert = match args.get(1) { Some(field) if field.value == "-n" => { args.remove(1); Continue(()) } _ => Break(Divert::Return), }; let exit_status = match args.get(1) { Some(field) => field.value.parse().unwrap_or(2), None => 0, }; Box::pin(ready((ExitStatus(exit_status), divert))) } pub fn return_builtin() -> Builtin { Builtin { r#type: Special, execute: return_builtin_main, } } fn local_builtin_main( env: &mut Env, args: Vec<Field>, ) -> Pin<Box<dyn Future<Output = yash_env::builtin::Result> + '_>> { Box::pin(async move { for Field { value, origin } in args.into_iter().skip(1) { if let Some(eq_index) = value.find('=') { let name = 
value[..eq_index].to_owned(); let value = value[eq_index + 1..].to_owned(); let value = Variable { value: Scalar(value), last_assigned_location: Some(origin), is_exported: false, read_only_location: None, }; if let Err(error) = env.variables.assign(Scope::Local, name, value) { unimplemented!("assignment error: {:?}", error); } } else { let name = value; if let Some(var) = env.variables.get(&name) { if let Scalar(value) = &var.value { let line = format!("{}={}\n", name, value); if let Err(errno) = env.system.write_all(Fd::STDOUT, line.as_bytes()).await { unimplemented!("write error: {:?}", errno); } } } } } (ExitStatus::SUCCESS, Continue(())) }) } pub fn local_builtin() -> Builtin { Builtin { r#type: Intrinsic, execute: local_builtin_main, } } fn echo_builtin_main( env: &mut Env, args: Vec<Field>, ) -> Pin<Box<dyn Future<Output = yash_env::builtin::Result> + '_>> { Box::pin(async move { let fields = (&args[1..]).iter().map(|f| &f.value).format(" "); let message = format!("{}\n", fields); let result = match env.system.write_all(Fd::STDOUT, message.as_bytes()).await { Ok(_) => ExitStatus::SUCCESS, Err(_) => ExitStatus::FAILURE, }; (result, Continue(())) }) }
fn cat_builtin_main( env: &mut Env, _args: Vec<Field>, ) -> Pin<Box<dyn Future<Output = yash_env::builtin::Result> + '_>> { async fn inner(env: &mut Env) -> std::result::Result<(), Errno> { let mut buffer = [0; 1024]; loop { let count = env.system.read_async(Fd::STDIN, &mut buffer).await?; if count == 0 { break Ok(()); } env.system.write_all(Fd::STDOUT, &buffer[..count]).await?; } } Box::pin(async move { let result = match inner(env).await { Ok(_) => ExitStatus::SUCCESS, Err(_) => ExitStatus::FAILURE, }; (result, Continue(())) }) } pub fn cat_builtin() -> Builtin { Builtin { r#type: Intrinsic, execute: cat_builtin_main, } } }
pub fn echo_builtin() -> Builtin { Builtin { r#type: Intrinsic, execute: echo_builtin_main, } }
function_block-full_function
[ { "content": "/// Implementation of the return built-in.\n\n///\n\n/// See the [module-level documentation](self) for details.\n\npub fn builtin_main_sync<E: Env>(_env: &mut E, args: Vec<Field>) -> Result {\n\n // TODO Parse arguments correctly\n\n // TODO Reject returning from an interactive session\n\n let mut i = args.iter().skip(1).peekable();\n\n let no_return = matches!(i.peek(), Some(Field { value, .. }) if value == \"-n\");\n\n if no_return {\n\n i.next();\n\n }\n\n let exit_status = match i.next() {\n\n Some(field) => field.value.parse().unwrap_or(2),\n\n None => 0,\n\n };\n\n let flow = if no_return {\n\n Continue(())\n\n } else {\n\n Break(Divert::Return)\n\n };\n\n (ExitStatus(exit_status), flow)\n\n}\n\n\n", "file_path": "yash-builtin/src/return.rs", "rank": 0, "score": 385433.0804425636 }, { "content": "/// Implementation of the alias built-in.\n\npub fn builtin_main_sync<E: Env>(env: &mut E, args: Vec<Field>) -> Result {\n\n // TODO support options\n\n // TODO print alias definitions if there are no operands\n\n\n\n let mut args = args.into_iter();\n\n args.next(); // ignore the first argument, which is the command name\n\n\n\n if args.as_ref().is_empty() {\n\n for alias in env.alias_set() {\n\n // TODO should print via IoEnv rather than directly to stdout\n\n println!(\"{}={}\", &alias.0.name, &alias.0.replacement);\n\n }\n\n return (ExitStatus::SUCCESS, Continue(()));\n\n }\n\n\n\n for Field { value, origin } in args {\n\n if let Some(eq_index) = value.find('=') {\n\n let name = value[..eq_index].to_owned();\n\n // TODO reject invalid name\n\n let replacement = value[eq_index + 1..].to_owned();\n\n let entry = HashEntry::new(name, replacement, false, origin);\n\n env.alias_set_mut().replace(entry);\n\n } else {\n\n // TODO print alias definition\n\n }\n\n }\n\n\n\n (ExitStatus::SUCCESS, Continue(()))\n\n}\n\n\n", "file_path": "yash-builtin/src/alias.rs", "rank": 1, "score": 381755.8693290786 }, { "content": "/// Implementation of the readonly 
built-in.\n\npub fn builtin_main_sync<E: Env>(env: &mut E, args: Vec<Field>) -> Result {\n\n // TODO support options\n\n\n\n let mut args = args.into_iter();\n\n args.next(); // ignore the first argument, which is the command name\n\n\n\n // TODO print read-only variables if there are no operands\n\n\n\n for Field { value, origin } in args {\n\n if let Some(eq_index) = value.find('=') {\n\n let name = value[..eq_index].to_owned();\n\n // TODO reject invalid name\n\n let value = value[eq_index + 1..].to_owned();\n\n let location = origin.clone();\n\n // TODO Keep the variable exported if already exported\n\n // TODO Apply all-export option\n\n let value = Variable {\n\n value: Scalar(value),\n\n last_assigned_location: Some(origin),\n\n is_exported: false,\n", "file_path": "yash-builtin/src/readonly.rs", "rank": 2, "score": 381755.8693290786 }, { "content": "/// Executor that can start new async tasks.\n\n///\n\n/// This trait abstracts the executor interface so that [`SystemState`] does not\n\n/// depend on a specific executor implementation.\n\n///\n\n/// Note that [`VirtualSystem`] does not support multi-threading. 
The executor\n\n/// should run concurrent tasks on a single thread.\n\npub trait Executor: Debug {\n\n /// Starts a new async task.\n\n ///\n\n /// Returns `Ok(())` if the task has been started successfully and `Err(_)`\n\n /// otherwise.\n\n fn spawn(\n\n &self,\n\n task: Pin<Box<dyn Future<Output = ()>>>,\n\n ) -> Result<(), Box<dyn std::error::Error>>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::semantics::ExitStatus;\n\n use futures_executor::LocalPool;\n\n use std::ffi::CString;\n\n\n\n impl Executor for futures_executor::LocalSpawner {\n\n fn spawn(\n", "file_path": "yash-env/src/system/virtual.rs", "rank": 3, "score": 345845.1966330439 }, { "content": "/// Opens a file for redirection.\n\nfn open_file<E: Env>(env: &mut E, option: OFlag, path: Field) -> Result<(Fd, Location), Error> {\n\n let Field { value, origin } = path;\n\n let path = match CString::new(value) {\n\n Ok(path) => path,\n\n Err(e) => {\n\n return Err(Error {\n\n cause: ErrorCause::NulByte(e),\n\n location: origin,\n\n })\n\n }\n\n };\n\n\n\n let mode = Mode::S_IRUSR\n\n | Mode::S_IWUSR\n\n | Mode::S_IRGRP\n\n | Mode::S_IWGRP\n\n | Mode::S_IROTH\n\n | Mode::S_IWOTH;\n\n\n\n match env.open(&path, option, mode) {\n", "file_path": "yash-semantics/src/redir.rs", "rank": 4, "score": 342966.4775936122 }, { "content": "/// Performs command search.\n\npub fn search<E: SearchEnv>(env: &mut E, name: &str) -> Option<Target> {\n\n if name.contains('/') {\n\n return if let Ok(path) = CString::new(name) {\n\n Some(Target::External { path })\n\n } else {\n\n None\n\n };\n\n }\n\n\n\n let builtin = env.builtins().get(name).copied();\n\n if let Some(builtin) = builtin {\n\n if builtin.r#type == Special {\n\n return Some(builtin.into());\n\n }\n\n }\n\n\n\n if let Some(function) = env.functions().get(name) {\n\n return Some(function.0.clone().into());\n\n }\n\n\n", "file_path": "yash-semantics/src/command_search.rs", "rank": 5, "score": 325846.7418668214 }, { "content": "/// API to 
the system-managed parts of the environment.\n\n///\n\n/// The `System` trait defines a collection of methods to access the underlying\n\n/// operating system from the shell as an application program. There are two\n\n/// substantial implementors for this trait:\n\n/// [`RealSystem`](self::real::RealSystem) and\n\n/// [`VirtualSystem`](self::virtual::VirtualSystem). Another implementor\n\n/// is [`SharedSystem`], which wraps a `System` instance to extend the interface\n\n/// with asynchronous methods.\n\npub trait System: Debug {\n\n /// Whether there is an executable file at the specified path.\n\n fn is_executable_file(&self, path: &CStr) -> bool;\n\n\n\n /// Creates an unnamed pipe.\n\n ///\n\n /// This is a thin wrapper around the `pipe` system call.\n\n /// If successful, returns the reading and writing ends of the pipe.\n\n fn pipe(&mut self) -> nix::Result<(Fd, Fd)>;\n\n\n\n /// Duplicates a file descriptor.\n\n ///\n\n /// This is a thin wrapper around the `fcntl` system call that opens a new\n\n /// FD that shares the open file description with `from`. The new FD will be\n\n /// the minimum unused FD not less than `to_min`. The `cloexec` parameter\n\n /// specifies whether the new FD should have the `CLOEXEC` flag set. If\n\n /// successful, returns `Ok(new_fd)`. On error, returns `Err(_)`.\n\n fn dup(&mut self, from: Fd, to_min: Fd, cloexec: bool) -> nix::Result<Fd>;\n\n\n\n /// Duplicates a file descriptor.\n", "file_path": "yash-env/src/system.rs", "rank": 6, "score": 318633.357931006 }, { "content": "/// Searches the `$PATH` for an executable file.\n\n///\n\n/// Returns the path if successful. 
Note that the returned path may not be\n\n/// absolute if the `$PATH` contains a relative path.\n\npub fn search_path<E: PathEnv>(env: &mut E, name: &str) -> Option<CString> {\n\n if let Some(path) = env.path() {\n\n for dir in path.value.split() {\n\n let mut file = PathBuf::new();\n\n file.push(dir);\n\n file.push(name);\n\n if let Ok(file) = CString::new(file.into_os_string().into_vec()) {\n\n if env.is_executable_file(&file) {\n\n return Some(file);\n\n }\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n\n#[allow(clippy::field_reassign_with_default)]\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "yash-semantics/src/command_search.rs", "rank": 7, "score": 316474.1850058396 }, { "content": "/// Expands a special parameter.\n\n///\n\n/// Returns `None` if the `name` is not a special parameter name.\n\nfn expand_special_parameter<'a, E: Env>(env: &'a mut E, name: &str) -> Option<ParamValue<'a>> {\n\n let mut chars = name.chars();\n\n let name_char = chars.next()?;\n\n if chars.next().is_some() {\n\n // A special parameter's name is always a single character.\n\n return None;\n\n }\n\n match name_char {\n\n '@' => todo!(),\n\n '*' => todo!(),\n\n '#' => todo!(),\n\n '?' => Some(env.exit_status().to_string().into()),\n\n '-' => todo!(),\n\n '$' => todo!(),\n\n '!' 
=> Some(env.last_async_pid().to_string().into()),\n\n '0' => todo!(),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "yash-semantics/src/expansion/param.rs", "rank": 8, "score": 311854.39492331556 }, { "content": "fn expand_variable<'a, E: Env>(env: &'a E, name: &str) -> ParamValue<'a> {\n\n match env.get_variable(name) {\n\n Some(v) => ParamValue::from(&v.value),\n\n None => ParamValue::Unset,\n\n }\n\n}\n\n\n\n/// Reference to a `RawParam` or `BracedParam`.\n\npub struct ParamRef<'a> {\n\n name: &'a str,\n\n #[allow(unused)] // TODO Use this\n\n location: &'a Location,\n\n}\n\n\n\nimpl<'a> ParamRef<'a> {\n\n pub fn from_name_and_location(name: &'a str, location: &'a Location) -> Self {\n\n ParamRef { name, location }\n\n }\n\n}\n\n\n", "file_path": "yash-semantics/src/expansion/param.rs", "rank": 9, "score": 284968.7917838697 }, { "content": "#[async_trait(?Send)]\n\npub trait ChildProcess: Debug {\n\n /// Runs a task in the child process.\n\n ///\n\n /// When called in the parent process, this function returns the process ID\n\n /// of the child. When in the child, this function never returns.\n\n async fn run(&mut self, env: &mut Env, task: ChildProcessTask) -> Pid;\n\n // TODO When unsized_fn_params is stabilized,\n\n // 1. `&mut self` should be `self`\n\n // 2. `task` should be `FnOnce` rather than `FnMut`\n\n}\n\n\n\n/// System shared by a reference counter.\n\n///\n\n/// A `SharedSystem` is a reference-counted container of a [`System`] instance\n\n/// accompanied with an internal state for supporting asynchronous interactions\n\n/// with the system. As it is reference-counted, cloning a `SharedSystem`\n\n/// instance only increments the reference count without cloning the backing\n\n/// system instance. 
This behavior allows calling `SharedSystem`'s methods\n\n/// concurrently from different `async` tasks that each have a `SharedSystem`\n\n/// instance sharing the same state.\n", "file_path": "yash-env/src/system.rs", "rank": 10, "score": 280192.80657171016 }, { "content": "/// Part of the shell execution environment the trap built-in depends on.\n\npub trait Env: Stdout + Stderr {\n\n /// Returns an iterator for currently configured trap actions.\n\n fn iter(&self) -> Iter<'_>;\n\n\n\n /// Returns the trap action for a signal.\n\n ///\n\n /// This function returns a pair of optional trap states. The first is the\n\n /// currently configured trap action, and the second is the action set\n\n /// before entering the current subshell environment.\n\n ///\n\n /// This function does not reflect the initial signal actions the shell\n\n /// inherited on startup.\n\n fn get_trap(&self, signal: Signal) -> (Option<&TrapState>, Option<&TrapState>);\n\n\n\n /// Sets a trap action for a signal.\n\n ///\n\n /// This function installs a signal handler to the specified underlying\n\n /// system.\n\n ///\n\n /// If `override_ignore` is `false`, you cannot set a trap for a signal that\n", "file_path": "yash-builtin/src/trap.rs", "rank": 11, "score": 273043.40928825946 }, { "content": "/// System interface for signal handling configuration.\n\npub trait SignalSystem {\n\n /// Sets how a signal is handled.\n\n ///\n\n /// This function updates the signal blocking mask and the signal action for\n\n /// the specified signal and remembers the previous configuration for\n\n /// restoration.\n\n ///\n\n /// Returns the previous handler.\n\n fn set_signal_handling(\n\n &mut self,\n\n signal: Signal,\n\n handling: SignalHandling,\n\n ) -> Result<SignalHandling, Errno>;\n\n}\n\n\n\n/// Action performed when a signal is delivered to the shell process.\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub enum Trap {\n\n /// Performs the default signal action.\n\n ///\n", "file_path": 
"yash-env/src/trap.rs", "rank": 12, "score": 269322.3677195731 }, { "content": "/// Abstract handle to perform I/O with.\n\npub trait OpenFileDescription: Debug {\n\n /// Returns true if you can read from this open file description.\n\n fn is_readable(&self) -> bool;\n\n\n\n /// Returns true if you can write to this open file description.\n\n fn is_writable(&self) -> bool;\n\n\n\n /// Returns true if you can read from this open file description without\n\n /// blocking.\n\n fn is_ready_for_reading(&self) -> bool;\n\n\n\n /// Returns true if you can write to this open file description without\n\n /// blocking.\n\n fn is_ready_for_writing(&self) -> bool;\n\n\n\n /// Reads from this open file description.\n\n ///\n\n /// Returns the number of bytes successfully read.\n\n fn read(&mut self, buffer: &mut [u8]) -> nix::Result<usize>;\n\n\n", "file_path": "yash-env/src/system/virtual/io.rs", "rank": 13, "score": 265652.0172758441 }, { "content": "/// Part of the shell execution environment the readonly built-in depends on.\n\npub trait Env {\n\n /// Gets a reference to the variable with the specified name.\n\n #[must_use]\n\n fn get_variable(&self, name: &str) -> Option<&Variable>;\n\n\n\n /// Assigns a variable.\n\n fn assign_variable(\n\n &mut self,\n\n scope: Scope,\n\n name: String,\n\n value: Variable,\n\n ) -> std::result::Result<Option<Variable>, ReadOnlyError>;\n\n\n\n // TODO stdout, stderr\n\n}\n\n\n\nimpl Env for yash_env::Env {\n\n fn get_variable(&self, name: &str) -> Option<&Variable> {\n\n self.variables.get(name)\n\n }\n\n fn assign_variable(\n\n &mut self,\n\n scope: Scope,\n\n name: String,\n\n value: Variable,\n\n ) -> std::result::Result<Option<Variable>, ReadOnlyError> {\n\n self.variables.assign(scope, name, value)\n\n }\n\n}\n\n\n", "file_path": "yash-builtin/src/readonly.rs", "rank": 14, "score": 254918.77418493055 }, { "content": "/// Part of the shell execution environment the alias built-in depends on.\n\npub trait Env {\n\n /// Accesses the 
alias set in the environment.\n\n fn alias_set(&self) -> &AliasSet;\n\n\n\n /// Accesses the alias set in the environment.\n\n fn alias_set_mut(&mut self) -> &mut AliasSet;\n\n // TODO stdout, stderr\n\n}\n\n\n\nimpl Env for yash_env::Env {\n\n fn alias_set(&self) -> &AliasSet {\n\n &self.aliases\n\n }\n\n fn alias_set_mut(&mut self) -> &mut AliasSet {\n\n &mut self.aliases\n\n }\n\n}\n\n\n", "file_path": "yash-builtin/src/alias.rs", "rank": 15, "score": 254918.77418493055 }, { "content": "/// Part of the shell execution environment the return built-in depends on.\n\npub trait Env {\n\n // TODO Current value of $?\n\n // TODO Current execution context (stack trace)\n\n // TODO stderr\n\n}\n\n\n\nimpl Env for yash_env::Env {}\n\n\n", "file_path": "yash-builtin/src/return.rs", "rank": 16, "score": 254918.77418493055 }, { "content": "/// Part of the shell execution environment that provides functionalities for\n\n/// performing redirections.\n\npub trait Env: crate::expansion::Env {\n\n fn dup(&mut self, from: Fd, to_min: Fd, cloexec: bool) -> Result<Fd, Errno>;\n\n fn open(&mut self, path: &CStr, option: OFlag, mode: Mode) -> Result<Fd, Errno>;\n\n}\n\n\n\nimpl Env for yash_env::Env {\n\n fn dup(&mut self, from: Fd, to_min: Fd, cloexec: bool) -> Result<Fd, Errno> {\n\n self.system.dup(from, to_min, cloexec)\n\n }\n\n fn open(&mut self, path: &CStr, option: OFlag, mode: Mode) -> Result<Fd, Errno> {\n\n self.system.open(path, option, mode)\n\n }\n\n}\n\n\n\nimpl<E: Env> Env for crate::expansion::ExitStatusAdapter<'_, E> {\n\n fn dup(&mut self, from: Fd, to_min: Fd, cloexec: bool) -> Result<Fd, Errno> {\n\n (**self).dup(from, to_min, cloexec)\n\n }\n\n fn open(&mut self, path: &CStr, option: OFlag, mode: Mode) -> Result<Fd, Errno> {\n\n (**self).open(path, option, mode)\n\n }\n\n}\n\n\n", "file_path": "yash-semantics/src/redir.rs", "rank": 17, "score": 240621.13927755863 }, { "content": "#[async_trait(?Send)]\n\npub trait Env: std::fmt::Debug {\n\n /// Gets a reference 
to the variable with the specified name.\n\n #[must_use]\n\n fn get_variable(&self, name: &str) -> Option<&Variable>;\n\n\n\n /// Assigns a variable.\n\n fn assign_variable(\n\n &mut self,\n\n scope: Scope,\n\n name: String,\n\n value: Variable,\n\n ) -> std::result::Result<Option<Variable>, ReadOnlyError>;\n\n\n\n /// Returns a reference to the positional parameters.\n\n fn positional_params(&self) -> &Variable;\n\n\n\n /// Returns a mutable reference to the positional parameters.\n\n fn positional_params_mut(&mut self) -> &mut Variable;\n\n\n\n /// Gets the exit status of the last command.\n", "file_path": "yash-semantics/src/expansion.rs", "rank": 18, "score": 237987.85251644728 }, { "content": "/// Expands a positional parameter.\n\n///\n\n/// Returns `None` if the `name` is not a positive integer.\n\nfn expand_positional_param<'a, E: Env>(env: &'a E, name: &str) -> Option<ParamValue<'a>> {\n\n let index_0 = match name.parse::<usize>() {\n\n Ok(index_1) if index_1 > 0 => index_1 - 1,\n\n Err(error) if error.kind() == &PosOverflow => return Some(ParamValue::Unset),\n\n _ => return None, // Not a positional parameter\n\n };\n\n let params = env.positional_params();\n\n match &params.value {\n\n Value::Scalar(value) => match index_0 {\n\n 0 => Some(ParamValue::from(value)),\n\n _ => Some(ParamValue::Unset),\n\n },\n\n Value::Array(values) => match values.get(index_0) {\n\n Some(value) => Some(ParamValue::from(value)),\n\n None => Some(ParamValue::Unset),\n\n },\n\n }\n\n}\n\n\n", "file_path": "yash-semantics/src/expansion/param.rs", "rank": 19, "score": 237855.20575107165 }, { "content": "/// Implementation of the trap built-in.\n\n///\n\n/// This function calls [`builtin_body`] and wraps the result in a pinned box.\n\npub fn builtin_main(\n\n env: &mut yash_env::Env,\n\n args: Vec<Field>,\n\n) -> Pin<Box<dyn Future<Output = Result> + '_>> {\n\n Box::pin(builtin_body(env, args))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use 
futures_executor::block_on;\n\n use futures_util::future::FutureExt;\n\n use std::rc::Rc;\n\n use yash_env::io::Fd;\n\n use yash_env::system::SignalHandling;\n\n use yash_env::Env;\n\n use yash_env::VirtualSystem;\n\n\n\n #[test]\n\n fn setting_trap_to_ignore() {\n", "file_path": "yash-builtin/src/trap.rs", "rank": 20, "score": 237728.57424827278 }, { "content": "/// Converts a source code string into an iterator of [Line]s.\n\npub fn lines(code: &str, source: Source) -> Lines<'_> {\n\n Lines {\n\n source,\n\n code,\n\n number: NonZeroU64::new(1).unwrap(),\n\n }\n\n}\n\n\n\n/// Position of a character in source code.\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub struct Location {\n\n /// Line that contains the character.\n\n pub line: Rc<Line>,\n\n\n\n /// Character position in the line. Counted from 1.\n\n ///\n\n /// Characters are counted in the number of Unicode scalar values, not bytes.\n\n pub column: NonZeroU64,\n\n}\n\n\n", "file_path": "yash-syntax/src/source.rs", "rank": 21, "score": 227707.7299551204 }, { "content": "/// Stack for pushing and popping contexts.\n\n///\n\n/// Instead of calling methods of `ContextStack` directly, you should use\n\n/// [`ScopeGuard`] which calls the methods when appropriate for you.\n\npub trait ContextStack {\n\n /// Pushes a new empty context.\n\n fn push_context(&mut self, context_type: ContextType);\n\n\n\n /// Pops the last-pushed context.\n\n ///\n\n /// This function may panic if there is no context that can be popped.\n\n fn pop_context(&mut self);\n\n}\n\n\n\nimpl ContextStack for VariableSet {\n\n fn push_context(&mut self, context_type: ContextType) {\n\n self.contexts.push(Context::new(context_type));\n\n }\n\n\n\n /// Pops the last-pushed context.\n\n ///\n\n /// This function removes the topmost context from the internal stack of\n\n /// contexts in the `VariableSet`, thereby removing all the variables in the\n\n /// context.\n", "file_path": "yash-env/src/variable.rs", "rank": 22, "score": 
225660.20109146886 }, { "content": "fn is_executable(path: &CStr) -> bool {\n\n let flags = AccessFlags::X_OK;\n\n access(path, flags).is_ok()\n\n // TODO Should use eaccess\n\n}\n\n\n", "file_path": "yash-env/src/system/real.rs", "rank": 23, "score": 224287.4854023199 }, { "content": "/// Part of the shell execution environment command path search depends on.\n\npub trait PathEnv {\n\n /// Accesses the `$PATH` variable in the environment.\n\n fn path(&self) -> Option<&Variable>;\n\n /// Whether there is an executable file at the specified path.\n\n fn is_executable_file(&self, path: &CStr) -> bool;\n\n // TODO Cache the results of external utility search\n\n}\n\n\n", "file_path": "yash-semantics/src/command_search.rs", "rank": 24, "score": 220839.15594378745 }, { "content": "/// Part of the shell execution environment command search depends on.\n\npub trait SearchEnv: PathEnv {\n\n /// Accesses the built-in set in the environment.\n\n fn builtins(&self) -> &HashMap<&'static str, Builtin>;\n\n /// Accesses the function set in the environment.\n\n fn functions(&self) -> &FunctionSet;\n\n}\n\n\n\nimpl PathEnv for Env {\n\n fn path(&self) -> Option<&Variable> {\n\n self.variables.get(\"PATH\")\n\n }\n\n fn is_executable_file(&self, path: &CStr) -> bool {\n\n self.system.is_executable_file(path)\n\n }\n\n}\n\n\n\nimpl SearchEnv for Env {\n\n fn builtins(&self) -> &HashMap<&'static str, Builtin> {\n\n &self.builtins\n\n }\n\n fn functions(&self) -> &FunctionSet {\n\n &self.functions\n\n }\n\n}\n\n\n", "file_path": "yash-semantics/src/command_search.rs", "rank": 25, "score": 218217.2720209561 }, { "content": "/// Parses command-line arguments into options and operands.\n\n///\n\n/// The first argument is always dropped and the remaining arguments are parsed.\n\n///\n\n/// If successful, returns a pair of option occurrences and operands.\n\npub fn parse_arguments<'a>(\n\n option_specs: &'a [OptionSpec<'a>],\n\n mode: Mode,\n\n arguments: Vec<Field>,\n\n) -> 
Result<(Vec<OptionOccurrence<'a>>, Vec<Field>), Error<'a>> {\n\n let mut arguments = arguments.into_iter().skip(1).peekable();\n\n\n\n let mut option_occurrences = vec![];\n\n loop {\n\n if parse_short_options(option_specs, &mut arguments, &mut option_occurrences)? {\n\n continue;\n\n }\n\n if let Some(occurrence) = parse_long_option(option_specs, mode, &mut arguments)? {\n\n option_occurrences.push(occurrence);\n\n continue;\n\n }\n\n break;\n\n }\n\n\n\n arguments.next_if(|argument| argument.value == \"--\");\n", "file_path": "yash-builtin/src/common/arg.rs", "rank": 26, "score": 216306.21575951736 }, { "content": "/// Polls the given future, assuming it returns `Ready`.\n\nfn unwrap_ready<F: Future>(f: F) -> <F as Future>::Output {\n\n use futures_util::future::FutureExt;\n\n f.now_or_never()\n\n .expect(\"Expected Ready but received Pending\")\n\n}\n\n\n", "file_path": "yash-syntax/src/parser/from_str.rs", "rank": 27, "score": 216160.60604465916 }, { "content": "#[derive(Clone, Debug)]\n\nstruct SignalState {\n\n /// User signal state that is effective in the current environment.\n\n current_user_state: UserSignalState,\n\n\n\n /// User signal state that was effective in the parent environment.\n\n parent_user_state: Option<UserSignalState>,\n\n\n\n /// Whether the internal handler has been installed in the current environment.\n\n internal_handler_enabled: bool,\n\n}\n\n\n\n/// Iterator of trap actions configured in a [trap set](TrapSet).\n\n///\n\n/// [`TrapSet::iter`] returns this type of iterator.\n\n#[must_use]\n\npub struct Iter<'a> {\n\n inner: std::collections::btree_map::Iter<'a, Signal, SignalState>,\n\n}\n\n\n\nimpl<'a> Iterator for Iter<'a> {\n", "file_path": "yash-env/src/trap.rs", "rank": 29, "score": 210072.3751342734 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct AsyncTime {\n\n timeouts: BinaryHeap<Reverse<Timeout>>,\n\n}\n\n\n", "file_path": "yash-env/src/system.rs", "rank": 30, "score": 209527.5385525548 }, { "content": 
"#[derive(Clone, Debug, Default)]\n\nstruct AsyncSignal {\n\n awaiters: Vec<Weak<RefCell<SignalStatus>>>,\n\n}\n\n\n", "file_path": "yash-env/src/system.rs", "rank": 31, "score": 209527.5385525548 }, { "content": "#[derive(Clone, Debug, Default)]\n\nstruct AsyncIo {\n\n readers: Vec<FdAwaiter>,\n\n writers: Vec<FdAwaiter>,\n\n}\n\n\n", "file_path": "yash-env/src/system.rs", "rank": 32, "score": 209527.5385525548 }, { "content": "/// Quotes the argument.\n\n///\n\n/// If the argument needs no quoting, the return value is `Borrowed(s)`.\n\n/// Otherwise, it is `Owned(new_quoted_string)`.\n\n///\n\n/// See the [module doc](self) for more details.\n\npub fn quote(s: &str) -> Cow<'_, str> {\n\n if !s.is_empty() && !str_needs_quoting(s) {\n\n return Borrowed(s);\n\n }\n\n\n\n if s.find('\\'').is_none() {\n\n return Owned(format!(\"'{}'\", s));\n\n }\n\n\n\n let mut result = String::with_capacity(s.len().saturating_add(8));\n\n result.push('\"');\n\n for c in s.chars() {\n\n if matches!(c, '\"' | '`' | '$' | '\\\\') {\n\n result.push('\\\\');\n\n }\n\n result.push(c);\n\n }\n\n result.push('\"');\n\n Owned(result)\n\n}\n\n\n", "file_path": "yash-quote/src/lib.rs", "rank": 33, "score": 205569.29100596334 }, { "content": "/// Finds an option spec that matches the given long option name.\n\n///\n\n/// Returns `Err(all_matched_options)` if there is no match or more than one match.\n\nfn long_match<'a>(\n\n option_specs: &'a [OptionSpec<'a>],\n\n name: &str,\n\n) -> Result<&'a OptionSpec<'a>, Vec<&'a OptionSpec<'a>>> {\n\n let mut matches = Vec::new();\n\n for spec in option_specs {\n\n match spec.long_match(name) {\n\n LongMatch::None => (),\n\n LongMatch::Partial => {\n\n matches.push(spec);\n\n }\n\n LongMatch::Exact => return Ok(spec),\n\n }\n\n }\n\n if matches.len() == 1 {\n\n Ok(matches[0])\n\n } else {\n\n Err(matches)\n\n }\n\n}\n\n\n", "file_path": "yash-builtin/src/common/arg.rs", "rank": 34, "score": 202396.394914187 }, { "content": "/// Converts fields to C 
strings.\n\nfn to_c_strings(s: Vec<Field>) -> Vec<CString> {\n\n // TODO return something rather than dropping null-containing strings\n\n s.into_iter()\n\n .filter_map(|f| CString::new(f.value).ok())\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::tests::echo_builtin;\n\n use crate::tests::in_virtual_system;\n\n use crate::tests::local_builtin;\n\n use crate::tests::return_builtin;\n\n use assert_matches::assert_matches;\n\n use futures_executor::block_on;\n\n use std::cell::RefCell;\n\n use std::path::PathBuf;\n\n use std::rc::Rc;\n\n use yash_env::system::r#virtual::INode;\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 35, "score": 198057.67173731705 }, { "content": "/// Parses short options in an argument.\n\n///\n\n/// This function examines the first field yielded by `arguments` and consumes\n\n/// it if it contains one or more short options. If the last option requires an\n\n/// argument and the field does not include one, the following field is consumed\n\n/// as the argument.\n\n///\n\n/// This function returns `Ok(true)` if consumed one or more fields.\n\nfn parse_short_options<'a, I: Iterator<Item = Field>>(\n\n option_specs: &'a [OptionSpec<'a>],\n\n arguments: &mut Peekable<I>,\n\n option_occurrences: &mut Vec<OptionOccurrence<'a>>,\n\n) -> Result<bool, Error<'a>> {\n\n fn starts_with_single_hyphen(field: &Field) -> bool {\n\n let mut chars = field.value.chars();\n\n chars.next() == Some('-') && !matches!(chars.next(), None | Some('-'))\n\n }\n\n\n\n let field = match arguments.next_if(starts_with_single_hyphen) {\n\n None => return Ok(false),\n\n Some(field) => field,\n\n };\n\n\n\n let mut chars = field.value.chars();\n\n chars.next(); // Skip the initial hyphen\n\n\n\n while let Some(c) = chars.next() {\n\n let spec = match option_specs.iter().find(|spec| spec.get_short() == Some(c)) {\n", "file_path": "yash-builtin/src/common/arg.rs", "rank": 36, "score": 193481.89567153374 }, { 
"content": "/// Parses a long option.\n\n///\n\n/// This function examines the first field yielded by `arguments` and consumes\n\n/// it if it is a long option. If the option requires an argument and the field\n\n/// does not include a delimiting `=` sign, the following field is consumed as\n\n/// the argument.\n\nfn parse_long_option<'a, I: Iterator<Item = Field>>(\n\n option_specs: &'a [OptionSpec<'a>],\n\n mode: Mode,\n\n arguments: &mut Peekable<I>,\n\n) -> Result<Option<OptionOccurrence<'a>>, Error<'a>> {\n\n fn starts_with_double_hyphen(field: &Field) -> bool {\n\n match field.value.strip_prefix(\"--\") {\n\n Some(body) => !body.is_empty(),\n\n None => false,\n\n }\n\n }\n\n\n\n let field = match arguments.next_if(starts_with_double_hyphen) {\n\n Some(field) => field,\n\n None => return Ok(None),\n\n };\n\n\n\n let equal = field.value.find('=');\n\n\n\n let name = match equal {\n", "file_path": "yash-builtin/src/common/arg.rs", "rank": 37, "score": 193477.20961664472 }, { "content": "/// Implementation of the return built-in.\n\n///\n\n/// This function calls [`builtin_main_sync`] and wraps the result in a\n\n/// `Future`.\n\npub fn builtin_main(\n\n env: &mut yash_env::Env,\n\n args: Vec<Field>,\n\n) -> Pin<Box<dyn Future<Output = Result>>> {\n\n Box::pin(ready(builtin_main_sync(env, args)))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use yash_env::semantics::ExitStatus;\n\n\n\n #[derive(Default)]\n\n struct DummyEnv;\n\n\n\n impl Env for DummyEnv {}\n\n\n\n #[test]\n\n fn returns_exit_status_specified_without_n_option() {\n\n let mut env = DummyEnv::default();\n", "file_path": "yash-builtin/src/return.rs", "rank": 38, "score": 193407.31190026458 }, { "content": "/// Implementation of the readonly built-in.\n\n///\n\n/// This function calls [`builtin_main_sync`] and wraps the result in a `Future`.\n\npub fn builtin_main(\n\n env: &mut yash_env::Env,\n\n args: Vec<Field>,\n\n) -> Pin<Box<dyn Future<Output = Result>>> {\n\n 
Box::pin(ready(builtin_main_sync(env, args)))\n\n}\n\n\n\n#[allow(clippy::bool_assert_comparison)]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use yash_env::Env;\n\n\n\n #[test]\n\n fn builtin_defines_read_only_variable() {\n\n let mut env = Env::new_virtual();\n\n let arg0 = Field::dummy(\"\");\n\n let arg1 = Field::dummy(\"foo=bar baz\");\n\n let location = arg1.origin.clone();\n\n let args = vec![arg0, arg1];\n", "file_path": "yash-builtin/src/readonly.rs", "rank": 39, "score": 193407.31190026455 }, { "content": "/// Implementation of the alias built-in.\n\n///\n\n/// This function calls [`builtin_main_sync`] and wraps the result in a `Future`.\n\npub fn builtin_main(\n\n env: &mut yash_env::Env,\n\n args: Vec<Field>,\n\n) -> Pin<Box<dyn Future<Output = Result>>> {\n\n Box::pin(ready(builtin_main_sync(env, args)))\n\n}\n\n\n\n#[allow(clippy::bool_assert_comparison)]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use yash_syntax::source::Location;\n\n use yash_syntax::source::Source;\n\n\n\n #[derive(Default)]\n\n struct DummyEnv {\n\n aliases: AliasSet,\n\n }\n\n\n\n impl Env for DummyEnv {\n", "file_path": "yash-builtin/src/alias.rs", "rank": 40, "score": 193407.31190026458 }, { "content": "#[async_trait(?Send)]\n\npub trait Print {\n\n /// Prints a string to the standard output.\n\n ///\n\n /// If an error occurs while printing, an error message is printed to the\n\n /// standard error and a non-zero exit status is returned.\n\n async fn print(&mut self, text: &str) -> ExitStatus;\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl<T: Stdout + Stderr> Print for T {\n\n async fn print(&mut self, text: &str) -> ExitStatus {\n\n match self.try_print(text).await {\n\n Ok(()) => ExitStatus::SUCCESS,\n\n Err(errno) => {\n\n self.print_system_error(errno, format_args!(\"cannot print to the standard output\"))\n\n .await;\n\n ExitStatus::FAILURE\n\n }\n\n }\n\n }\n", "file_path": "yash-builtin/src/common.rs", "rank": 42, "score": 185857.75396372745 }, { 
"content": "#[async_trait(?Send)]\n\npub trait Stdout {\n\n /// Prints a string to the standard output.\n\n async fn try_print(&mut self, text: &str) -> Result<(), Errno>;\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl Stdout for yash_env::Env {\n\n async fn try_print(&mut self, text: &str) -> Result<(), Errno> {\n\n self.system\n\n .write_all(Fd::STDOUT, text.as_bytes())\n\n .await\n\n .map(drop)\n\n }\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl Stdout for String {\n\n async fn try_print(&mut self, text: &str) -> Result<(), Errno> {\n\n self.push_str(text);\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Part of the execution environment that allows printing to the standard\n\n/// error.\n", "file_path": "yash-builtin/src/common.rs", "rank": 43, "score": 185857.75396372745 }, { "content": "#[async_trait(?Send)]\n\npub trait Stderr {\n\n /// Convenience function that prints the given error message.\n\n ///\n\n /// This function prints the `message` to the standard error of this\n\n /// environment. (The exact format of the printed message is subject to\n\n /// change.)\n\n ///\n\n /// Any errors that may happen writing to the standard error are ignored.\n\n async fn print_error(&mut self, message: std::fmt::Arguments<'_>);\n\n\n\n /// Convenience function that prints an error message for the given `errno`.\n\n ///\n\n /// This function prints `format!(\"{}: {}\\n\", message, errno.desc())` to the\n\n /// standard error of this environment. 
(The exact format of the printed\n\n /// message is subject to change.)\n\n ///\n\n /// Any errors that may happen writing to the standard error are ignored.\n\n async fn print_system_error(&mut self, errno: Errno, message: std::fmt::Arguments<'_>) {\n\n self.print_error(format_args!(\"{}: {}\", message, errno.desc()))\n\n .await\n", "file_path": "yash-builtin/src/common.rs", "rank": 44, "score": 185857.75396372745 }, { "content": "#[async_trait(?Send)]\n\npub trait ExpandToField {\n\n /// Performs the initial expansion on `self`, producing a single field.\n\n ///\n\n /// This is usually used in contexts where field splitting will not be\n\n /// performed on the result.\n\n async fn expand_to_field<E: Env>(&self, env: &mut E) -> Result<AttrField>;\n\n\n\n /// Performs the initial expansion on `self`, producing any number of\n\n /// fields.\n\n ///\n\n /// This is usually used in contexts where field splitting will be performed\n\n /// on the result.\n\n ///\n\n /// This function inserts the results into `fields`.\n\n async fn expand_to_fields<E: Env, F: Extend<AttrField>>(\n\n &self,\n\n env: &mut E,\n\n fields: &mut F,\n\n ) -> Result;\n\n}\n", "file_path": "yash-semantics/src/expansion.rs", "rank": 45, "score": 182844.00590916318 }, { "content": "/// Interface to accumulate results of the initial expansion.\n\n///\n\n/// `Expansion` is implemented by types that can accumulate [`AttrChar`]s or\n\n/// vectors of them. 
You construct an [`Output`] using an `Expansion`\n\n/// implementor and then use it to carry out the initial expansion.\n\npub trait Expansion: std::fmt::Debug {\n\n /// Appends a character to the current field.\n\n fn push_char(&mut self, c: AttrChar);\n\n\n\n /// Appends characters to the current field.\n\n ///\n\n /// The appended characters share the same `origin`, `is_quoted`, and\n\n /// `is_quoting` attributes.\n\n fn push_str(&mut self, s: &str, origin: Origin, is_quoted: bool, is_quoting: bool) {\n\n for c in s.chars() {\n\n self.push_char(AttrChar {\n\n value: c,\n\n origin,\n\n is_quoted,\n\n is_quoting,\n\n });\n\n }\n\n }\n\n}\n\n// TODO impl Expansion::push_fields\n", "file_path": "yash-semantics/src/expansion.rs", "rank": 46, "score": 179496.49023777884 }, { "content": "fn is_regular_file(path: &CStr) -> bool {\n\n match stat(path) {\n\n Ok(stat) => stat.st_mode & S_IFMT == S_IFREG,\n\n Err(_) => false,\n\n }\n\n}\n\n\n\nstatic CAUGHT_SIGNALS: [AtomicIsize; 8] = {\n\n // In the array creation, the repeat operand must be const.\n\n #[allow(clippy::declare_interior_mutable_const)]\n\n const SIGNAL_SLOT: AtomicIsize = AtomicIsize::new(0);\n\n [SIGNAL_SLOT; 8]\n\n};\n\n\n\n/// Signal catching function.\n\n///\n\n/// TODO Elaborate\n\nextern \"C\" fn catch_signal(signal: c_int) {\n\n // This function can only perform async-signal-safe operations.\n\n // Performing unsafe operations is undefined behavior!\n", "file_path": "yash-env/src/system/real.rs", "rank": 47, "score": 179065.14654878073 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nstruct VariableInContext {\n\n variable: Variable,\n\n context_index: usize,\n\n}\n\n\n\n/// Type of a context.\n\n///\n\n/// The context type affects the behavior of variable\n\n/// [assignment](VariableSet::assign).\n\n#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\npub enum ContextType {\n\n /// Context for normal assignments.\n\n ///\n\n /// The base context is a regular context. 
The context for a function's\n\n /// local assignment is also regular.\n\n Regular,\n\n\n\n /// Context for temporary assignments.\n\n ///\n\n /// A volatile context is used for holding temporary variables when\n\n /// executing a built-in or function.\n\n Volatile,\n\n}\n\n\n\n/// Variable context.\n", "file_path": "yash-env/src/variable.rs", "rank": 48, "score": 175391.0748817955 }, { "content": "/// Like [`read_eval_loop`], but returns the future in a pinned box.\n\npub fn read_eval_loop_boxed<'a>(\n\n env: &'a mut Env,\n\n lexer: &'a mut Lexer<'_>,\n\n) -> Pin<Box<dyn Future<Output = Result> + 'a>> {\n\n Box::pin(read_eval_loop(env, lexer))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::tests::{echo_builtin, return_builtin};\n\n use futures_executor::block_on;\n\n use std::ops::ControlFlow::Break;\n\n use std::rc::Rc;\n\n use yash_env::semantics::Divert;\n\n use yash_env::system::r#virtual::VirtualSystem;\n\n use yash_env::trap::Signal;\n\n use yash_env::trap::Trap;\n\n use yash_syntax::source::Location;\n\n use yash_syntax::source::Source;\n", "file_path": "yash-semantics/src/runner.rs", "rank": 49, "score": 170228.82896220236 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nstruct Context {\n\n /// Context type.\n\n r#type: ContextType,\n\n\n\n /// Positional parameters.\n\n ///\n\n /// This variable is very special:\n\n ///\n\n /// - Its value is always an `Array`.\n\n /// - It is never exported nor read-only.\n\n positional_params: Variable,\n\n}\n\n\n\nimpl Context {\n\n fn new(r#type: ContextType) -> Self {\n\n Context {\n\n r#type,\n\n positional_params: Variable {\n\n value: Array(Vec::default()),\n\n last_assigned_location: None,\n", "file_path": "yash-env/src/variable.rs", "rank": 50, "score": 167982.0205274994 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Timeout {\n\n target: Instant,\n\n waker: Weak<RefCell<Option<Waker>>>,\n\n}\n\n\n\nimpl PartialEq for Timeout {\n\n fn eq(&self, rhs: &Self) -> bool {\n\n 
self.target == rhs.target\n\n }\n\n}\n\n\n\nimpl Eq for Timeout {}\n\n\n\nimpl PartialOrd for Timeout {\n\n fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {\n\n Some(self.cmp(rhs))\n\n }\n\n}\n\n\n\nimpl Ord for Timeout {\n", "file_path": "yash-env/src/system.rs", "rank": 51, "score": 167288.0274637149 }, { "content": "#[derive(Clone, Debug)]\n\nstruct FdAwaiter {\n\n fd: Fd,\n\n waker: Weak<RefCell<Option<Waker>>>,\n\n}\n\n\n\nimpl Drop for FdAwaiter {\n\n /// Wakes the waker when `FdAwaiter` is dropped.\n\n fn drop(&mut self) {\n\n if let Some(waker) = self.waker.upgrade() {\n\n if let Some(waker) = waker.borrow_mut().take() {\n\n waker.wake();\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl AsyncIo {\n\n /// Returns a new empty `AsyncIo`.\n\n pub fn new() -> Self {\n\n Self::default()\n", "file_path": "yash-env/src/system.rs", "rank": 52, "score": 163779.32690798063 }, { "content": "#[derive(Clone, Copy, Default)]\n\nstruct PipeSet {\n\n read_previous: Option<Fd>,\n\n /// Reader and writer to the next command.\n\n next: Option<(Fd, Fd)>,\n\n}\n\n\n\nimpl PipeSet {\n\n fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n /// Updates the pipe set for the next command.\n\n ///\n\n /// Closes FDs that are no longer necessary and opens a new pipe if there is\n\n /// a next command.\n\n fn shift(&mut self, env: &mut Env, has_next: bool) -> std::result::Result<(), Errno> {\n\n if let Some(fd) = self.read_previous {\n\n let _ = env.system.close(fd);\n\n }\n\n\n", "file_path": "yash-semantics/src/command_impl/pipeline.rs", "rank": 53, "score": 158553.81380491215 }, { "content": "#[derive(Debug)]\n\nstruct RealChildProcess;\n\n\n\n#[async_trait(?Send)]\n\nimpl ChildProcess for RealChildProcess {\n\n async fn run(\n\n &mut self,\n\n env: &mut Env,\n\n mut task: Box<dyn for<'a> FnMut(&'a mut Env) -> Pin<Box<dyn Future<Output = ()> + 'a>>>,\n\n ) -> Pid {\n\n task(env).await;\n\n std::process::exit(env.exit_status.0)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use 
super::*;\n\n\n\n // This test depends on static variables.\n\n #[test]\n", "file_path": "yash-env/src/system/real.rs", "rank": 54, "score": 157270.71194539932 }, { "content": "#[derive(Debug)]\n\nstruct DummyChildProcess {\n\n /// State of the system.\n\n state: Rc<RefCell<SystemState>>,\n\n /// Executor to run the child process's task.\n\n executor: Rc<dyn Executor>,\n\n /// Process ID of this child process.\n\n process_id: Pid,\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl ChildProcess for DummyChildProcess {\n\n async fn run(&mut self, env: &mut Env, mut task: super::ChildProcessTask) -> Pid {\n\n let state = Rc::clone(&self.state);\n\n let process_id = self.process_id;\n\n let system = VirtualSystem { state, process_id };\n\n let mut child_env = env.clone_with_system(Box::new(system));\n\n\n\n let state = Rc::clone(&self.state);\n\n {\n\n let mut state = state.borrow_mut();\n", "file_path": "yash-env/src/system/virtual.rs", "rank": 55, "score": 157270.71194539932 }, { "content": "#[derive(Debug)]\n\nstruct DummyChildProcess {\n\n child_process_id: Pid,\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl ChildProcess for DummyChildProcess {\n\n async fn run(&mut self, _env: &mut Env, _task: super::ChildProcessTask) -> Pid {\n\n self.child_process_id\n\n }\n\n}\n\n\n\n/// Implementor of [`ChildProcess`] that is returned from\n\n/// [`RealSystem::new_child_process`] in the child process.\n", "file_path": "yash-env/src/system/real.rs", "rank": 56, "score": 157270.71194539932 }, { "content": "/// Tests if a character can be part of a variable name.\n\n///\n\n/// The current implementation is the same as [`is_portable_name_char`].\n\n/// Other (POSIXly non-portable) characters may be supported in the future.\n\npub fn is_name_char(c: char) -> bool {\n\n // TODO support other Unicode name characters\n\n is_portable_name_char(c)\n\n}\n\n\n\nimpl WordLexer<'_, '_> {\n\n /// Tests if there is a length prefix (`#`).\n\n ///\n\n /// This function may consume many characters, possibly 
beyond the length\n\n /// prefix, regardless of the result. The caller should rewind to the index\n\n /// this function returns.\n\n async fn has_length_prefix(&mut self) -> Result<bool> {\n\n if !self.skip_if(|c| c == '#').await? {\n\n return Ok(false);\n\n }\n\n\n\n // Remember that a parameter expansion cannot have both a prefix and\n\n // suffix modifier. For example, `${#-?}` is not considered to have a\n\n // prefix. We need to look ahead to see if it is okay to treat the `#`\n\n // as a prefix.\n", "file_path": "yash-syntax/src/parser/lex/braced_param.rs", "rank": 57, "score": 153290.5579872194 }, { "content": "/// Tests if a character names a special parameter.\n\n///\n\n/// A special parameter is one of: `@*#?-$!0`.\n\npub fn is_special_parameter_char(c: char) -> bool {\n\n matches!(c, '@' | '*' | '#' | '?' | '-' | '$' | '!' | '0')\n\n}\n\n\n", "file_path": "yash-syntax/src/parser/lex/raw_param.rs", "rank": 58, "score": 150750.8061390049 }, { "content": "/// Tests if a character is a valid single-character raw parameter name.\n\n///\n\n/// If this function returns true, the character is a valid parameter name for a\n\n/// raw parameter expansion, but the next character is never treated as part of\n\n/// the name.\n\n///\n\n/// This function returns true for ASCII digits and special parameter names.\n\npub fn is_single_char_name(c: char) -> bool {\n\n c.is_ascii_digit() || is_special_parameter_char(c)\n\n}\n\n\n\nimpl Lexer<'_> {\n\n /// Parses a parameter expansion that is not enclosed in braces.\n\n ///\n\n /// The initial `$` must have been consumed before calling this function.\n\n /// This functions checks if the next character is a valid POSIXly-portable\n\n /// parameter name. If so, the name is consumed and returned. Otherwise, no\n\n /// characters are consumed and the return value is `Ok(Err(location))`.\n\n ///\n\n /// The `location` parameter should be the location of the initial `$`. 
It\n\n /// is used to construct the result, but this function does not check if it\n\n /// actually is a location of `$`.\n\n pub async fn raw_param(\n\n &mut self,\n\n location: Location,\n\n ) -> Result<std::result::Result<TextUnit, Location>> {\n\n if let Some(c) = self.consume_char_if(is_single_char_name).await? {\n", "file_path": "yash-syntax/src/parser/lex/raw_param.rs", "rank": 59, "score": 150733.84236516006 }, { "content": "/// Tests if a character can be part of a POSIXly-portable name.\n\n///\n\n/// Returns true if the character is an ASCII alphanumeric or underscore.\n\n///\n\n/// Note that a valid name cannot start with a digit.\n\npub fn is_portable_name_char(c: char) -> bool {\n\n matches!(c, '0'..='9' | 'A'..='Z' | '_' | 'a'..='z')\n\n}\n\n\n", "file_path": "yash-syntax/src/parser/lex/raw_param.rs", "rank": 60, "score": 150728.38441444287 }, { "content": "/// Removing quotes from syntax without performing expansion.\n\n///\n\n/// This trail will be useful only in a limited number of use cases. In the\n\n/// normal word expansion process, quote removal is done after other kinds of\n\n/// expansions like parameter expansion, so this trait is not used.\n\npub trait Unquote {\n\n /// Converts `self` to a string with all quotes removed and writes to `w`.\n\n fn write_unquoted<W: fmt::Write>(&self, w: &mut W) -> UnquoteResult;\n\n\n\n /// Converts `self` to a string with all quotes removed.\n\n ///\n\n /// Returns a tuple of a string and a bool. The string is an unquoted version\n\n /// of `self`. 
The bool tells whether there is any quotes contained in\n\n /// `self`.\n\n fn unquote(&self) -> (String, bool) {\n\n let mut unquoted = String::new();\n\n let is_quoted = self\n\n .write_unquoted(&mut unquoted)\n\n .expect(\"`write_unquoted` should not fail\");\n\n (unquoted, is_quoted)\n\n }\n\n}\n\n\n", "file_path": "yash-syntax/src/syntax.rs", "rank": 61, "score": 140510.46389012682 }, { "content": "#[async_trait(?Send)]\n\npub trait Expand {\n\n /// Performs the initial expansion.\n\n ///\n\n /// The results should be pushed to the output.\n\n async fn expand<E: Env>(&self, env: &mut E, output: &mut Output<'_>) -> Result;\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl<T: Expand> Expand for [T] {\n\n /// Expands a slice.\n\n ///\n\n /// This function expands each item of the slice in sequence.\n\n async fn expand<E: Env>(&self, env: &mut E, output: &mut Output<'_>) -> Result {\n\n for item in self {\n\n item.expand(env, output).await?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n/// Syntactic construct that can be expanded to an [`AttrField`].\n\n///\n\n/// Implementors of this trait expand themselves directly to an `AttrField` or\n\n/// a vector of `AttrField`s. See also [`Expand`].\n", "file_path": "yash-semantics/src/expansion.rs", "rank": 62, "score": 140505.82245283684 }, { "content": "#[async_trait(?Send)]\n\npub trait Input {\n\n /// Reads a next line of the source code.\n\n ///\n\n /// The input function is line-oriented; that is, this function returns a [`Line`] that is\n\n /// terminated by a newline unless the end of input (EOF) is reached, in which case the\n\n /// remaining characters up to the EOF must be returned without a trailing newline. If there\n\n /// are no more characters at all, the returned line is empty.\n\n ///\n\n /// Errors returned from this function are considered unrecoverable. 
Once an error is returned,\n\n /// this function should not be called any more.\n\n ///\n\n /// Because the current Rust compiler does not support `async` functions in a trait, this\n\n /// function is explicitly declared to return a `Future` in a pinned box.\n\n async fn next_line(&mut self, context: &Context) -> Result;\n\n}\n\n\n\n/// Input function that reads from a string in memory.\n\npub struct Memory<'a> {\n\n lines: Lines<'a>,\n\n}\n", "file_path": "yash-syntax/src/input.rs", "rank": 63, "score": 140505.82245283684 }, { "content": "/// Possibly literal syntax element.\n\n///\n\n/// A syntax element is _literal_ if it is not quoted and does not contain any\n\n/// expansions. Such an element can be expanded to a string independently of the\n\n/// shell execution environment.\n\n///\n\n/// ```\n\n/// # use yash_syntax::syntax::MaybeLiteral;\n\n/// # use yash_syntax::syntax::Text;\n\n/// # use yash_syntax::syntax::TextUnit::Literal;\n\n/// let text = Text(vec![Literal('f'), Literal('o'), Literal('o')]);\n\n/// let expanded = text.to_string_if_literal().unwrap();\n\n/// assert_eq!(expanded, \"foo\");\n\n/// ```\n\n///\n\n/// ```\n\n/// # use yash_syntax::syntax::MaybeLiteral;\n\n/// # use yash_syntax::syntax::Text;\n\n/// # use yash_syntax::syntax::TextUnit::Backslashed;\n\n/// let backslashed = Text(vec![Backslashed('a')]);\n\n/// assert_eq!(backslashed.to_string_if_literal(), None);\n\n/// ```\n\npub trait MaybeLiteral {\n\n /// Checks if `self` is literal and, if so, converts to a string and appends\n\n /// it to `result`.\n\n ///\n\n /// If `self` is literal, `self` converted to a string is appended to\n\n /// `result` and `Ok(result)` is returned. 
Otherwise, `result` is not\n\n /// modified and `Err(result)` is returned.\n\n fn extend_if_literal<T: Extend<char>>(&self, result: T) -> Result<T, T>;\n\n\n\n /// Checks if `self` is literal and, if so, converts to a string.\n\n fn to_string_if_literal(&self) -> Option<String> {\n\n self.extend_if_literal(String::new()).ok()\n\n }\n\n}\n\n\n\nimpl<T: Unquote> Unquote for [T] {\n\n fn write_unquoted<W: fmt::Write>(&self, w: &mut W) -> UnquoteResult {\n\n self.iter()\n\n .try_fold(false, |quoted, item| Ok(quoted | item.write_unquoted(w)?))\n\n }\n", "file_path": "yash-syntax/src/syntax.rs", "rank": 64, "score": 138257.23455340706 }, { "content": "/// Returns true iff any character needs quoting.\n\nfn str_needs_quoting(s: &str) -> bool {\n\n if s.chars().any(char_needs_quoting) {\n\n return true;\n\n }\n\n\n\n // `#` or `~` occurring at the beginning of the string\n\n if let Some(c) = s.chars().next() {\n\n if c == '#' || c == '~' {\n\n return true;\n\n }\n\n }\n\n\n\n // `{` preceding `}`\n\n if let Some(i) = s.find('{') {\n\n let sub = &s[i + 1..];\n\n if sub.find('}').is_some() {\n\n return true;\n\n }\n\n }\n\n\n", "file_path": "yash-quote/src/lib.rs", "rank": 65, "score": 135129.49292665042 }, { "content": "/// Parses a tilde expansion except the initial tilde.\n\n///\n\n/// Returns the literal string up to the next non-applicable word unit.\n\nfn parse_name<I: Iterator<Item = WordUnit>>(i: &mut Peekable<I>) -> String {\n\n let mut name = String::new();\n\n\n\n while let Some(Unquoted(Literal(c))) =\n\n i.next_if(|unit| matches!(unit, Unquoted(Literal(c)) if !matches!(*c, '/' | ':')))\n\n {\n\n name.push(c)\n\n }\n\n\n\n name\n\n}\n\n\n\nimpl Word {\n\n fn parse_tilde(&mut self, everywhere: bool) {\n\n let mut i = self.units.drain(..).peekable();\n\n let mut is_after_colon = true;\n\n let mut units = vec![];\n\n\n\n loop {\n\n is_after_colon = match i.next() {\n", "file_path": "yash-syntax/src/parser/lex/tilde.rs", "rank": 66, "score": 134891.70529446084 }, { 
"content": "/// Quote removal.\n\n///\n\n/// The quote removal is a step of the word expansion that removes quotes from\n\n/// the field. The [`do_quote_removal`](Self::do_quote_removal) function\n\n/// converts an [`AttrChar`] string to a regular string.\n\npub trait QuoteRemoval {\n\n /// Return type of [`do_quote_removal`](Self::do_quote_removal).\n\n type Output;\n\n\n\n /// Performs the quote removal on `self`.\n\n ///\n\n /// TODO Add a parameter to specify how characters in the result should be\n\n /// escaped.\n\n fn do_quote_removal(self) -> Self::Output;\n\n}\n\n\n\nimpl QuoteRemoval for &[AttrChar] {\n\n type Output = String;\n\n fn do_quote_removal(self) -> String {\n\n // TODO Remove quotes correctly\n\n self.iter()\n\n .filter(|c| !c.is_quoting)\n\n .map(|c| c.value)\n\n .collect()\n\n }\n", "file_path": "yash-semantics/src/expansion/quote_removal.rs", "rank": 67, "score": 134045.50777773221 }, { "content": "#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\nenum LongMatch {\n\n None,\n\n Partial,\n\n Exact,\n\n}\n\n\n\nimpl OptionSpec<'_> {\n\n fn long_match(&self, name: &str) -> LongMatch {\n\n if let Some(long) = self.long {\n\n if long.starts_with(name) {\n\n return if long.len() == name.len() {\n\n LongMatch::Exact\n\n } else {\n\n LongMatch::Partial\n\n };\n\n }\n\n }\n\n LongMatch::None\n\n }\n\n}\n", "file_path": "yash-builtin/src/common/arg.rs", "rank": 68, "score": 133269.26808463124 }, { "content": "pub fn bin_main() -> i32 {\n\n use env::Env;\n\n use env::RealSystem;\n\n use futures_util::task::LocalSpawnExt;\n\n use std::cell::Cell;\n\n use std::rc::Rc;\n\n use std::task::Poll;\n\n\n\n // SAFETY: This is the only instance of RealSystem we create in the whole\n\n // process.\n\n let system = unsafe { RealSystem::new() };\n\n let env = Env::with_system(Box::new(system));\n\n let system = env.system.clone();\n\n let mut pool = futures_executor::LocalPool::new();\n\n let task = parse_and_print(env);\n\n let result = 
Rc::new(Cell::new(Poll::Pending));\n\n let result_2 = Rc::clone(&result);\n\n pool.spawner()\n\n .spawn_local(async move {\n\n let result = task.await;\n", "file_path": "yash/src/lib.rs", "rank": 69, "score": 131943.29330922724 }, { "content": "#[derive(Clone, Debug, Eq, PartialEq)]\n\nenum UserSignalState {\n\n /// The user has not yet set a trap for the signal, and the disposition the\n\n /// shell has inherited from the pre-exec process is `SIG_DFL`.\n\n InitiallyDefaulted,\n\n /// The user has not yet set a trap for the signal, and the disposition the\n\n /// shell has inherited from the pre-exec process is `SIG_IGN`.\n\n InitiallyIgnored,\n\n /// User-defined trap.\n\n Trap(TrapState),\n\n}\n\n\n\nimpl UserSignalState {\n\n fn as_trap(&self) -> Option<&TrapState> {\n\n if let UserSignalState::Trap(trap) = self {\n\n Some(trap)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "yash-env/src/trap.rs", "rank": 70, "score": 131912.483616896 }, { "content": "/// Result of [`Unquote::write_unquoted`].\n\n///\n\n/// If there is some quotes to be removed, the result will be `Ok(true)`. If no\n\n/// quotes, `Ok(false)`. 
On error, `Err(Error)`.\n\ntype UnquoteResult = Result<bool, fmt::Error>;\n\n\n", "file_path": "yash-syntax/src/syntax.rs", "rank": 71, "score": 130156.21372507158 }, { "content": "/// Partial abstract syntax tree (AST) that can be filled with missing parts to create the whole,\n\n/// final AST.\n\npub trait Fill<T = HereDoc> {\n\n /// Final AST created by filling `self`.\n\n type Full;\n\n\n\n /// Takes some items from the iterator and fills the missing parts of `self` to create\n\n /// the complete AST.\n\n ///\n\n /// # Panics\n\n ///\n\n /// May panic if a value has to be filled but the iterator returns `None`.\n\n fn fill(self, i: &mut dyn Iterator<Item = T>) -> Result<Self::Full>;\n\n}\n\n\n\nimpl<T> Fill for Option<T>\n\nwhere\n\n T: Fill,\n\n{\n\n type Full = Option<<T as Fill>::Full>;\n\n fn fill(self, i: &mut dyn Iterator<Item = HereDoc>) -> Result<Self::Full> {\n\n self.map(|v| v.fill(i)).transpose()\n", "file_path": "yash-syntax/src/parser/fill.rs", "rank": 72, "score": 123161.67880595813 }, { "content": "#[derive(Clone, Copy, Debug, Eq, PartialEq)]\n\nstruct SavedFd {\n\n /// File descriptor by which the original open file description was\n\n /// previously accessible.\n\n original: Fd,\n\n /// Temporary file descriptor that remembers the original open file\n\n /// description.\n\n save: Option<Fd>,\n\n}\n\n\n\n/// Types of errors that may occur in the redirection.\n\n#[derive(Clone, Debug, Eq, PartialEq)]\n\npub enum ErrorCause {\n\n /// Expansion error.\n\n Expansion(crate::expansion::ErrorCause),\n\n /// Pathname containing a nul byte.\n\n NulByte(NulError),\n\n /// The target file descriptor could not be modified for the redirection.\n\n FdNotOverwritten(Fd, Errno),\n\n /// Error while opening a file.\n\n ///\n", "file_path": "yash-semantics/src/redir.rs", "rank": 73, "score": 120355.9664544496 }, { "content": "/// Helper for implementing FromStr.\n\ntrait Shift {\n\n type Output;\n\n fn shift(self) -> Self::Output;\n\n}\n\n\n\nimpl<T, E> Shift for 
Result<Option<T>, E> {\n\n type Output = Result<T, Option<E>>;\n\n fn shift(self) -> Result<T, Option<E>> {\n\n match self {\n\n Ok(Some(t)) => Ok(t),\n\n Ok(None) => Err(None),\n\n Err(e) => Err(Some(e)),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Param {\n\n type Err = Option<Error>;\n\n fn from_str(s: &str) -> Result<Param, Option<Error>> {\n\n match TextUnit::from_str(s) {\n", "file_path": "yash-syntax/src/parser/from_str.rs", "rank": 74, "score": 120137.83780280036 }, { "content": "/// Finds the minimum available FD.\n\n///\n\n/// The returned FD is the minimum that is equal to or greater than `min` and\n\n/// not included in `existings`. Items of `existings` must be sorted.\n\nfn min_unused_fd<'a, I: IntoIterator<Item = &'a Fd>>(min: Fd, existings: I) -> Fd {\n\n let candidates = (min.0..).map(Fd);\n\n let rejections = existings\n\n .into_iter()\n\n .skip_while(|fd| **fd < min)\n\n .map(Some)\n\n .chain(std::iter::repeat(None));\n\n candidates\n\n .zip(rejections)\n\n .skip_while(|(candidate, rejection)| Some(candidate) == *rejection)\n\n .map(|(candidate, _rejection)| candidate)\n\n .next()\n\n .unwrap()\n\n}\n\n\n\nimpl Process {\n\n /// Creates a new running process.\n\n pub fn with_parent(ppid: Pid) -> Process {\n\n Process {\n\n ppid,\n", "file_path": "yash-env/src/system/virtual/process.rs", "rank": 75, "score": 117521.74504005135 }, { "content": "/// Returns true if the character is a blank character.\n\npub fn is_blank(c: char) -> bool {\n\n // TODO locale\n\n c != '\\n' && c.is_whitespace()\n\n}\n\n\n\n/// Result of [`LexerCore::peek_char`].\n", "file_path": "yash-syntax/src/parser/lex/core.rs", "rank": 76, "score": 117248.53183027286 }, { "content": "/// Tests whether the given character is the first character of an operator.\n\npub fn is_operator_char(c: char) -> bool {\n\n OPERATORS.edge(c).is_some()\n\n}\n\n\n", "file_path": "yash-syntax/src/parser/lex/op.rs", "rank": 77, "score": 115382.45833436445 }, { "content": "/// Tests whether the given 
character is a token delimiter.\n\n///\n\n/// A character is a token delimiter if it is either a whitespace or [operator](is_operator_char).\n\npub fn is_token_delimiter_char(c: char) -> bool {\n\n is_operator_char(c) || is_blank(c)\n\n}\n\n\n\nimpl Lexer<'_> {\n\n /// Determines the token ID for the word.\n\n ///\n\n /// This is a helper function used by [`Lexer::token`] and does not support\n\n /// operators.\n\n async fn token_id(&mut self, word: &Word) -> Result<TokenId> {\n\n if word.units.is_empty() {\n\n return Ok(TokenId::EndOfInput);\n\n }\n\n\n\n if let Some(literal) = word.to_string_if_literal() {\n\n if let Ok(keyword) = Keyword::try_from(literal.as_str()) {\n\n return Ok(TokenId::Token(Some(keyword)));\n\n }\n\n\n\n if literal.chars().all(|c| c.is_ascii_digit()) {\n", "file_path": "yash-syntax/src/parser/lex/token.rs", "rank": 78, "score": 113598.7557950345 }, { "content": "#[derive(Default)]\n\nstruct Builder<H> {\n\n assigns: Vec<Assign>,\n\n words: Vec<Word>,\n\n redirs: Vec<Redir<H>>,\n\n}\n\n\n\nimpl<H> Builder<H> {\n\n fn is_empty(&self) -> bool {\n\n self.assigns.is_empty() && self.words.is_empty() && self.redirs.is_empty()\n\n }\n\n}\n\n\n\nimpl<H> From<Builder<H>> for SimpleCommand<H> {\n\n fn from(builder: Builder<H>) -> Self {\n\n SimpleCommand {\n\n assigns: builder.assigns,\n\n words: builder.words,\n\n redirs: builder.redirs.into(),\n\n }\n\n }\n", "file_path": "yash-syntax/src/parser/simple_command.rs", "rank": 79, "score": 111283.96209547913 }, { "content": " let result = block_on(command.execute(&mut env));\n\n assert_eq!(result, Continue(()));\n\n assert_eq!(env.exit_status, ExitStatus::NOT_FOUND);\n\n }\n\n\n\n #[test]\n\n fn simple_command_assigns_variables_in_volatile_context_for_external_command() {\n\n in_virtual_system(|mut env, _pid, _state| async move {\n\n let command: syntax::SimpleCommand = \"a=123 /foo/bar\".parse().unwrap();\n\n command.execute(&mut env).await;\n\n assert_eq!(env.variables.get(\"a\"), None);\n\n });\n\n 
}\n\n\n\n #[test]\n\n fn simple_command_performs_redirections_and_assignments_for_target_not_found() {\n\n in_virtual_system(|mut env, _pid, state| async move {\n\n // TODO Test with assignment with side-effect: foo=${bar=baz}\n\n let command: syntax::SimpleCommand =\n\n \"foo=bar no_such_utility >/tmp/file\".parse().unwrap();\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 80, "score": 99199.29886490988 }, { "content": " in_virtual_system(|mut env, _pid, _state| async move {\n\n env.builtins.insert(\"return\", return_builtin());\n\n let command: syntax::SimpleCommand = \">/tmp/foo$(return -n 42)\".parse().unwrap();\n\n command.execute(&mut env).await;\n\n assert_eq!(env.exit_status, ExitStatus(42));\n\n });\n\n }\n\n\n\n #[test]\n\n fn simple_command_handles_subshell_error_with_absent_target() {\n\n let system = VirtualSystem::new();\n\n let state = Rc::clone(&system.state);\n\n let mut env = Env::with_system(Box::new(system));\n\n let command: syntax::SimpleCommand = \">/tmp/foo\".parse().unwrap();\n\n let result = block_on(command.execute(&mut env));\n\n assert_eq!(result, Break(Divert::Interrupt(Some(ExitStatus::ERROR))));\n\n\n\n let state = state.borrow();\n\n let stderr = state.file_system.get(\"/dev/stderr\").unwrap().borrow();\n\n assert!(!stderr.content.is_empty());\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 81, "score": 99196.87611563104 }, { "content": " }\n\n\n\n #[test]\n\n fn simple_command_performs_assignment_with_absent_target() {\n\n let mut env = Env::new_virtual();\n\n let command: syntax::SimpleCommand = \"a=b\".parse().unwrap();\n\n let result = block_on(command.execute(&mut env));\n\n assert_eq!(result, Continue(()));\n\n assert_eq!(env.exit_status, ExitStatus::SUCCESS);\n\n assert_eq!(\n\n env.variables.get(\"a\").unwrap().value,\n\n Value::Scalar(\"b\".to_string())\n\n );\n\n }\n\n\n\n #[test]\n\n fn simple_command_returns_command_substitution_exit_status_from_assignment() 
{\n\n in_virtual_system(|mut env, _pid, _state| async move {\n\n env.builtins.insert(\"return\", return_builtin());\n\n let command: syntax::SimpleCommand = \"a=$(return -n 12)\".parse().unwrap();\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 82, "score": 99196.85415545355 }, { "content": " #[test]\n\n fn simple_command_calls_execve_with_correct_arguments() {\n\n in_virtual_system(|mut env, _pid, state| async move {\n\n let path = PathBuf::from(\"/some/file\");\n\n let mut content = INode::default();\n\n content.permissions.0 |= 0o100;\n\n content.is_native_executable = true;\n\n let content = Rc::new(RefCell::new(content));\n\n state.borrow_mut().file_system.save(path, content);\n\n\n\n env.variables\n\n .assign(\n\n Scope::Global,\n\n \"env\".to_string(),\n\n Variable {\n\n value: Value::Scalar(\"scalar\".to_string()),\n\n last_assigned_location: None,\n\n is_exported: true,\n\n read_only_location: None,\n\n },\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 83, "score": 99195.77155615916 }, { "content": " use yash_env::variable::Scope;\n\n use yash_env::variable::Variable;\n\n use yash_env::VirtualSystem;\n\n use yash_syntax::source::Location;\n\n\n\n #[test]\n\n fn simple_command_performs_redirection_with_absent_target() {\n\n in_virtual_system(|mut env, _pid, state| async move {\n\n let command: syntax::SimpleCommand = \">/tmp/foo\".parse().unwrap();\n\n let result = command.execute(&mut env).await;\n\n assert_eq!(result, Continue(()));\n\n assert_eq!(env.exit_status, ExitStatus::SUCCESS);\n\n let state = state.borrow();\n\n let file = state.file_system.get(\"/tmp/foo\").unwrap().borrow();\n\n assert_eq!(file.content, []);\n\n });\n\n }\n\n\n\n #[test]\n\n fn simple_command_returns_command_substitution_exit_status_from_redirection() {\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 84, "score": 99194.92750600287 }, { "content": "\n\n if path.to_bytes().is_empty() {\n\n 
print_error(\n\n &mut env,\n\n format!(\"cannot execute external utility {:?}\", name.value).into(),\n\n \"utility not found\".into(),\n\n &name.origin,\n\n )\n\n .await;\n\n env.exit_status = ExitStatus::NOT_FOUND;\n\n return Continue(());\n\n }\n\n\n\n let subshell = env.run_in_subshell(move |env| {\n\n Box::pin(async move {\n\n env.traps.disable_internal_handlers(&mut env.system).ok();\n\n\n\n let envs = env.variables.env_c_strings();\n\n let result = env.system.execve(path.as_c_str(), &args, &envs);\n\n // TODO Prefer into_err to unwrap_err\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 85, "score": 99193.02957223605 }, { "content": " /// exit status is 127 for `ENOENT` and `ENOTDIR` and 126 for others.\n\n ///\n\n /// POSIX leaves many aspects of the simple command execution unspecified.\n\n /// The detail semantics may differ in other shell implementations.\n\n async fn execute(&self, env: &mut Env) -> Result {\n\n let fields = match expand_words(env, &self.words).await {\n\n Ok(fields) => fields,\n\n Err(error) => return error.handle(env).await,\n\n };\n\n\n\n use crate::command_search::Target::{Builtin, External, Function};\n\n if let Some(name) = fields.get(0) {\n\n match search(env, &name.value) {\n\n Some(Builtin(builtin)) => {\n\n execute_builtin(env, builtin, &self.assigns, fields, &self.redirs).await\n\n }\n\n Some(Function(function)) => {\n\n execute_function(env, function, &self.assigns, fields, &self.redirs).await\n\n }\n\n Some(External { path }) => {\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 86, "score": 99191.97813620267 }, { "content": " command.execute(&mut env).await;\n\n assert_eq!(env.exit_status, ExitStatus(12));\n\n })\n\n }\n\n\n\n #[test]\n\n fn simple_command_handles_assignment_error_with_absent_target() {\n\n let system = VirtualSystem::new();\n\n let state = Rc::clone(&system.state);\n\n let mut env = Env::with_system(Box::new(system));\n\n env.variables\n\n 
.assign(\n\n Scope::Global,\n\n \"a\".to_string(),\n\n Variable {\n\n value: Value::Scalar(\"\".to_string()),\n\n last_assigned_location: None,\n\n is_exported: false,\n\n read_only_location: Some(Location::dummy(\"ROL\")),\n\n },\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 87, "score": 99191.18549779656 }, { "content": " let result = block_on(command.execute(&mut env));\n\n assert_eq!(result, Continue(()));\n\n\n\n let state = state.borrow();\n\n let file = state.file_system.get(\"/dev/stdout\").unwrap().borrow();\n\n assert_eq!(file.content, \"bar-baz-\\n\".as_bytes());\n\n }\n\n\n\n #[test]\n\n fn simple_command_creates_temporary_context_executing_function() {\n\n use yash_env::function::HashEntry;\n\n let system = VirtualSystem::new();\n\n let state = Rc::clone(&system.state);\n\n let mut env = Env::with_system(Box::new(system));\n\n env.builtins.insert(\"echo\", echo_builtin());\n\n env.builtins.insert(\"local\", local_builtin());\n\n env.functions.insert(HashEntry(Rc::new(Function {\n\n name: \"foo\".to_string(),\n\n body: Rc::new(\"{ local x=42; echo $x; }\".parse().unwrap()),\n\n origin: Location::dummy(\"dummy\"),\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 88, "score": 99189.95937654488 }, { "content": " is_read_only: false,\n\n })));\n\n let command: syntax::SimpleCommand = \"foo\".parse().unwrap();\n\n let result = block_on(command.execute(&mut env));\n\n assert_eq!(result, Continue(()));\n\n assert_eq!(env.exit_status, ExitStatus(13));\n\n }\n\n\n\n #[test]\n\n fn simple_command_applies_redirections_to_function() {\n\n use yash_env::function::HashEntry;\n\n let system = VirtualSystem::new();\n\n let state = Rc::clone(&system.state);\n\n let mut env = Env::with_system(Box::new(system));\n\n env.builtins.insert(\"echo\", echo_builtin());\n\n env.functions.insert(HashEntry(Rc::new(Function {\n\n name: \"foo\".to_string(),\n\n body: Rc::new(\"{ echo ok; }\".parse().unwrap()),\n\n origin: 
Location::dummy(\"dummy\"),\n\n is_read_only: false,\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 89, "score": 99188.95219264363 }, { "content": " let command: syntax::SimpleCommand = \"foo\".parse().unwrap();\n\n let result = block_on(command.execute(&mut env));\n\n assert_eq!(result, Continue(()));\n\n assert_eq!(env.exit_status, ExitStatus(26));\n\n }\n\n\n\n #[test]\n\n fn simple_command_passes_arguments_to_function() {\n\n use yash_env::function::HashEntry;\n\n let system = VirtualSystem::new();\n\n let state = Rc::clone(&system.state);\n\n let mut env = Env::with_system(Box::new(system));\n\n env.builtins.insert(\"echo\", echo_builtin());\n\n env.functions.insert(HashEntry(Rc::new(Function {\n\n name: \"foo\".to_string(),\n\n body: Rc::new(\"{ echo $1-$2-$3; }\".parse().unwrap()),\n\n origin: Location::dummy(\"dummy\"),\n\n is_read_only: false,\n\n })));\n\n let command: syntax::SimpleCommand = \"foo bar baz\".parse().unwrap();\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 90, "score": 99188.80936699422 }, { "content": " )\n\n .unwrap();\n\n env.variables\n\n .assign(\n\n Scope::Global,\n\n \"local\".to_string(),\n\n Variable {\n\n value: Value::Scalar(\"ignored\".to_string()),\n\n last_assigned_location: None,\n\n is_exported: false,\n\n read_only_location: None,\n\n },\n\n )\n\n .unwrap();\n\n\n\n let command: syntax::SimpleCommand = \"var=123 /some/file foo bar\".parse().unwrap();\n\n let result = command.execute(&mut env).await;\n\n assert_eq!(result, Continue(()));\n\n\n\n let state = state.borrow();\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 91, "score": 99188.7579910992 }, { "content": "use crate::expansion::ExitStatusAdapter;\n\nuse crate::print_error;\n\nuse crate::redir::RedirGuard;\n\nuse crate::Command;\n\nuse crate::Handle;\n\nuse async_trait::async_trait;\n\nuse std::ffi::CString;\n\nuse std::ops::ControlFlow::{Break, Continue};\n\nuse 
std::rc::Rc;\n\nuse yash_env::builtin::Builtin;\n\nuse yash_env::function::Function;\n\nuse yash_env::semantics::Divert;\n\nuse yash_env::semantics::ExitStatus;\n\nuse yash_env::semantics::Field;\n\nuse yash_env::semantics::Result;\n\nuse yash_env::system::Errno;\n\nuse yash_env::variable::ContextType;\n\nuse yash_env::variable::Scope;\n\nuse yash_env::variable::ScopeGuard;\n\nuse yash_env::variable::Value;\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 92, "score": 99188.20702777834 }, { "content": " )\n\n .unwrap();\n\n let command: syntax::SimpleCommand = \"a=b\".parse().unwrap();\n\n let result = block_on(command.execute(&mut env));\n\n assert_eq!(result, Break(Divert::Interrupt(Some(ExitStatus::ERROR))));\n\n\n\n let state = state.borrow();\n\n let stderr = state.file_system.get(\"/dev/stderr\").unwrap().borrow();\n\n assert!(!stderr.content.is_empty());\n\n }\n\n\n\n #[test]\n\n fn simple_command_returns_exit_status_from_builtin_without_divert() {\n\n let mut env = Env::new_virtual();\n\n env.builtins.insert(\"return\", return_builtin());\n\n let command: syntax::SimpleCommand = \"return -n 93\".parse().unwrap();\n\n let result = block_on(command.execute(&mut env));\n\n assert_eq!(result, Continue(()));\n\n assert_eq!(env.exit_status, ExitStatus(93));\n\n }\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 93, "score": 99187.02133955297 }, { "content": " body: Rc::new(\"{ echo; }\".parse().unwrap()),\n\n origin: Location::dummy(\"dummy\"),\n\n is_read_only: false,\n\n })));\n\n let variable = Variable {\n\n value: Value::Scalar(\"\".to_string()),\n\n last_assigned_location: None,\n\n is_exported: false,\n\n read_only_location: Some(Location::dummy(\"readonly\")),\n\n };\n\n env.variables\n\n .assign(Scope::Global, \"x\".to_string(), variable)\n\n .unwrap();\n\n let command: syntax::SimpleCommand = \"x=hello foo\".parse().unwrap();\n\n let result = block_on(command.execute(&mut env));\n\n 
assert_matches!(result, Break(Divert::Interrupt(Some(exit_status))) => {\n\n assert_ne!(exit_status, ExitStatus::SUCCESS);\n\n });\n\n }\n\n\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 94, "score": 99186.68370551318 }, { "content": " body: Rc::new(\"{ echo $x; }\".parse().unwrap()),\n\n origin: Location::dummy(\"dummy\"),\n\n is_read_only: false,\n\n })));\n\n let command: syntax::SimpleCommand = \"x=hello foo\".parse().unwrap();\n\n block_on(command.execute(&mut env));\n\n assert_eq!(env.variables.get(\"x\"), None);\n\n\n\n let state = state.borrow();\n\n let stdout = state.file_system.get(\"/dev/stdout\").unwrap().borrow();\n\n assert_eq!(stdout.content, \"hello\\n\".as_bytes());\n\n }\n\n\n\n #[test]\n\n fn function_fails_on_reassigning_to_read_only_variable() {\n\n use yash_env::function::HashEntry;\n\n let mut env = Env::new_virtual();\n\n env.builtins.insert(\"echo\", echo_builtin());\n\n env.functions.insert(HashEntry(Rc::new(Function {\n\n name: \"foo\".to_string(),\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 95, "score": 99186.58004554627 }, { "content": " env: &mut Env,\n\n assigns: &[Assign],\n\n redirs: Rc<Vec<Redir>>,\n\n) -> Result {\n\n // Perform redirections in a subshell\n\n let exit_status = if let Some(redir) = redirs.first() {\n\n let first_redir_location = redir.body.operand().location.clone();\n\n let redir_results = env.run_in_subshell(move |env| {\n\n Box::pin(async move {\n\n let env = &mut ExitStatusAdapter::new(env);\n\n let env = &mut RedirGuard::new(env);\n\n perform_redirs(env, &*redirs).await?;\n\n env.exit_status = env.last_command_subst_exit_status().unwrap_or_default();\n\n Continue(())\n\n })\n\n });\n\n match redir_results.await {\n\n Ok(exit_status) => exit_status,\n\n Err(errno) => {\n\n print_error(\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 96, "score": 99186.45456629345 }, { "content": "\n\n #[test]\n\n fn 
simple_command_returns_127_for_non_existing_file() {\n\n in_virtual_system(|mut env, _pid, _state| async move {\n\n let command: syntax::SimpleCommand = \"/some/file\".parse().unwrap();\n\n let result = command.execute(&mut env).await;\n\n assert_eq!(result, Continue(()));\n\n assert_eq!(env.exit_status, ExitStatus::NOT_FOUND);\n\n });\n\n }\n\n\n\n #[test]\n\n fn simple_command_returns_126_on_exec_failure() {\n\n in_virtual_system(|mut env, _pid, state| async move {\n\n let path = PathBuf::from(\"/some/file\");\n\n let mut content = INode::default();\n\n content.permissions.0 |= 0o100;\n\n let content = Rc::new(RefCell::new(content));\n\n state.borrow_mut().file_system.save(path, content);\n\n\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 97, "score": 99186.01421071714 }, { "content": " let mut env = Env::with_system(Box::new(system));\n\n env.builtins.insert(\"local\", local_builtin());\n\n let command: syntax::SimpleCommand = \"v=42 local v\".parse().unwrap();\n\n block_on(command.execute(&mut env));\n\n assert_eq!(env.variables.get(\"v\"), None);\n\n\n\n let state = state.borrow();\n\n let file = state.file_system.get(\"/dev/stdout\").unwrap().borrow();\n\n assert_eq!(file.content, \"v=42\\n\".as_bytes());\n\n }\n\n\n\n #[test]\n\n fn simple_command_returns_exit_status_from_function() {\n\n use yash_env::function::HashEntry;\n\n let mut env = Env::new_virtual();\n\n env.builtins.insert(\"return\", return_builtin());\n\n env.functions.insert(HashEntry(Rc::new(Function {\n\n name: \"foo\".to_string(),\n\n body: Rc::new(\"{ return -n 13; }\".parse().unwrap()),\n\n origin: Location::dummy(\"dummy\"),\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 98, "score": 99185.77065812764 }, { "content": " let state = state.borrow();\n\n let file = state.file_system.get(\"/tmp/file\").unwrap().borrow();\n\n assert_eq!(file.content, \"hello\\n\".as_bytes());\n\n }\n\n\n\n #[test]\n\n fn 
simple_command_assigns_permanently_for_special_builtin() {\n\n let mut env = Env::new_virtual();\n\n env.builtins.insert(\"return\", return_builtin());\n\n let command: syntax::SimpleCommand = \"v=42 return -n 0\".parse().unwrap();\n\n block_on(command.execute(&mut env));\n\n let v = env.variables.get(\"v\").unwrap();\n\n assert_eq!(v.value, Value::Scalar(\"42\".to_string()));\n\n assert!(!v.is_exported);\n\n }\n\n\n\n #[test]\n\n fn simple_command_assigns_temporarily_for_regular_builtin() {\n\n let system = VirtualSystem::new();\n\n let state = Rc::clone(&system.state);\n", "file_path": "yash-semantics/src/command_impl/simple_command.rs", "rank": 99, "score": 99185.35187807729 } ]
Rust
src/lib.rs
ordovicia/mix-distribution
eb87605c4dd005f5f49ad2b1a6de18bed1a5b51c
use std::{fmt, marker::PhantomData, ops::AddAssign}; use rand::Rng; use rand_distr::{ uniform::{SampleBorrow, SampleUniform}, weighted::{WeightedError, WeightedIndex}, Distribution, }; pub struct Mix<T, U, X> where T: Distribution<U>, X: SampleUniform + PartialOrd, { distributions: Vec<T>, weights: WeightedIndex<X>, _marker: PhantomData<U>, } impl<T, U, X> Mix<T, U, X> where T: Distribution<U>, X: SampleUniform + PartialOrd, { pub fn new<I, J>(dists: I, weights: J) -> Result<Self, WeightedError> where I: IntoIterator<Item = T>, J: IntoIterator, J::Item: SampleBorrow<X>, X: for<'a> AddAssign<&'a X> + Clone + Default, { Ok(Self { distributions: dists.into_iter().collect(), weights: WeightedIndex::new(weights)?, _marker: PhantomData, }) } pub fn with_zip<W>( dists_weights: impl IntoIterator<Item = (T, W)>, ) -> Result<Self, WeightedError> where W: SampleBorrow<X>, X: for<'a> AddAssign<&'a X> + Clone + Default, { let (distributions, weights): (Vec<_>, Vec<_>) = dists_weights.into_iter().unzip(); Ok(Self { distributions, weights: WeightedIndex::new(weights)?, _marker: PhantomData, }) } } impl<T, U, X> Distribution<U> for Mix<T, U, X> where T: Distribution<U>, X: SampleUniform + PartialOrd, { fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> U { let idx = self.weights.sample(rng); self.distributions[idx].sample(rng) } } impl<T, U, X> Clone for Mix<T, U, X> where T: Distribution<U> + Clone, X: SampleUniform + PartialOrd + Clone, X::Sampler: Clone, { fn clone(&self) -> Self { Self { distributions: self.distributions.clone(), weights: self.weights.clone(), _marker: PhantomData, } } } impl<T, U, X> fmt::Debug for Mix<T, U, X> where T: Distribution<U> + fmt::Debug, X: SampleUniform + PartialOrd + fmt::Debug, X::Sampler: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Mix") .field("distributions", &self.distributions) .field("weights", &self.weights) .finish() } } #[cfg(test)] mod tests { use super::*; use rand_distr::{Normal, Uniform}; 
#[test] #[ignore] fn test_mix_plot() { let mut rng = rand::thread_rng(); let mix = { let dists = vec![ Normal::new(0.0, 1.0).unwrap(), Normal::new(5.0, 2.0).unwrap(), ]; let weights = &[2, 1]; Mix::new(dists, weights).unwrap() }; for _ in 0..30000 { println!("{} # mix", mix.sample(&mut rng)); } } #[test] fn test_mix_2() { let mut rng = rand::thread_rng(); let mix = { let dists = vec![Uniform::new_inclusive(0, 0), Uniform::new_inclusive(1, 1)]; let weights = &[2, 1]; Mix::new(dists, weights).unwrap() }; let data = mix.sample_iter(&mut rng).take(300).collect::<Vec<_>>(); let zeros = data.iter().filter(|&&x| x == 0).count(); let ones = data.iter().filter(|&&x| x == 1).count(); assert_eq!(zeros + ones, 300); assert_eq!((zeros as f64 / 100.0).round() as i32, 2); assert_eq!((ones as f64 / 100.0).round() as i32, 1); } #[test] fn test_mix_3() { let mut rng = rand::thread_rng(); let mix = { let dists = vec![ Uniform::new_inclusive(0, 0), Uniform::new_inclusive(1, 1), Uniform::new_inclusive(2, 2), ]; let weights = &[3, 2, 1]; Mix::new(dists, weights).unwrap() }; let data = mix.sample_iter(&mut rng).take(600).collect::<Vec<_>>(); let zeros = data.iter().filter(|&&x| x == 0).count(); let ones = data.iter().filter(|&&x| x == 1).count(); let twos = data.iter().filter(|&&x| x == 2).count(); assert_eq!(zeros + ones + twos, 600); assert_eq!((zeros as f64 / 100.0).round() as i32, 3); assert_eq!((ones as f64 / 100.0).round() as i32, 2); assert_eq!((twos as f64 / 100.0).round() as i32, 1); } #[test] fn test_weight_f64() { let mut rng = rand::thread_rng(); let mix = { let dists = vec![Uniform::new_inclusive(0, 0), Uniform::new_inclusive(1, 1)]; let weights = &[0.4, 0.6]; Mix::new(dists, weights).unwrap() }; let data = mix.sample_iter(&mut rng).take(1000).collect::<Vec<_>>(); let zeros = data.iter().filter(|&&x| x == 0).count(); let ones = data.iter().filter(|&&x| x == 1).count(); assert_eq!(zeros + ones, 1000); assert_eq!((zeros as f64 / 100.0).round() as i32, 4); assert_eq!((ones as 
f64 / 100.0).round() as i32, 6); } #[test] fn test_zip() { let mut rng = rand::thread_rng(); let mix = Mix::with_zip(vec![ (Uniform::new_inclusive(0, 0), 2), (Uniform::new_inclusive(1, 1), 1), ]) .unwrap(); let data = mix.sample_iter(&mut rng).take(300).collect::<Vec<_>>(); let zeros = data.iter().filter(|&&x| x == 0).count(); let ones = data.iter().filter(|&&x| x == 1).count(); assert_eq!(zeros + ones, 300); assert_eq!((zeros as f64 / 100.0).round() as i32, 2); assert_eq!((ones as f64 / 100.0).round() as i32, 1); } #[test] fn error_invalid_weights() { let dists = vec![Uniform::new_inclusive(0, 0), Uniform::new_inclusive(1, 1)]; let weights = &[2, 1][0..0]; assert_eq!( Mix::new(dists.clone(), weights).unwrap_err(), WeightedError::NoItem, ); let weights = &[2, -1]; assert_eq!( Mix::new(dists.clone(), weights).unwrap_err(), WeightedError::InvalidWeight, ); let weights = &[0, 0]; assert_eq!( Mix::new(dists, weights).unwrap_err(), WeightedError::AllWeightsZero, ); } }
use std::{fmt, marker::PhantomData, ops::AddAssign}; use rand::Rng; use rand_distr::{ uniform::{SampleBorrow, SampleUniform}, weighted::{WeightedError, WeightedIndex}, Distribution, }; pub struct Mix<T, U, X> where T: Distribution<U>, X: SampleUniform + PartialOrd, { distributions: Vec<T>, weights: WeightedIndex<X>, _marker: PhantomData<U>, } impl<T, U, X> Mix<T, U, X> where T: Distribution<U>, X: SampleUniform + PartialOrd, { pub fn new<I, J>(dists: I, weights: J) -> Result<Self, WeightedError> where I: IntoIterator<Item = T>, J: IntoIterator, J::Item: SampleBorrow<X>, X: for<'a> AddAssign<&'a X> + Clone + Default, { Ok(Self { distributions: dists.into_iter().collect(), weights: WeightedIndex::new(weights)?, _marker: PhantomData, }) } pub fn with_zip<W>( dists_weights: impl IntoIterator<Item = (T, W)>, ) -> Result<Self, WeightedError> where W: SampleBorrow<X>, X: for<'a> AddAssign<&'a X> + Clone + Default, { let (distributions, weights): (Vec<_>, Vec<_>) = dists_weights.into_iter().unzip(); Ok(Self { distributions, weights: WeightedIndex::new(weights)?, _marker: PhantomData, }) } } impl<T, U, X> Distribution<U> for Mix<T, U, X> where T: Distribution<U>, X: SampleUniform + PartialOrd, { fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> U { let idx = self.weights.sample(rng); self.distributions[idx].sample(rng) } } impl<T, U, X> Clone for Mix<T, U, X> where T: Distribution<U> + Clone, X: SampleUniform + PartialOrd + Clone, X::Sampler: Clone, { fn clone(&self) -> Self { Self { distributions: self.distributions.clone(), weights: self.weights.clone(), _marker: PhantomData, } } } impl<T, U, X> fmt::Debug for Mix<T, U, X> where T: Distribution<U> + fmt::Debug, X: SampleUniform + PartialOrd + fmt::Debug, X::Sampler: fmt::Debug, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("Mix") .field("distributions", &self.distributions) .field("weights", &self.weights) .finish() } } #[cfg(test)] mod tests { use super::*; use rand_distr::{Normal, Uniform}; 
#[test] #[ignore] fn test_mix_p
} #[test] fn test_mix_2() { let mut rng = rand::thread_rng(); let mix = { let dists = vec![Uniform::new_inclusive(0, 0), Uniform::new_inclusive(1, 1)]; let weights = &[2, 1]; Mix::new(dists, weights).unwrap() }; let data = mix.sample_iter(&mut rng).take(300).collect::<Vec<_>>(); let zeros = data.iter().filter(|&&x| x == 0).count(); let ones = data.iter().filter(|&&x| x == 1).count(); assert_eq!(zeros + ones, 300); assert_eq!((zeros as f64 / 100.0).round() as i32, 2); assert_eq!((ones as f64 / 100.0).round() as i32, 1); } #[test] fn test_mix_3() { let mut rng = rand::thread_rng(); let mix = { let dists = vec![ Uniform::new_inclusive(0, 0), Uniform::new_inclusive(1, 1), Uniform::new_inclusive(2, 2), ]; let weights = &[3, 2, 1]; Mix::new(dists, weights).unwrap() }; let data = mix.sample_iter(&mut rng).take(600).collect::<Vec<_>>(); let zeros = data.iter().filter(|&&x| x == 0).count(); let ones = data.iter().filter(|&&x| x == 1).count(); let twos = data.iter().filter(|&&x| x == 2).count(); assert_eq!(zeros + ones + twos, 600); assert_eq!((zeros as f64 / 100.0).round() as i32, 3); assert_eq!((ones as f64 / 100.0).round() as i32, 2); assert_eq!((twos as f64 / 100.0).round() as i32, 1); } #[test] fn test_weight_f64() { let mut rng = rand::thread_rng(); let mix = { let dists = vec![Uniform::new_inclusive(0, 0), Uniform::new_inclusive(1, 1)]; let weights = &[0.4, 0.6]; Mix::new(dists, weights).unwrap() }; let data = mix.sample_iter(&mut rng).take(1000).collect::<Vec<_>>(); let zeros = data.iter().filter(|&&x| x == 0).count(); let ones = data.iter().filter(|&&x| x == 1).count(); assert_eq!(zeros + ones, 1000); assert_eq!((zeros as f64 / 100.0).round() as i32, 4); assert_eq!((ones as f64 / 100.0).round() as i32, 6); } #[test] fn test_zip() { let mut rng = rand::thread_rng(); let mix = Mix::with_zip(vec![ (Uniform::new_inclusive(0, 0), 2), (Uniform::new_inclusive(1, 1), 1), ]) .unwrap(); let data = mix.sample_iter(&mut rng).take(300).collect::<Vec<_>>(); let zeros = 
data.iter().filter(|&&x| x == 0).count(); let ones = data.iter().filter(|&&x| x == 1).count(); assert_eq!(zeros + ones, 300); assert_eq!((zeros as f64 / 100.0).round() as i32, 2); assert_eq!((ones as f64 / 100.0).round() as i32, 1); } #[test] fn error_invalid_weights() { let dists = vec![Uniform::new_inclusive(0, 0), Uniform::new_inclusive(1, 1)]; let weights = &[2, 1][0..0]; assert_eq!( Mix::new(dists.clone(), weights).unwrap_err(), WeightedError::NoItem, ); let weights = &[2, -1]; assert_eq!( Mix::new(dists.clone(), weights).unwrap_err(), WeightedError::InvalidWeight, ); let weights = &[0, 0]; assert_eq!( Mix::new(dists, weights).unwrap_err(), WeightedError::AllWeightsZero, ); } }
lot() { let mut rng = rand::thread_rng(); let mix = { let dists = vec![ Normal::new(0.0, 1.0).unwrap(), Normal::new(5.0, 2.0).unwrap(), ]; let weights = &[2, 1]; Mix::new(dists, weights).unwrap() }; for _ in 0..30000 { println!("{} # mix", mix.sample(&mut rng)); }
function_block-random_span
[ { "content": "# Mixture Distributions\n\n\n\n[![Build Status][build-img]][build-link]\n\n[![mix-distribution][cratesio-img]][cratesio-link]\n\n[![mix-distribution][docsrs-img]][docsrs-link]\n\n\n\n[build-img]: https://travis-ci.com/ordovicia/mix-distribution.svg?branch=master\n\n[build-link]: https://travis-ci.com/ordovicia/mix-distribution\n\n\n\n[cratesio-img]: https://img.shields.io/crates/v/mix-distribution.svg\n\n[cratesio-link]: https://crates.io/crates/mix-distribution\n\n\n\n[docsrs-img]: https://docs.rs/mix-distribution/badge.svg\n\n[docsrs-link]: https://docs.rs/mix-distribution\n\n\n\n## Examples\n\n\n\n```rust\n\nuse rand_distr::{Distribution, Normal};\n\nuse mix_distribution::Mix;\n\n\n\nlet mut rng = rand::thread_rng();\n\n\n\n// Mixture of two distributions\n\nlet mix = {\n\n let dists = vec![\n\n Normal::new(0.0, 1.0).unwrap(),\n\n Normal::new(1.0, 2.0).unwrap(),\n\n ];\n\n let weights = &[2, 1];\n\n Mix::new(dists, weights).unwrap()\n\n};\n\nmix.sample(&mut rng);\n\n\n\n// Mixture of three distributions\n\nlet mix = {\n\n let dists = vec![\n\n Normal::new(0.0, 1.0).unwrap(),\n\n Normal::new(1.0, 2.0).unwrap(),\n\n Normal::new(-1.0, 1.0).unwrap(),\n\n ];\n\n let weights = &[2, 1, 3];\n\n Mix::new(dists, weights).unwrap()\n\n};\n\nmix.sample(&mut rng);\n\n\n\n// From iterator over (distribution, weight) pairs\n\nlet mix = Mix::with_zip(vec![\n\n (Uniform::new_inclusive(0, 0), 2),\n\n (Uniform::new_inclusive(1, 1), 1),\n\n])\n\n.unwrap();\n\nmix.sample(&mut rng);\n\n```\n\n\n\n## License\n\n\n\nCopyright 2018 Hidehito Yabuuchi \\<[email protected]\\>\n\n\n\nLicensed under the MIT license <LICENSE-MIT or http://opensource.org/licenses/MIT>, or the Apache\n\nLicense, Version 2.0 <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> at your option.\n\nAll files in the project carrying such notice may not be copied, modified, or distributed except\n\naccording to those terms.\n", "file_path": "README.md", "rank": 0, "score": 6670.180226032013 } 
]
Rust
lib/src/schema.rs
joepio/atomic
10b1e390d807b3defe0ce0f993b160b4ade46359
use crate::{datatype::DataType, errors::AtomicResult, urls, Resource, Value}; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Property { pub class_type: Option<String>, pub data_type: DataType, pub shortname: String, pub subject: String, pub description: String, } impl PartialEq for Property { fn eq(&self, other: &Self) -> bool { self.subject == other.subject } } impl Property { pub fn from_resource(resource: Resource) -> AtomicResult<Property> { let data_type = resource.get(urls::DATATYPE_PROP)?.to_string().parse()?; let shortname = resource.get(urls::SHORTNAME)?.to_string(); let description = resource.get(urls::DESCRIPTION)?.to_string(); let class_type = match resource.get(urls::CLASSTYPE_PROP) { Ok(classtype) => Some(classtype.to_string()), Err(_) => None, }; Ok(Property { class_type, data_type, shortname, description, subject: resource.get_subject().into(), }) } pub fn to_resource(&self) -> AtomicResult<Resource> { let mut resource = Resource::new(self.subject.clone()); resource.set_propval_unsafe( urls::IS_A.into(), Value::ResourceArray(vec![urls::PROPERTY.into()]), )?; resource.set_propval_unsafe(urls::SHORTNAME.into(), Value::Slug(self.shortname.clone()))?; resource.set_propval_unsafe( urls::DESCRIPTION.into(), Value::String(self.description.clone()), )?; resource.set_propval_unsafe( urls::DATATYPE_PROP.into(), Value::AtomicUrl(self.data_type.to_string()), )?; if let Some(classtype) = &self.class_type { resource.set_propval_unsafe( urls::CLASSTYPE_PROP.into(), Value::AtomicUrl(classtype.clone()), )?; } Ok(resource) } } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Class { pub requires: Vec<String>, pub recommends: Vec<String>, pub shortname: String, pub description: String, pub subject: String, } impl Class { pub fn from_resource(resource: Resource) -> AtomicResult<Class> { let mut requires = Vec::new(); if let Ok(reqs) = resource.get(urls::REQUIRES) { for prop_sub in reqs.to_vec()? 
{ requires.push(prop_sub.clone()) } } let mut recommends = Vec::new(); if let Ok(recs) = resource.get(urls::RECOMMENDS) { for rec_subject in recs.to_vec()? { recommends.push(rec_subject.clone()) } } let shortname = resource.get(urls::SHORTNAME)?.to_string(); let description = resource.get(urls::DESCRIPTION)?.to_string(); Ok(Class { requires, recommends, shortname, subject: resource.get_subject().into(), description, }) } pub fn to_resource(&self) -> AtomicResult<Resource> { let mut resource = Resource::new(self.subject.clone()); resource.set_propval_unsafe( urls::IS_A.into(), Value::ResourceArray(vec![urls::CLASS.into()]), )?; resource.set_propval_unsafe(urls::SHORTNAME.into(), Value::Slug(self.shortname.clone()))?; resource.set_propval_unsafe( urls::DESCRIPTION.into(), Value::String(self.description.clone()), )?; if !self.requires.is_empty() { resource.set_propval_unsafe( urls::REQUIRES.into(), Value::ResourceArray(self.requires.clone()), )?; } if !self.requires.is_empty() { resource.set_propval_unsafe( urls::RECOMMENDS.into(), Value::ResourceArray(self.recommends.clone()), )?; } Ok(resource) } }
use crate::{datatype::DataType, errors::AtomicResult, urls, Resource, Value}; use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Property { pub class_type: Option<String>, pub data_type: DataType, pub shortname: String, pub subject: String, pub description: String, } impl PartialEq for Property { fn eq(&self, other: &Self) -> bool { self.subject == other.subject } } impl Property { pub fn from_resource(resource: Resource) -> AtomicResult<Property> { let data_type = resource.get(urls::DATATYPE_PROP)?.to_string().parse()?; let shortname = resource.get(urls::SHORTNAME)?.to_string(); let description = resource.get(urls::DESCRIPTION)?.to_string(); let class_type = match resource.get(urls::CLASSTYPE_PROP) { Ok(classtype) => Some(classtype.to_string()), Err(_) => None, }; Ok(Property { class_type, data_type, shortname, description, subject: resource.get_subject().into(), }) } pub fn to_resource(&self) -> AtomicResult<Resource> { let mut resource = Resource::new(self.subject.clone()); resource.set_propval_unsafe( urls::IS_A.into(), Value::ResourceArray(vec![urls::PROPERTY.into()]), )?; resource.set_propval_unsafe(urls::SHORTNAME.into(), Value::Slug(self.shortname.clone()))?; resource.set_propval_unsafe( urls::DESCRIPTION.into(), Value::String(self.description.clone()), )?; resource.set_propval_unsafe( urls::DATATYPE_PROP.into(), Value::AtomicUrl(self.data_type.to_string()), )?; if let Some(classtype) = &self.class_type { resource.set_propval_unsafe( urls::CLASSTYPE_PROP.into(), Value::AtomicUrl(classtype.clone()), )?; } Ok(resource) } } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Class { pub requires: Vec<String>, pub recommends: Vec<String>, pub shortname: String, pub description: String, pub subject: String, } impl Class { pub fn from_resource(resource: Resource) -> AtomicResult<Class> { let mut requires = Vec::new(); if let Ok(reqs) = resource.get(urls::REQUIRES) { for prop_sub in reqs.to_vec()? 
{ requires.push(prop_sub.clone()) } } let mut recommends = Vec::new(); if let Ok(recs) = resource.get(urls::RECOMMENDS) { for rec_subject in recs.to_vec()? { recommends.push(rec_subject.clone()) } } let shortname = resource.get(urls::SHORTNAME)?.to_string(); let description = resource.get(urls::DESCRIPTION)?.to_string(); Ok(Class { requires, recommends, shortname, subject: resource.get_subject().into(), description, }) } pub fn to_resource(&self) -> AtomicResult<Resource> { let mut resource = Resource::new(self.subject.clone()); resource.set_propval_unsafe( urls::IS_A.into(), Value::ResourceArray(vec![urls::CLASS.into()]), )?; resource.set_propval_unsafe(urls::SHORTNAME.into(), Value::Slug(self.shortname.clone()))?; resource.set_propval_unsafe( urls::DESCRIPTION.into(), Value::String(self.description.clone()), )?; if !self.requires.is_empty() { resource.set_propval_unsafe( urls::REQUIRES.into(), Value::ResourceArray(self.requires.clone()), )?; }
Ok(resource) } }
if !self.requires.is_empty() { resource.set_propval_unsafe( urls::RECOMMENDS.into(), Value::ResourceArray(self.recommends.clone()), )?; }
if_condition
[ { "content": "/// Check if something is a URL\n\npub fn is_url(string: &str) -> bool {\n\n // TODO: Probably delete this second one, might break some tests though.\n\n string.starts_with(\"http\") || string.starts_with(\"_:\")\n\n}\n\n\n\nimpl IntoIterator for Mapping {\n\n type Item = (String, String);\n\n type IntoIter = IntoIter<String, String>;\n\n\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.hashmap.into_iter()\n\n }\n\n}\n", "file_path": "lib/src/mapping.rs", "rank": 0, "score": 246021.33764506294 }, { "content": "pub fn match_datatype(string: &str) -> DataType {\n\n match string {\n\n urls::ATOMIC_URL => DataType::AtomicUrl,\n\n urls::BOOLEAN => DataType::Boolean,\n\n urls::DATE => DataType::Date,\n\n urls::INTEGER => DataType::Integer,\n\n urls::FLOAT => DataType::Float,\n\n urls::MARKDOWN => DataType::Markdown,\n\n urls::RESOURCE_ARRAY => DataType::ResourceArray,\n\n urls::SLUG => DataType::Slug,\n\n urls::STRING => DataType::String,\n\n urls::TIMESTAMP => DataType::Timestamp,\n\n unsupported_datatype => DataType::Unsupported(unsupported_datatype.into()),\n\n }\n\n}\n\n\n\nimpl std::str::FromStr for DataType {\n\n type Err = ParseError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n", "file_path": "lib/src/datatype.rs", "rank": 1, "score": 220999.59440514827 }, { "content": "/// Prints a resource for the terminal with readble formatting and colors\n\npub fn pretty_print_resource(resource: &Resource, store: &impl Storelike) -> AtomicResult<String> {\n\n let mut output = String::new();\n\n output.push_str(&*format!(\n\n \"{0: <15}{1: <10} \\n\",\n\n \"subject\".blue().bold(),\n\n resource.get_subject()\n\n ));\n\n for (prop_url, val) in resource.get_propvals() {\n\n let prop_shortname = store.get_property(&prop_url)?.shortname;\n\n output.push_str(&*format!(\n\n \"{0: <15}{1: <10} \\n\",\n\n prop_shortname.blue().bold(),\n\n val.to_string()\n\n ));\n\n }\n\n Ok(output)\n\n}\n\n\n", "file_path": "cli/src/print.rs", "rank": 2, "score": 
215315.22529808484 }, { "content": "/// Fetches a resource, makes sure its subject matches.\n\n/// Checks the datatypes for the Values.\n\n/// Ignores all atoms where the subject is different.\n\n/// WARNING: Calls store methods, and is called by store methods, might get stuck in a loop!\n\npub fn fetch_resource(subject: &str, store: &impl Storelike) -> AtomicResult<Resource> {\n\n let body = fetch_body(subject, crate::parse::JSON_AD_MIME)?;\n\n let resource = parse_json_ad_resource(&body, store)\n\n .map_err(|e| format!(\"Error parsing body of {}: {}\", subject, e))?;\n\n Ok(resource)\n\n}\n\n\n", "file_path": "lib/src/client.rs", "rank": 3, "score": 213996.28413858503 }, { "content": "/// Looks for children relations, adds to the resource. Performs a TPF query, might be expensive.\n\npub fn add_children(store: &impl Storelike, resource: &mut Resource) -> AtomicResult<Resource> {\n\n let atoms = store.tpf(\n\n None,\n\n Some(urls::PARENT),\n\n Some(resource.get_subject()),\n\n false,\n\n )?;\n\n let mut children: Vec<String> = Vec::new();\n\n for atom in atoms {\n\n children.push(atom.subject)\n\n }\n\n resource.set_propval(urls::CHILDREN.into(), children.into(), store)?;\n\n Ok(resource.to_owned())\n\n}\n\n\n", "file_path": "lib/src/hierarchy.rs", "rank": 4, "score": 212819.03588955264 }, { "content": "pub fn base_url(url: &str) -> AtomicResult<String> {\n\n let mut parsed: Url = Url::parse(url)?;\n\n\n\n match parsed.path_segments_mut() {\n\n Ok(mut path) => {\n\n path.clear();\n\n }\n\n Err(_) => return Err(format!(\"Url {} is not valid.\", url).into()),\n\n }\n\n\n\n parsed.set_query(None);\n\n\n\n Ok(parsed.to_string())\n\n}\n\n\n", "file_path": "lib/src/url_helpers.rs", "rank": 5, "score": 211172.89964340618 }, { "content": "/// Serializes a vector or Resources to a JSON-AD string\n\npub fn resources_to_json_ad(resources: Vec<Resource>) -> AtomicResult<String> {\n\n let array: Vec<serde_json::Value> = resources\n\n .into_iter()\n\n .map(|r: Resource| {\n\n 
crate::serialize::propvals_to_json_ad_map(\n\n r.get_propvals(),\n\n Some(r.get_subject().clone()),\n\n )\n\n .expect(\"could not serialize to json-ad \")\n\n })\n\n .collect();\n\n let serde_array = serde_json::Value::from(array);\n\n serde_json::to_string_pretty(&serde_array).map_err(|_| \"Could not serialize to JSON-AD\".into())\n\n}\n\n\n", "file_path": "lib/src/serialize.rs", "rank": 6, "score": 206971.95225338894 }, { "content": "pub fn serialize_json_array(items: &[String]) -> AtomicResult<String> {\n\n let string = serde_json::to_string(items)?;\n\n Ok(string)\n\n}\n\n\n\n#[cfg(feature = \"rdf\")]\n", "file_path": "lib/src/serialize.rs", "rank": 7, "score": 199663.79725776764 }, { "content": "/// Serializes Atoms to Ntriples (which is also valid Turtle / Notation3).\n\npub fn atoms_to_ntriples(atoms: Vec<Atom>, store: &impl Storelike) -> AtomicResult<String> {\n\n use rio_api::formatter::TriplesFormatter;\n\n use rio_api::model::{Literal, NamedNode, Term, Triple};\n\n use rio_turtle::NTriplesFormatter;\n\n\n\n let mut formatter = NTriplesFormatter::new(Vec::default());\n\n for atom in atoms {\n\n let subject = NamedNode { iri: &atom.subject }.into();\n\n let predicate = NamedNode {\n\n iri: &atom.property,\n\n };\n\n let datatype = store.get_property(&atom.property)?.data_type;\n\n let value = &atom.value.to_string();\n\n let datatype_url = datatype.to_string();\n\n let object: Term = match &datatype {\n\n DataType::AtomicUrl => NamedNode { iri: value }.into(),\n\n // Maybe these should be converted to RDF collections / lists?\n\n // DataType::ResourceArray => {}\n\n DataType::String => Literal::Simple { value }.into(),\n\n _dt => Literal::Typed {\n", "file_path": "lib/src/serialize.rs", "rank": 8, "score": 196368.6006469478 }, { "content": "/// Serializes Atoms to Ntriples (which is also valid Turtle / Notation3).\n\npub fn atoms_to_turtle(atoms: Vec<Atom>, store: &impl Storelike) -> AtomicResult<String> {\n\n use rio_api::formatter::TriplesFormatter;\n\n 
use rio_api::model::{Literal, NamedNode, Term, Triple};\n\n use rio_turtle::TurtleFormatter;\n\n\n\n let mut formatter = TurtleFormatter::new(Vec::default());\n\n\n\n for atom in atoms {\n\n let subject = NamedNode { iri: &atom.subject }.into();\n\n let predicate = NamedNode {\n\n iri: &atom.property,\n\n };\n\n let datatype = store.get_property(&atom.property)?.data_type;\n\n let value = &atom.value.to_string();\n\n let datatype_url = datatype.to_string();\n\n let object: Term = match &datatype {\n\n DataType::AtomicUrl => NamedNode { iri: value }.into(),\n\n // Maybe these should be converted to RDF collections / lists?\n\n // DataType::ResourceArray => {}\n\n DataType::String => Literal::Simple { value }.into(),\n", "file_path": "lib/src/serialize.rs", "rank": 9, "score": 196368.6006469478 }, { "content": "/// Parses JSON-AD strings to resources\n\npub fn parse_json_ad_array(string: &str, store: &impl Storelike) -> AtomicResult<Vec<Resource>> {\n\n let parsed: serde_json::Value = serde_json::from_str(string)?;\n\n let mut vec = Vec::new();\n\n match parsed {\n\n serde_json::Value::Array(arr) => {\n\n for item in arr {\n\n match item {\n\n serde_json::Value::Object(obj) => {\n\n vec.push(json_ad_object_to_resource(obj, store)?)\n\n }\n\n wrong => {\n\n return Err(\n\n format!(\"Wrong datatype, expected object, got: {:?}\", wrong).into()\n\n )\n\n }\n\n }\n\n }\n\n }\n\n serde_json::Value::Object(obj) => vec.push(json_ad_object_to_resource(obj, store)?),\n\n _other => return Err(\"Root JSON element must be an object or array.\".into()),\n\n }\n\n Ok(vec)\n\n}\n\n\n", "file_path": "lib/src/parse.rs", "rank": 10, "score": 189686.36737362703 }, { "content": "// Asks for and saves the bookmark. 
Returns the shortname.\n\nfn prompt_bookmark(mapping: &mut mapping::Mapping, subject: &str) -> Option<String> {\n\n let re = Regex::new(atomic_lib::values::SLUG_REGEX).unwrap();\n\n let mut shortname: Option<String> = prompt_opt(\"Local Bookmark (optional)\").unwrap();\n\n loop {\n\n match shortname.as_ref() {\n\n Some(sn) => {\n\n if mapping.contains_key(sn) {\n\n let msg = format!(\n\n \"You're already using that shortname for {:?}, try something else\",\n\n mapping.get(sn).unwrap()\n\n );\n\n shortname = prompt_opt(msg).unwrap();\n\n } else if re.is_match(&sn.as_str()) {\n\n mapping.insert(sn.into(), subject.into());\n\n return Some(String::from(sn));\n\n } else {\n\n shortname =\n\n prompt_opt(\"Not a valid bookmark, only use letters, numbers, and '-'\")\n\n .unwrap();\n\n }\n\n }\n\n None => return None,\n\n }\n\n }\n\n}\n", "file_path": "cli/src/new.rs", "rank": 11, "score": 184087.3477502411 }, { "content": "fn handle_all_versions_request(url: url::Url, store: &impl Storelike) -> AtomicResult<Resource> {\n\n let params = url.query_pairs();\n\n let mut target_subject = None;\n\n for (k, v) in params {\n\n if let \"subject\" = k.as_ref() {\n\n target_subject = Some(v.to_string())\n\n };\n\n }\n\n if target_subject.is_none() {\n\n return all_versions_endpoint().to_resource(store);\n\n }\n\n let target = target_subject.unwrap();\n\n let collection_builder = CollectionBuilder {\n\n subject: url.to_string(),\n\n property: Some(urls::SUBJECT.into()),\n\n value: Some(target.clone()),\n\n sort_by: None,\n\n sort_desc: false,\n\n current_page: 0,\n\n page_size: 20,\n", "file_path": "lib/src/plugins/versioning.rs", "rank": 12, "score": 183462.9584110909 }, { "content": "fn handle_path_request(url: url::Url, store: &impl Storelike) -> AtomicResult<Resource> {\n\n let params = url.query_pairs();\n\n let mut path = None;\n\n for (k, v) in params {\n\n if let \"path\" = k.as_ref() {\n\n path = Some(v.to_string())\n\n };\n\n }\n\n if path.is_none() {\n\n return 
path_endpoint().to_resource(store);\n\n }\n\n let result = store.get_path(&path.unwrap(), None)?;\n\n match result {\n\n crate::storelike::PathReturn::Subject(subject) => store.get_resource(&subject),\n\n crate::storelike::PathReturn::Atom(atom) => {\n\n let mut resource = Resource::new(url.to_string());\n\n resource.set_propval_string(urls::ATOM_SUBJECT.into(), &atom.subject, store)?;\n\n resource.set_propval_string(urls::ATOM_PROPERTY.into(), &atom.property, store)?;\n\n resource.set_propval_string(urls::ATOM_VALUE.into(), &atom.value.to_string(), store)?;\n\n Ok(resource)\n\n }\n\n }\n\n}\n", "file_path": "lib/src/plugins/path.rs", "rank": 13, "score": 183462.9584110909 }, { "content": "fn handle_version_request(url: url::Url, store: &impl Storelike) -> AtomicResult<Resource> {\n\n let params = url.query_pairs();\n\n let mut commit_url = None;\n\n for (k, v) in params {\n\n if let \"commit\" = k.as_ref() {\n\n commit_url = Some(v.to_string())\n\n };\n\n }\n\n if commit_url.is_none() {\n\n return version_endpoint().to_resource(store);\n\n }\n\n let mut resource = construct_version(&commit_url.unwrap(), store)?;\n\n resource.set_subject(url.to_string());\n\n Ok(resource)\n\n}\n\n\n", "file_path": "lib/src/plugins/versioning.rs", "rank": 14, "score": 183462.9584110909 }, { "content": "/// Fetches a URL, returns its body\n\npub fn fetch_body(url: &str, content_type: &str) -> AtomicResult<String> {\n\n if !url.starts_with(\"http\") {\n\n return Err(format!(\"Could not fetch url '{}', must start with http.\", url).into());\n\n }\n\n let resp = ureq::get(url)\n\n .set(\"Accept\", content_type)\n\n .timeout_read(2000)\n\n .call();\n\n if resp.status() != 200 {\n\n return Err(format!(\"Could not fetch url '{}'. 
Status: {}\", url, resp.status()).into());\n\n };\n\n let body = resp\n\n .into_string()\n\n .map_err(|e| format!(\"Could not parse response {}: {}\", url, e))?;\n\n Ok(body)\n\n}\n\n\n", "file_path": "lib/src/client.rs", "rank": 15, "score": 181143.3231503982 }, { "content": "/// Constructs a Resource version for a specific Commit\n\n/// Only works if the current store has the required Commits\n\npub fn construct_version(commit_url: &str, store: &impl Storelike) -> AtomicResult<Resource> {\n\n let commit = store.get_resource(commit_url)?;\n\n // Get all the commits for the subject of that Commit\n\n let subject = &commit.get(urls::SUBJECT)?.to_string();\n\n let mut commits = get_commits_for_resource(subject, store)?;\n\n // Sort all commits by date\n\n commits.sort_by(|a, b| a.created_at.cmp(&b.created_at));\n\n let mut version = Resource::new(subject.into());\n\n for commit in commits {\n\n if let Some(current_commit) = commit.url.clone() {\n\n let updated = commit.apply_changes(version, store, false)?;\n\n version = updated;\n\n // Stop iterating when the target commit has been applied.\n\n if current_commit == commit_url {\n\n break;\n\n }\n\n }\n\n }\n\n Ok(version)\n\n}\n\n\n", "file_path": "lib/src/plugins/versioning.rs", "rank": 16, "score": 175570.283140029 }, { "content": "/// Gets a version of a Resource by Commit.\n\n/// Tries cached version, constructs one if there is no cached version.\n\npub fn get_version(commit_url: &str, store: &impl Storelike) -> AtomicResult<Resource> {\n\n let version_url = construct_version_endpoint_url(store, commit_url);\n\n match store.get_resource(&version_url) {\n\n Ok(cached) => Ok(cached),\n\n Err(_not_cached) => {\n\n let version = construct_version(commit_url, store)?;\n\n // Store constructed version for caching\n\n store.add_resource(&version)?;\n\n Ok(version)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::{Resource, Store};\n\n\n\n #[test]\n\n fn constructs_versions() {\n", 
"file_path": "lib/src/plugins/versioning.rs", "rank": 17, "score": 175566.16208475322 }, { "content": "/// Creates the versioning URL for some specific Commit\n\nfn construct_version_endpoint_url(store: &impl Storelike, commit_url: &str) -> String {\n\n format!(\n\n \"{}/versioning?commit={}\",\n\n store.get_base_url(),\n\n urlencoding::encode(commit_url)\n\n )\n\n}\n\n\n", "file_path": "lib/src/plugins/versioning.rs", "rank": 18, "score": 172454.39508000133 }, { "content": "// Returns None if the string is empty.\n\n// Useful for parsing form inputs.\n\npub fn empty_to_nothing(string: Option<String>) -> Option<String> {\n\n match string.as_ref() {\n\n Some(st) => {\n\n if st.is_empty() {\n\n None\n\n } else {\n\n string\n\n }\n\n }\n\n None => None,\n\n }\n\n}\n", "file_path": "server/src/helpers.rs", "rank": 19, "score": 170841.3414655084 }, { "content": "/// Checks if the certificates need to be renewed.\n\npub fn check_expiration_certs() -> bool {\n\n let created_at = std::fs::read_to_string(CERTS_CREATED_AT)\n\n .expect(&*format!(\"Unable to read {}\", CERTS_CREATED_AT))\n\n .parse::<chrono::DateTime<chrono::Utc>>()\n\n .expect(&*format!(\"failed to parse {}\", CERTS_CREATED_AT));\n\n let certs_age: chrono::Duration = chrono::Utc::now() - created_at;\n\n // Let's Encrypt certificates are valid for three months, but I think renewing earlier provides a better UX.\n\n let expired = certs_age > chrono::Duration::weeks(4);\n\n if expired {\n\n log::warn!(\"HTTPS Certificates expired, requesting new ones...\")\n\n // This is where I might need to remove the `.https/` folder, but it seems like it's not necessary\n\n };\n\n expired\n\n}\n", "file_path": "server/src/https.rs", "rank": 20, "score": 166826.14461659238 }, { "content": "/// Throws an error if the URL is not a valid URL\n\npub fn check_valid_url(url: &str) -> AtomicResult<()> {\n\n if !url.starts_with(\"http\") {\n\n return Err(format!(\"Url does not start with http: {}\", url).into());\n\n }\n\n 
Ok(())\n\n}\n", "file_path": "lib/src/url_helpers.rs", "rank": 21, "score": 163525.49417173536 }, { "content": "fn corrupt_db_message(subject: &str) -> String {\n\n return format!(\"Could not deserialize item {} from database. DB is possibly corrupt, could be due to an update or a lack of migrations. Restore to a previous version, export / serialize your data and import your data again.\", subject);\n\n}\n\n\n\nconst DB_CORRUPT_MSG: &str = \"Could not deserialize item from database. DB is possibly corrupt, could be due to an update or a lack of migrations. Restore to a previous version, export / serialize your data and import your data again.\";\n\n\n\n#[cfg(test)]\n\npub mod test {\n\n use crate::urls;\n\n\n\n use super::*;\n\n use ntest::timeout;\n\n\n\n /// Creates new temporary database, populates it, removes previous one.\n\n /// Can only be run one thread at a time, because it requires a lock on the DB file.\n\n fn init() -> Db {\n\n let tmp_dir_path = \"tmp/db\";\n\n let _try_remove_existing = std::fs::remove_dir_all(tmp_dir_path);\n\n let store = Db::init(tmp_dir_path, \"https://localhost\".into()).unwrap();\n\n let agent = store.create_agent(None).unwrap();\n", "file_path": "lib/src/db.rs", "rank": 22, "score": 162744.8650889296 }, { "content": "pub fn parse_json_array(string: &str) -> AtomicResult<Vec<String>> {\n\n let vector: Vec<String> = serde_json::from_str(string)?;\n\n Ok(vector)\n\n}\n\n\n\nuse serde_json::Map;\n\n\n", "file_path": "lib/src/parse.rs", "rank": 23, "score": 156569.30449467257 }, { "content": "/// Searches the local store for all commits with this subject\n\nfn get_commits_for_resource(subject: &str, store: &impl Storelike) -> AtomicResult<Vec<Commit>> {\n\n let commit_atoms = store.tpf(None, Some(urls::SUBJECT), Some(subject), false)?;\n\n let mut commit_resources = Vec::new();\n\n for atom in commit_atoms {\n\n let commit = crate::Commit::from_resource(store.get_resource(&atom.subject)?)?;\n\n commit_resources.push(commit)\n\n 
}\n\n Ok(commit_resources)\n\n}\n\n\n", "file_path": "lib/src/plugins/versioning.rs", "rank": 24, "score": 154384.27211673948 }, { "content": "pub fn encode_hex(bytes: &[u8]) -> String {\n\n let mut s = String::with_capacity(bytes.len() * 2);\n\n for &b in bytes {\n\n write!(&mut s, \"{:02x}\", b).unwrap();\n\n }\n\n s\n\n}\n", "file_path": "lib/examples/compare_ring.rs", "rank": 25, "score": 151863.31804150884 }, { "content": "/// Create a new instance of some class through a series of prompts, adds it to the store\n\npub fn new(context: &mut Context) -> AtomicResult<()> {\n\n let class_input = context\n\n .matches\n\n .subcommand_matches(\"new\")\n\n .unwrap()\n\n .value_of(\"class\")\n\n .expect(\"Add a class value\");\n\n let class_url = context\n\n .mapping\n\n .lock()\n\n .unwrap()\n\n .try_mapping_or_url(class_input)\n\n .unwrap();\n\n let class = context.store.get_class(&class_url)?;\n\n println!(\"Enter a new {}: {}\", class.shortname, class.description);\n\n let (resource, _bookmark) = prompt_instance(context, &class, None)?;\n\n println!(\n\n \"Succesfully created a new {}: subject: {}\",\n\n class.shortname,\n\n resource.get_subject()\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "cli/src/new.rs", "rank": 26, "score": 149311.93615968924 }, { "content": "/// Returns preffered serialization format. Defaults to pretty.\n\npub fn get_serialization(argmatches: &ArgMatches) -> AtomicResult<Format> {\n\n let format = if let Some(preffered_format) = argmatches.value_of(\"as\") {\n\n match preffered_format {\n\n \"pretty\" => (Format::Pretty),\n\n \"json\" => (Format::Json),\n\n \"jsonld\" => (Format::JsonLd),\n\n \"jsonad\" => (Format::JsonAd),\n\n \"nt\" => (Format::NTriples),\n\n \"turtle\" => (Format::NTriples),\n\n \"n3\" => (Format::NTriples),\n\n format => {\n\n return Err(\n\n format!(\"As {} not supported. 
Try {:?}\", format, SERIALIZE_OPTIONS).into(),\n\n );\n\n }\n\n }\n\n } else {\n\n Format::Pretty\n\n };\n\n Ok(format)\n\n}\n\n\n", "file_path": "cli/src/print.rs", "rank": 27, "score": 147470.06065980942 }, { "content": "/// Prints a resource to the command line\n\npub fn print_resource(\n\n context: &Context,\n\n resource: &Resource,\n\n argmatches: &ArgMatches,\n\n) -> AtomicResult<()> {\n\n let out = match get_serialization(argmatches)? {\n\n Format::Json => resource.to_json(&context.store)?,\n\n Format::JsonLd => resource.to_json_ld(&context.store)?,\n\n Format::JsonAd => resource.to_json_ad()?,\n\n Format::NTriples => serialize::atoms_to_ntriples(resource.to_atoms()?, &context.store)?,\n\n Format::Pretty => pretty_print_resource(&resource, &context.store)?,\n\n };\n\n println!(\"{}\", out);\n\n Ok(())\n\n}\n", "file_path": "cli/src/print.rs", "rank": 28, "score": 146946.57741358088 }, { "content": "/// Resolves an Atomic Path query\n\npub fn get_path(context: &mut Context) -> AtomicResult<()> {\n\n let subcommand_matches = context.matches.subcommand_matches(\"get\").unwrap();\n\n let path_vec: Vec<&str> = subcommand_matches\n\n .values_of(\"path\")\n\n .expect(\"Add a URL, shortname or path\")\n\n .collect();\n\n let path_string: String = path_vec.join(\" \");\n\n let serialization: Format = get_serialization(subcommand_matches)?;\n\n\n\n // Returns a URL or Value\n\n let store = &mut context.store;\n\n let path = store.get_path(&path_string, Some(&context.mapping.lock().unwrap()))?;\n\n let out = match path {\n\n storelike::PathReturn::Subject(subject) => {\n\n let resource = store.get_resource_extended(&subject)?;\n\n print_resource(context, &resource, subcommand_matches)?;\n\n return Ok(());\n\n }\n\n storelike::PathReturn::Atom(atom) => match serialization {\n\n Format::JsonLd | Format::Json | Format::JsonAd | Format::Pretty => {\n", "file_path": "cli/src/path.rs", "rank": 29, "score": 146674.45787098524 }, { "content": "/// Generates some nice 
collections for classes, such as `/agent` and `/collection`.\n\n/// Requires a `self_url` to be set in the store.\n\npub fn populate_collections(store: &impl Storelike) -> AtomicResult<()> {\n\n use crate::collections::CollectionBuilder;\n\n\n\n let classes_atoms = store.tpf(\n\n None,\n\n Some(\"https://atomicdata.dev/properties/isA\"),\n\n Some(\"https://atomicdata.dev/classes/Class\"),\n\n true,\n\n )?;\n\n\n\n for atom in classes_atoms {\n\n let class = store.get_class(&atom.subject)?;\n\n // Can't import this for some reason - even if it's there in cargo.toml\n\n // let plural_name = pluralize_rs::to_plural(class.shortname);\n\n\n\n // Pluralize the shortname\n\n let pluralized = match class.shortname.as_ref() {\n\n \"class\" => \"classes\".to_string(),\n\n \"property\" => \"properties\".to_string(),\n\n other => format!(\"{}s\", other).to_string(),\n", "file_path": "lib/src/populate.rs", "rank": 30, "score": 146051.09204745368 }, { "content": "/// Adds the hierarchy related items (Drive, default Folder) to the Store.\n\n/// Sets the home page as the top level node, and gives write rights to the default agent.\n\n/// Requires a `self_url` to be set in the store.\n\npub fn populate_hierarchy(store: &impl Storelike) -> AtomicResult<()> {\n\n let self_url = store\n\n .get_self_url()\n\n .ok_or(\"No self_url set, cannot populate store with Drive\")?;\n\n let mut drive = crate::Resource::new_instance(urls::DRIVE, store)?;\n\n drive.set_subject(self_url);\n\n let base_url = url::Url::parse(store.get_base_url())?;\n\n drive.set_propval_string(\n\n urls::NAME.into(),\n\n base_url.host_str().ok_or(\"Can't use current base URL\")?,\n\n store,\n\n )?;\n\n // The root agent does not yet exist\n\n // let root_agent = store.get_default_agent()?.subject;\n\n // drive.set_propval(\n\n // urls::READ.into(),\n\n // Value::ResourceArray(vec![root_agent.clone()]),\n\n // store,\n\n // )?;\n\n // drive.set_propval(\n\n // urls::WRITE.into(),\n\n // 
Value::ResourceArray(vec![root_agent]),\n\n // store,\n\n // )?;\n\n drive.save_locally(store)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/src/populate.rs", "rank": 31, "score": 146045.5723673261 }, { "content": "/// Serializes a Resource to a Serde JSON Map\n\npub fn propvals_to_json_ld(\n\n propvals: &PropVals,\n\n subject: Option<String>,\n\n store: &impl Storelike,\n\n json_ld: bool,\n\n) -> AtomicResult<serde_json::Value> {\n\n // Initiate JSON object\n\n let mut root = Map::new();\n\n // For JSON-LD serialization\n\n let mut context = Map::new();\n\n // For every atom, find the key, datatype and add it to the @context\n\n for (prop_url, value) in propvals.iter() {\n\n // The property is only needed in JSON-LD and JSON for shortnames\n\n let property = store.get_property(prop_url)?;\n\n if json_ld {\n\n // In JSON-LD, the value of a Context Item can be a string or an object.\n\n // This object can contain information about the translation or datatype of the value\n\n let ctx_value: SerdeValue = match value.datatype() {\n\n DataType::AtomicUrl => {\n\n let mut obj = Map::new();\n", "file_path": "lib/src/serialize.rs", "rank": 32, "score": 143864.79255312757 }, { "content": "/// Populates a store with some of the most fundamental Properties and Classes needed to bootstrap the whole.\n\n/// This is necessary to prevent a loop where Property X (like the `shortname` Property)\n\n/// cannot be added, because it's Property Y (like `description`) has to be fetched before it can be added,\n\n/// which in turn has property Property X (`shortname`) which needs to be fetched before.\n\n/// https://github.com/joepio/atomic/issues/60\n\npub fn populate_base_models(store: &impl Storelike) -> AtomicResult<()> {\n\n // Start with adding the most fundamental properties - the properties for Properties\n\n\n\n let shortname = Property {\n\n class_type: None,\n\n data_type: DataType::Slug,\n\n shortname: \"shortname\".into(),\n\n description: \"A short name of something. 
It can only contain letters, numbers and dashes `-`. Use dashes to denote spaces between words. Not case sensitive - lowercase only. Useful in programming contexts where the user should be able to type something short to identify a specific thing.\".into(),\n\n subject: urls::SHORTNAME.into(),\n\n }.to_resource()?;\n\n store.add_resource_unsafe(&shortname)?;\n\n\n\n let description = Property {\n\n class_type: None,\n\n data_type: DataType::Markdown,\n\n shortname: \"description\".into(),\n\n description: \"A textual description of something. When making a description, make sure that the first few words tell the most important part. Give examples. Since the text supports markdown, you're free to use links and more.\".into(),\n\n subject: urls::DESCRIPTION.into(),\n\n }.to_resource()?;\n\n store.add_resource_unsafe(&description)?;\n", "file_path": "lib/src/populate.rs", "rank": 33, "score": 143577.40627906195 }, { "content": "/// Imports the Atomic Data Core items (the entire atomicdata.dev Ontology / Vocabulary) from default_store.jsonld\n\npub fn populate_default_store(store: &impl Storelike) -> AtomicResult<()> {\n\n let json = include_str!(\"../defaults/default_store.json\");\n\n store.import(json)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/src/populate.rs", "rank": 34, "score": 143562.01391426404 }, { "content": "/// Serializes a Resource to a Serde JSON Map\n\npub fn propvals_to_json_ad_map(\n\n propvals: &PropVals,\n\n subject: Option<String>,\n\n) -> AtomicResult<serde_json::Value> {\n\n let mut root = Map::new();\n\n for (prop_url, value) in propvals.iter() {\n\n root.insert(prop_url.clone(), val_to_serde(value.clone())?);\n\n }\n\n if let Some(sub) = subject {\n\n root.insert(\"@id\".into(), SerdeValue::String(sub));\n\n }\n\n let obj = SerdeValue::Object(root);\n\n Ok(obj)\n\n}\n\n\n", "file_path": "lib/src/serialize.rs", "rank": 35, "score": 140668.3150675883 }, { "content": "/// Parse a single Json AD string, convert to Atoms\n\n/// WARNING: Does not 
match all props to datatypes (in Nested Resources), so it could result in invalid data, if the input data does not match the required datatypes.\n\npub fn parse_json_ad_resource(\n\n string: &str,\n\n store: &impl crate::Storelike,\n\n) -> AtomicResult<Resource> {\n\n let json: Map<String, serde_json::Value> = serde_json::from_str(string)?;\n\n json_ad_object_to_resource(json, store)\n\n}\n\n\n", "file_path": "lib/src/parse.rs", "rank": 36, "score": 140361.33364570257 }, { "content": "pub fn config_routes(app: &mut actix_web::web::ServiceConfig) {\n\n app.service(web::resource(\"/ws\").to(handlers::web_sockets::web_socket_handler))\n\n // Catch all HTML requests and send them to the single page app\n\n .service(\n\n web::resource(\"/*\")\n\n .guard(actix_web::guard::Method(Method::GET))\n\n .guard(actix_web::guard::fn_guard(|head| {\n\n content_types::get_accept(&head.headers()) == content_types::ContentType::Html\n\n }))\n\n .to(handlers::single_page_app::single_page),\n\n )\n\n .service(\n\n web::scope(\"/tpf\").service(web::resource(\"\").route(web::get().to(handlers::tpf::tpf))),\n\n )\n\n .service(\n\n web::resource(\"/commit\")\n\n .guard(actix_web::guard::Method(Method::POST))\n\n .to(handlers::commit::post_commit),\n\n )\n\n .service(\n\n web::scope(\"/{path:[^{}]+}\")\n\n .service(web::resource(\"\").route(web::get().to(handlers::resource::get_resource))),\n\n )\n\n // Also allow the home resource\n\n .service(web::resource(\"/\").to(handlers::resource::get_resource));\n\n}\n", "file_path": "server/src/routes.rs", "rank": 37, "score": 138802.10733243922 }, { "content": "/// Parse a single Json AD string, convert to Atoms\n\n/// WARNING: Does not match all props to datatypes (in Nested Resources), so it could result in invalid data, if the input data does not match the required datatypes.\n\npub fn parse_json_ad_commit_resource(\n\n string: &str,\n\n store: &impl crate::Storelike,\n\n) -> AtomicResult<Resource> {\n\n let json: Map<String, serde_json::Value> 
= serde_json::from_str(string)?;\n\n let signature = json\n\n .get(urls::SUBJECT)\n\n .ok_or(\"No subject field in Commit.\")?\n\n .to_string();\n\n let subject = format!(\"{}/commits/{}\", store.get_base_url(), signature);\n\n let mut resource = Resource::new(subject);\n\n let propvals = parse_json_ad_map_to_propvals(json, store)?;\n\n for (prop, val) in propvals {\n\n resource.set_propval(prop, val, store)?\n\n }\n\n Ok(resource)\n\n}\n\n\n", "file_path": "lib/src/parse.rs", "rank": 38, "score": 137383.32204809278 }, { "content": "/// Writes config file from a specified path\n\n/// Overwrites any existing config\n\npub fn write_config(path: &Path, config: Config) -> AtomicResult<String> {\n\n let out =\n\n toml::to_string_pretty(&config).map_err(|e| format!(\"Error serializing config. {}\", e))?;\n\n std::fs::write(path, out.clone())\n\n .map_err(|e| format!(\"Error writing config to {:?}. {}\", path, e))?;\n\n Ok(out)\n\n}\n", "file_path": "lib/src/config.rs", "rank": 39, "score": 135297.26394664618 }, { "content": "/// Converts dots to 'None'\n\nfn tpf_value(string: &str) -> Option<&str> {\n\n if string == \".\" {\n\n None\n\n } else {\n\n Some(string)\n\n }\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 40, "score": 132990.33431076785 }, { "content": "/// Posts a Commit to the endpoint of the Subject from the Commit\n\npub fn post_commit(commit: &crate::Commit, store: &impl Storelike) -> AtomicResult<()> {\n\n let base_url = crate::url_helpers::base_url(commit.get_subject())?;\n\n // Default Commit endpoint is `https://example.com/commit`\n\n let endpoint = format!(\"{}commit\", base_url);\n\n post_commit_custom_endpoint(&endpoint, commit, store)\n\n}\n\n\n", "file_path": "lib/src/client.rs", "rank": 41, "score": 130731.31749161355 }, { "content": "#[derive(Debug)]\n\nstruct AtomicError(String);\n\n\n\nimpl fmt::Display for AtomicError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"There is an error: {}\", self.0)\n\n 
}\n\n}\n\n\n\nimpl Error for AtomicError {}\n", "file_path": "lib/src/errors.rs", "rank": 42, "score": 127456.20396786426 }, { "content": "/// Returns the @id in a JSON object\n\nfn get_id(object: serde_json::Map<String, serde_json::Value>) -> AtomicResult<String> {\n\n Ok(object\n\n .get(\"@id\")\n\n .ok_or(\"Missing `@id` value in top level JSON. Could not determine Subject of Resource.\")?\n\n .as_str()\n\n .ok_or(\"`@id` is not a string - should be the Subject of the Resource (a URL)\")?\n\n .to_string())\n\n}\n\n\n", "file_path": "lib/src/parse.rs", "rank": 43, "score": 127325.29340017398 }, { "content": "/// Parses a single argument (URL or Bookmark), should return a valid URL\n\nfn argument_to_url(context: &Context, argument: &str) -> AtomicResult<String> {\n\n let command_name = context.matches.subcommand_name().unwrap();\n\n let subcommand_matches = context.matches.subcommand_matches(command_name).unwrap();\n\n let user_arg = subcommand_matches\n\n .value_of(argument)\n\n .ok_or(format!(\"No argument value for {} found\", argument))?;\n\n let id_url: String = context\n\n .mapping\n\n .lock()\n\n .unwrap()\n\n .try_mapping_or_url(&String::from(user_arg))\n\n .ok_or(&*format!(\"No url found for {}\", user_arg))?;\n\n Ok(id_url)\n\n}\n", "file_path": "cli/src/commit.rs", "rank": 44, "score": 120811.42811937557 }, { "content": "/// Converts an Atomic Value to a Serde Value.\n\nfn val_to_serde(value: Value) -> AtomicResult<SerdeValue> {\n\n let json_val: SerdeValue = match value {\n\n Value::AtomicUrl(val) => SerdeValue::String(val),\n\n Value::Date(val) => SerdeValue::String(val),\n\n // TODO: Handle big numbers\n\n Value::Integer(val) => serde_json::from_str(&val.to_string()).unwrap_or_default(),\n\n Value::Float(val) => serde_json::from_str(&val.to_string()).unwrap_or_default(),\n\n Value::Markdown(val) => SerdeValue::String(val),\n\n Value::ResourceArray(val) => SerdeValue::Array(\n\n val.iter()\n\n .map(|item| SerdeValue::String(item.clone()))\n\n 
.collect(),\n\n ),\n\n Value::Slug(val) => SerdeValue::String(val),\n\n Value::String(val) => SerdeValue::String(val),\n\n Value::Timestamp(val) => SerdeValue::Number(val.into()),\n\n Value::Unsupported(val) => SerdeValue::String(val.value),\n\n Value::Boolean(val) => SerdeValue::Bool(val),\n\n // TODO: fix this for nested resources in json and json-ld serialization, because this will cause them to fall back to json-ad\n\n Value::NestedResource(res) => propvals_to_json_ad_map(&res, None)?,\n\n };\n\n Ok(json_val)\n\n}\n\n\n", "file_path": "lib/src/serialize.rs", "rank": 45, "score": 118448.82127625457 }, { "content": "/// Uses a TPF endpoint, returns a Vector of matching resources\n\npub fn fetch_tpf(\n\n endpoint: &str,\n\n q_subject: Option<&str>,\n\n q_property: Option<&str>,\n\n q_value: Option<&str>,\n\n store: &impl Storelike,\n\n) -> AtomicResult<Vec<Resource>> {\n\n let mut url = Url::parse(endpoint)?;\n\n if let Some(val) = q_subject {\n\n url.query_pairs_mut().append_pair(\"subject\", val);\n\n }\n\n if let Some(val) = q_property {\n\n url.query_pairs_mut().append_pair(\"property\", val);\n\n }\n\n if let Some(val) = q_value {\n\n url.query_pairs_mut().append_pair(\"value\", val);\n\n }\n\n let body = fetch_body(url.as_str(), \"application/ad+json\")?;\n\n crate::parse::parse_json_ad_array(&body, store)\n\n}\n\n\n", "file_path": "lib/src/client.rs", "rank": 46, "score": 113900.82505284564 }, { "content": "/// Recursively checks a Resource and its Parents for write.\n\npub fn check_write(\n\n store: &impl Storelike,\n\n resource: &Resource,\n\n agent: String,\n\n) -> AtomicResult<bool> {\n\n // Check if the resource's write rights explicitly refers to the agent\n\n if let Ok(arr_val) = resource.get(urls::WRITE) {\n\n if arr_val.to_vec()?.contains(&agent) {\n\n return Ok(true);\n\n };\n\n }\n\n // Try the parents recursively\n\n if let Ok(val) = resource.get(urls::PARENT) {\n\n let parent = store.get_resource(&val.to_string())?;\n\n if 
resource.get_subject() == parent.get_subject() {\n\n // return Err(format!(\"Parent ({}) is the same as the current resource - there is a circular parent relationship.\", val).into());\n\n return Ok(false);\n\n }\n\n check_write(store, &parent, agent)\n\n } else {\n", "file_path": "lib/src/hierarchy.rs", "rank": 47, "score": 113892.69894671798 }, { "content": "#[allow(dead_code, unreachable_code)]\n\npub fn validate_store(\n\n store: &impl crate::Storelike,\n\n fetch_items: bool,\n\n) -> crate::validate::ValidationReport {\n\n type Error = String;\n\n let mut resource_count: u8 = 0;\n\n let mut atom_count: u8 = 0;\n\n let mut unfetchable: Vec<(String, Error)> = Vec::new();\n\n let mut invalid_value: Vec<(crate::Atom, Error)> = Vec::new();\n\n let mut unfetchable_props: Vec<(String, Error)> = Vec::new();\n\n let mut unfetchable_classes: Vec<(String, Error)> = Vec::new();\n\n // subject, property, class\n\n let mut missing_props: Vec<(String, String, String)> = Vec::new();\n\n for resource in store.all_resources(true) {\n\n let subject = resource.get_subject();\n\n let propvals = resource.get_propvals();\n\n println!(\"Subject: {:?}\", subject);\n\n println!(\"Resource: {:?}\", propvals);\n\n resource_count += 1;\n\n\n", "file_path": "lib/src/validate.rs", "rank": 48, "score": 113888.84969899876 }, { "content": "/// Builds a collection from query params\n\npub fn construct_collection(\n\n store: &impl Storelike,\n\n query_params: url::form_urlencoded::Parse,\n\n resource: &mut Resource,\n\n) -> AtomicResult<Resource> {\n\n let mut sort_by = None;\n\n let mut sort_desc = false;\n\n let mut current_page = 0;\n\n let mut page_size = DEFAULT_PAGE_SIZE;\n\n let mut value = None;\n\n let mut property = None;\n\n let mut name = None;\n\n\n\n if let Ok(val) = resource.get(urls::COLLECTION_PROPERTY) {\n\n property = Some(val.to_string());\n\n }\n\n if let Ok(val) = resource.get(urls::COLLECTION_VALUE) {\n\n value = Some(val.to_string());\n\n }\n\n if let Ok(val) = 
resource.get(urls::NAME) {\n", "file_path": "lib/src/collections.rs", "rank": 49, "score": 113888.84969899876 }, { "content": "/// Adds the requested rights to the target resource.\n\n/// Overwrites the target resource to include the new rights.\n\n/// Checks if the Agent has a valid URL.\n\n/// Will not throw an error if the Agent already has the rights.\n\npub fn add_rights(\n\n agent: &str,\n\n target: &str,\n\n write: bool,\n\n store: &impl Storelike,\n\n) -> AtomicResult<()> {\n\n check_valid_url(agent)?;\n\n // Get the Resource that the user is being invited to\n\n let mut target = store.get_resource(target)?;\n\n let right = if write { urls::WRITE } else { urls::READ };\n\n let mut rights_vector: Vec<String> = match target.get(right) {\n\n // Rights have been set, add to the list\n\n Ok(val) => {\n\n let vec = val.to_vec().map_err(|_| \"Invalid value for rights\")?;\n\n // If the vector already contains the agent, throw an error;\n\n for a in vec {\n\n if a == agent {\n\n return Ok(());\n\n }\n\n }\n", "file_path": "lib/src/plugins/invite.rs", "rank": 50, "score": 111631.94743846366 }, { "content": "/// Validates the store\n\nfn validate(context: &mut Context) {\n\n let reportstring = context.store.validate().to_string();\n\n println!(\"{}\", reportstring);\n\n}\n", "file_path": "cli/src/main.rs", "rank": 51, "score": 110270.61937943197 }, { "content": "/// List all bookmarks\n\nfn list(context: &mut Context) {\n\n let mut string = String::new();\n\n for (shortname, url) in context.mapping.lock().unwrap().clone().into_iter() {\n\n string.push_str(&*format!(\n\n \"{0: <15}{1: <10} \\n\",\n\n shortname.blue().bold(),\n\n url\n\n ));\n\n }\n\n println!(\"{}\", string)\n\n}\n\n\n", "file_path": "cli/src/main.rs", "rank": 52, "score": 110270.61937943197 }, { "content": "/// If there is a valid Agent in the correct query param, and the invite is valid, update the rights and respond with a redirect to the target resource\n\npub fn construct_invite_redirect(\n\n 
store: &impl Storelike,\n\n query_params: url::form_urlencoded::Parse,\n\n invite_resource: &mut Resource,\n\n subject: &str,\n\n) -> AtomicResult<Resource> {\n\n let mut pub_key = None;\n\n let mut invite_agent = None;\n\n for (k, v) in query_params {\n\n match k.as_ref() {\n\n \"public-key\" | urls::INVITE_PUBKEY => pub_key = Some(v.to_string()),\n\n \"agent\" | urls::AGENT => invite_agent = Some(v.to_string()),\n\n _ => {}\n\n }\n\n }\n\n\n\n // Check if there is either a publicKey or an Agent present in the request. Either one is needed to continue accepting the invite.\n\n let agent = match (pub_key, invite_agent) {\n\n (None, None) => return Ok(invite_resource.to_owned()),\n\n (None, Some(agent_url)) => agent_url,\n", "file_path": "lib/src/plugins/invite.rs", "rank": 53, "score": 109516.1461537019 }, { "content": "/// Posts a Commit to an endpoint\n\n/// Default commit endpoint is `https://example.com/commit`\n\npub fn post_commit_custom_endpoint(\n\n endpoint: &str,\n\n commit: &crate::Commit,\n\n store: &impl Storelike,\n\n) -> AtomicResult<()> {\n\n let json = commit.clone().into_resource(store)?.to_json_ad()?;\n\n\n\n let resp = ureq::post(&endpoint)\n\n .set(\"Content-Type\", \"application/json\")\n\n .timeout_read(2000)\n\n .send_string(&json);\n\n\n\n if resp.error() {\n\n Err(format!(\n\n \"Failed applying commit to {}. 
Status: {} Body: {}\",\n\n endpoint,\n\n resp.status(),\n\n resp.into_string()?\n\n )\n\n .into())\n", "file_path": "lib/src/client.rs", "rank": 54, "score": 109512.62925882224 }, { "content": "/// Returns the current UNIX timestamp in milliseconds\n\npub fn now() -> i64 {\n\n std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .expect(\"You're a time traveler\")\n\n .as_millis() as i64\n\n}\n", "file_path": "lib/src/datetime_helpers.rs", "rank": 55, "score": 108686.69244982599 }, { "content": "/// Parse a single Json AD string, convert to Atoms\n\n/// Does not match all props to datatypes, so it could result in invalid data.\n\npub fn parse_json_ad_map_to_propvals(\n\n json: Map<String, serde_json::Value>,\n\n store: &impl crate::Storelike,\n\n) -> AtomicResult<PropVals> {\n\n let mut propvals = PropVals::new();\n\n for (prop, val) in json {\n\n if prop == \"@id\" {\n\n // Not sure if this is the correct behavior.\n\n // This will turn named resources into nested ones!\n\n // To fix this, we need to use an Enum for Value::ResourceArray(enum)\n\n continue;\n\n }\n\n let atomic_val = match val {\n\n serde_json::Value::Null => return Err(\"Null not allowed in JSON-AD\".into()),\n\n serde_json::Value::Bool(bool) => Value::Boolean(bool),\n\n serde_json::Value::Number(num) => {\n\n let property = store.get_property(&prop)?;\n\n // Also converts numbers to strings, not sure what to think about this.\n\n // Does not result in invalid atomic data, but does allow for weird inputs\n\n Value::new(&num.to_string(), &property.data_type)?\n", "file_path": "lib/src/parse.rs", "rank": 56, "score": 107549.77932109695 }, { "content": "pub fn all_versions_endpoint() -> Endpoint {\n\n Endpoint {\n\n path: \"/all-versions\".to_string(),\n\n params: [urls::SUBJECT.to_string()].into(),\n\n description: \"Shows all versions for some resource. 
Constructs these using Commits.\"\n\n .to_string(),\n\n shortname: \"all-versions\".to_string(),\n\n handle: handle_all_versions_request,\n\n }\n\n}\n\n\n", "file_path": "lib/src/plugins/versioning.rs", "rank": 57, "score": 106574.77220408578 }, { "content": "pub fn version_endpoint() -> Endpoint {\n\n Endpoint {\n\n path: \"/version\".to_string(),\n\n params: [urls::SUBJECT.to_string()].into(),\n\n description: \"Constructs a version of a resource from a Commit URL.\".to_string(),\n\n shortname: \"versions\".to_string(),\n\n handle: handle_version_request,\n\n }\n\n}\n\n\n", "file_path": "lib/src/plugins/versioning.rs", "rank": 58, "score": 106574.77220408578 }, { "content": "pub fn path_endpoint() -> Endpoint {\n\n Endpoint {\n\n path: \"/path\".to_string(),\n\n params: [urls::PATH.to_string()].into(),\n\n description: \"An Atomic Path is a string that starts with the URL of some Atomic Resource, followed by one or multiple other Property URLs or Property Shortnames. It resolves to one specific Resource or Value. 
At this moment, Values are not yet supported.\".to_string(),\n\n shortname: \"path\".to_string(),\n\n handle: handle_path_request,\n\n }\n\n}\n\n\n", "file_path": "lib/src/plugins/path.rs", "rank": 59, "score": 106574.77220408578 }, { "content": "/// Creates the server config, reads .env values and sets defaults\n\npub fn init() -> BetterResult<Config> {\n\n // Parse CLI options, .env values, set defaults\n\n let opts: Opts = Opts::parse();\n\n\n\n dotenv().ok();\n\n let config_dir = if let Some(dir) = &opts.config_dir {\n\n dir.clone()\n\n } else {\n\n atomic_lib::config::default_config_dir_path()?\n\n };\n\n let mut config_file_path = atomic_lib::config::default_config_file_path()?;\n\n let mut store_path = config_dir.clone();\n\n store_path.push(\"db\");\n\n let mut https_path = config_dir.clone();\n\n https_path.push(\"https\");\n\n let mut cert_path = config_dir.clone();\n\n cert_path.push(\"https/cert.pem\");\n\n let mut key_path = config_dir.clone();\n\n key_path.push(\"https/key.pem\");\n\n\n", "file_path": "server/src/config.rs", "rank": 60, "score": 103974.1564222734 }, { "content": "pub fn default_endpoints() -> Vec<Endpoint> {\n\n vec![version_endpoint(), all_versions_endpoint(), path_endpoint()]\n\n}\n", "file_path": "lib/src/endpoints.rs", "rank": 61, "score": 103970.17166159218 }, { "content": "/// Parses a single argument as string\n\nfn argument_to_string(context: &Context, argument: &str) -> AtomicResult<String> {\n\n let command_name = context.matches.subcommand_name().unwrap();\n\n let subcommand_matches = context.matches.subcommand_matches(command_name).unwrap();\n\n let user_arg = subcommand_matches\n\n .value_of(argument)\n\n .ok_or(format!(\"No argument value for {} found\", argument))?;\n\n Ok(user_arg.into())\n\n}\n\n\n", "file_path": "cli/src/commit.rs", "rank": 62, "score": 103707.60840182302 }, { "content": "#[cfg(test)]\n\npub fn init_store() -> crate::Store {\n\n use crate::Storelike;\n\n\n\n let store = 
crate::Store::init().unwrap();\n\n store.populate().unwrap();\n\n let agent = store.create_agent(None).unwrap();\n\n store.set_default_agent(agent);\n\n store\n\n}\n", "file_path": "lib/src/test_utils.rs", "rank": 63, "score": 101995.75009155173 }, { "content": "fn exec_command(context: &mut Context) -> AtomicResult<()> {\n\n match context.matches.subcommand_name() {\n\n Some(\"destroy\") => {\n\n commit::destroy(context)?;\n\n }\n\n Some(\"edit\") => {\n\n #[cfg(feature = \"native\")]\n\n {\n\n commit::edit(context)?;\n\n }\n\n #[cfg(not(feature = \"native\"))]\n\n {\n\n return Err(\"Feature not available. Compile with `native` feature.\".into());\n\n }\n\n }\n\n Some(\"get\") => {\n\n path::get_path(context)?;\n\n }\n\n Some(\"list\") => {\n\n list(context);\n", "file_path": "cli/src/main.rs", "rank": 64, "score": 101753.302251412 }, { "content": "/// Get the Drive resource (base URL), set agent as the Root user, provide write access\n\nfn set_up_drive(store: &impl Storelike) -> BetterResult<()> {\n\n log::info!(\"Setting rights to Drive {}\", store.get_base_url());\n\n // Now let's add the agent as the Root user and provide write access\n\n let mut drive = store.get_resource(store.get_base_url())?;\n\n let agents = vec![store.get_default_agent()?.subject];\n\n // TODO: add read rights to public, maybe\n\n drive.set_propval(atomic_lib::urls::WRITE.into(), agents.clone().into(), store)?;\n\n drive.set_propval(atomic_lib::urls::READ.into(), agents.into(), store)?;\n\n drive.set_propval_string(atomic_lib::urls::DESCRIPTION.into(), &format!(\"Welcome to your Atomic-Server! Register your User by visiting [`/setup`]({}/setup). 
After that, edit this page by pressing `edit` in the navigation bar menu.\", store.get_base_url()), store)?;\n\n drive.save_locally(store)?;\n\n Ok(())\n\n}\n", "file_path": "server/src/appstate.rs", "rank": 65, "score": 101089.92651147276 }, { "content": "/// Apply a Commit using the Remove method - removes a property from a resource\n\npub fn remove(context: &Context) -> AtomicResult<()> {\n\n let subject = argument_to_url(context, \"subject\")?;\n\n let prop = argument_to_string(context, \"property\")?;\n\n let mut resource = context.store.get_resource(&subject)?;\n\n resource.remove_propval_shortname(&prop, &context.store)?;\n\n post(context, resource.get_commit_builder().clone())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cli/src/commit.rs", "rank": 66, "score": 99682.50780145597 }, { "content": "/// Apply a Commit using the Set method - create or update a value in a resource\n\npub fn set(context: &Context) -> AtomicResult<()> {\n\n let subject = argument_to_url(context, \"subject\")?;\n\n let property = argument_to_string(context, \"property\")?;\n\n let value = argument_to_string(context, \"value\")?;\n\n // If the resource is not found, create it\n\n let mut resource = match context.store.get_resource(&subject) {\n\n Ok(r) => r,\n\n Err(_) => atomic_lib::Resource::new(subject),\n\n };\n\n resource.set_propval_shortname(&property, &value, &context.store)?;\n\n post(context, resource.get_commit_builder().clone())?;\n\n Ok(())\n\n}\n\n\n\n/// Apply a Commit using the Set method, where the value is edited in the user's text editor.\n", "file_path": "cli/src/commit.rs", "rank": 67, "score": 99682.30484779907 }, { "content": "/// Apply a Commit using the destroy method - removes a resource\n\npub fn destroy(context: &Context) -> AtomicResult<()> {\n\n let subject = argument_to_url(context, \"subject\")?;\n\n let mut commit_builder = atomic_lib::commit::CommitBuilder::new(subject);\n\n commit_builder.destroy(true);\n\n post(context, commit_builder)?;\n\n 
Ok(())\n\n}\n\n\n", "file_path": "cli/src/commit.rs", "rank": 68, "score": 99678.54090495377 }, { "content": "#[cfg(feature = \"native\")]\n\npub fn edit(context: &Context) -> AtomicResult<()> {\n\n let subject = argument_to_url(context, \"subject\")?;\n\n let prop = argument_to_string(context, \"property\")?;\n\n // If the resource is not found, create it\n\n let mut resource = match context.store.get_resource(&subject) {\n\n Ok(r) => r,\n\n Err(_) => atomic_lib::Resource::new(subject),\n\n };\n\n // If the prop is not found, create it\n\n let current_val = match resource.get_shortname(&prop, &context.store) {\n\n Ok(val) => val.to_string(),\n\n Err(_) => \"\".to_string(),\n\n };\n\n let edited = edit::edit(current_val)?;\n\n // Remove newline - or else I can's save shortnames or numbers using vim;\n\n let trimmed = edited.trim_end_matches('\\n');\n\n resource.set_propval_shortname(&prop, trimmed, &context.store)?;\n\n post(context, resource.get_commit_builder().clone())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "cli/src/commit.rs", "rank": 69, "score": 99670.74503934482 }, { "content": "/// Creates the first Invitation that is opened by the user on the Home page.\n\nfn set_up_initial_invite(store: &impl Storelike) -> BetterResult<()> {\n\n let subject = format!(\"{}/setup\", store.get_base_url());\n\n log::info!(\"Creating initial Invite at {}\", subject);\n\n let mut invite = atomic_lib::Resource::new_instance(atomic_lib::urls::INVITE, store)?;\n\n invite.set_subject(subject);\n\n // This invite can be used only once\n\n invite.set_propval(\n\n atomic_lib::urls::USAGES_LEFT.into(),\n\n atomic_lib::Value::Integer(1),\n\n store,\n\n )?;\n\n invite.set_propval(\n\n atomic_lib::urls::WRITE_BOOL.into(),\n\n atomic_lib::Value::Boolean(true),\n\n store,\n\n )?;\n\n invite.set_propval(\n\n atomic_lib::urls::TARGET.into(),\n\n atomic_lib::Value::AtomicUrl(store.get_base_url().into()),\n\n store,\n\n )?;\n\n invite.set_propval_string(\n\n 
atomic_lib::urls::DESCRIPTION.into(),\n\n \"Use this Invite to create an Agent, or use an existing one. Accepting will grant your Agent the necessary rights to edit the data in your Atomic Server. This can only be used once. If you, for whatever reason, need a new `/setup` invite, you can pass the `--init` flag to `atomic-server`.\",\n\n store,\n\n )?;\n\n invite.save_locally(store)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "server/src/appstate.rs", "rank": 70, "score": 99328.88382137474 }, { "content": "/// Removes the process id file in the config directory meant for signaling this instance is running.\n\npub fn remove_pid(config: &Config) -> BetterResult<()> {\n\n if std::fs::remove_file(pid_path(config)).is_err() {\n\n log::warn!(\n\n \"Could not remove process file at {}\",\n\n pid_path(config).to_str().unwrap()\n\n )\n\n }\n\n Ok(())\n\n}\n\n\n\nconst PID_NAME: &str = \"atomic_server_process_id\";\n\n\n", "file_path": "server/src/process.rs", "rank": 71, "score": 97820.81750232822 }, { "content": "/// Returns the default path for the config file: `~/.config/atomic/config.toml`\n\npub fn default_config_file_path() -> AtomicResult<PathBuf> {\n\n let mut default_dir = default_config_dir_path()?;\n\n default_dir.push(\"config.toml\");\n\n Ok(default_dir)\n\n}\n\n\n", "file_path": "lib/src/config.rs", "rank": 72, "score": 96775.14451775231 }, { "content": "/// Returns the default path for the config file: `~/.config/atomic`\n\npub fn default_config_dir_path() -> AtomicResult<PathBuf> {\n\n Ok(dirs::home_dir()\n\n .ok_or(\"Could not open home dir\")?\n\n .join(\".config/atomic\"))\n\n}\n\n\n", "file_path": "lib/src/config.rs", "rank": 73, "score": 96775.14451775231 }, { "content": "/// Checks if the server is running. If it is, kill that process. 
Also creates creates a new PID.\n\npub fn terminate_existing_processes(config: &Config) -> BetterResult<()> {\n\n let pid_maybe = match std::fs::read_to_string(pid_path(config)) {\n\n Ok(content) => str::parse::<i32>(&content).ok(),\n\n Err(_e) => None,\n\n };\n\n if let Some(pid) = pid_maybe {\n\n let retry_secs = 1;\n\n let mut tries_left = 15;\n\n match futures::executor::block_on(heim::process::get(pid)) {\n\n Ok(process) => {\n\n log::warn!(\n\n \"Terminating existing running instance of atomic-server (process ID: {})...\",\n\n process.pid()\n\n );\n\n futures::executor::block_on(process.terminate())\n\n .expect(\"Found running atomic-server, but could not terminate it.\");\n\n log::info!(\"Checking if other server has succesfully terminated...\",);\n\n loop {\n\n if let Err(_e) = futures::executor::block_on(heim::process::get(pid)) {\n\n log::info!(\"No other atomic-server is running, continuing start-up\",);\n", "file_path": "server/src/process.rs", "rank": 74, "score": 96083.9692072687 }, { "content": "/// Returns a Key Pair (including public key) from a private key, base64 encoded.\n\npub fn generate_public_key(private_key: &str) -> Pair {\n\n use ring::signature::KeyPair;\n\n let private_key_bytes = base64::decode(private_key).unwrap();\n\n let key_pair = ring::signature::Ed25519KeyPair::from_seed_unchecked(private_key_bytes.as_ref())\n\n .map_err(|_| \"Error generating keypair\")\n\n .unwrap();\n\n Pair {\n\n private: base64::encode(private_key_bytes),\n\n public: base64::encode(key_pair.public_key().as_ref()),\n\n }\n\n}\n\n\n", "file_path": "lib/src/agents.rs", "rank": 75, "score": 96083.9692072687 }, { "content": "/// Returns the preffered content type.\n\n/// Defaults to HTML if none is found.\n\npub fn get_accept(map: &HeaderMap) -> ContentType {\n\n let accept_header = match map.get(\"Accept\") {\n\n Some(header) => header.to_str().unwrap_or(\"\"),\n\n None => return ContentType::Html,\n\n };\n\n parse_accept_header(accept_header)\n\n}\n\n\n", 
"file_path": "server/src/content_types.rs", "rank": 76, "score": 94454.30275955657 }, { "content": "/// Parses an HTTP Accept header\n\n/// Does not fully adhere to the RFC spec: https://tools.ietf.org/html/rfc7231\n\n/// Does not take into consideration the q value, simply reads the first thing before the comma\n\n/// Defaults to HTML\n\npub fn parse_accept_header(header: &str) -> ContentType {\n\n for mimepart in header.split(',') {\n\n if mimepart.contains(MIME_JSONAD) {\n\n return ContentType::JsonAd;\n\n }\n\n if mimepart.contains(MIME_HTML) {\n\n return ContentType::Html;\n\n }\n\n if mimepart.contains(MIME_XML) {\n\n return ContentType::Html;\n\n }\n\n if mimepart.contains(MIME_JSON) {\n\n return ContentType::Json;\n\n }\n\n if mimepart.contains(MIME_JSONLD) {\n\n return ContentType::JsonLd;\n\n }\n\n if mimepart.contains(MIME_TURTLE) {\n\n return ContentType::Turtle;\n\n }\n", "file_path": "server/src/content_types.rs", "rank": 77, "score": 94453.50914266461 }, { "content": "/// Checks if the public key is a valid ED25519 base64 key.\n\n/// Not perfect - only checks byte length and parses base64.\n\npub fn verify_public_key(public_key: &str) -> AtomicResult<()> {\n\n let pubkey_bin = base64::decode(public_key)\n\n .map_err(|e| format!(\"Invalid public key. Not valid Base64. {}\", e))?;\n\n println!(\"{}\", public_key.len());\n\n if pubkey_bin.len() != 32 {\n\n return Err(format!(\n\n \"Invalid public key, should be 32 bytes long instead of {}. 
Key: {}\",\n\n pubkey_bin.len(),\n\n public_key\n\n )\n\n .into());\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[cfg(test)]\n\n use super::*;\n\n\n", "file_path": "lib/src/agents.rs", "rank": 78, "score": 94450.1394655454 }, { "content": "/// Adds default Endpoints (versioning) to the Db.\n\n/// Makes sure they are fetchable\n\npub fn populate_endpoints(store: &crate::Db) -> AtomicResult<()> {\n\n let endpoints = crate::endpoints::default_endpoints();\n\n for endpoint in endpoints {\n\n let mut resource = endpoint.to_resource(store)?;\n\n resource.save_locally(store)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "lib/src/populate.rs", "rank": 79, "score": 93995.8324443401 }, { "content": "/// Reads config file from a specified path\n\npub fn read_config(path: &Path) -> AtomicResult<Config> {\n\n let config_string = std::fs::read_to_string(path)\n\n .map_err(|e| format!(\"Error reading config from {:?}. {}\", path, e))?;\n\n let config: Config = toml::from_str(&config_string)\n\n .map_err(|e| format!(\"Could not parse toml in config file {:?}. {}\", path, e))?;\n\n Ok(config)\n\n}\n\n\n", "file_path": "lib/src/config.rs", "rank": 80, "score": 93995.8324443401 }, { "content": "/// Creates the server context.\n\n/// Initializes a store on disk.\n\n/// Creates a new agent, if neccessary.\n\npub fn init(config: Config) -> BetterResult<AppState> {\n\n // Check if atomic-server is already running somwehere, and try to stop it. It's not a problem if things go wrong here, so errors are simply logged.\n\n let _ = crate::process::terminate_existing_processes(&config)\n\n .map_err(|e| log::error!(\"Could not check for running instance: {}\", e));\n\n\n\n // Enable all logging\n\n std::env::set_var(\"RUST_LOG\", \"info\");\n\n env_logger::init();\n\n\n\n const VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n\n log::info!(\"Atomic-server {}. Use --help for more options. 
Visit https://docs.atomicdata.dev and https://github.com/joepio/atomic-data-rust.\", VERSION);\n\n\n\n let store = atomic_lib::Db::init(&config.store_path, config.local_base_url.clone())?;\n\n if config.initialize {\n\n log::info!(\"Initialize: creating and populating new Database...\");\n\n atomic_lib::populate::populate_default_store(&store)?;\n\n // Building the index here is needed to perform TPF queries on imported resources\n\n store.build_index(true)?;\n\n }\n\n set_default_agent(&config, &store)?;\n", "file_path": "server/src/appstate.rs", "rank": 81, "score": 93995.8324443401 }, { "content": "/// Add a tray item to the OS bar.\n\n/// Kind of experimental feature.\n\npub fn tray_icon_process(config: crate::config::Config) {\n\n actix_web::rt::spawn(async move {\n\n let mut tray = match tray_item::TrayItem::new(\"Atomic\", \"\") {\n\n Ok(item) => item,\n\n Err(_e) => return,\n\n };\n\n let _ = tray.add_menu_item(\"Open\", move || match open::that(&config.local_base_url) {\n\n Ok(_) => (),\n\n Err(err) => (log::error!(\"Can't open app. {}\", err)),\n\n });\n\n let _ = tray.add_menu_item(\"Config folder\", move || {\n\n match open::that(&config.config_dir) {\n\n Ok(_) => (),\n\n Err(err) => (log::error!(\"Can't open config folder. {}\", err)),\n\n }\n\n });\n\n let _ = tray.add_menu_item(\"About\", move || {\n\n match open::that(\"https://github.com/joepio/atomic\") {\n\n Ok(_) => (),\n\n Err(err) => (log::error!(\"Can't open about page. 
{}\", err)),\n\n }\n\n });\n\n let inner = tray.inner_mut();\n\n inner.add_quit_item(\"Quit\");\n\n inner.display();\n\n });\n\n}\n", "file_path": "server/src/tray_icon.rs", "rank": 82, "score": 92362.00270261682 }, { "content": "/// Create a new agent if it does not yet exist.\n\nfn set_default_agent(config: &Config, store: &impl Storelike) -> BetterResult<()> {\n\n let ag_cfg: atomic_lib::config::Config = match atomic_lib::config::read_config(\n\n &config.config_file_path,\n\n ) {\n\n Ok(agent_config) => {\n\n match store.get_resource(&agent_config.agent) {\n\n Ok(_) => agent_config,\n\n Err(e) => {\n\n if agent_config.agent.contains(&config.local_base_url) {\n\n // If there is an agent in the config, but not in the store,\n\n // That probably means that the DB has been erased and only the config file exists.\n\n // This means that the Agent from the Config file should be recreated, using its private key.\n\n log::info!(\"Agent not retrievable, but config was found. Recreating Agent in new store.\");\n\n let recreated_agent = Agent::new_from_private_key(\n\n \"root\".into(),\n\n store,\n\n &agent_config.private_key,\n\n );\n\n store.add_resource(&recreated_agent.to_resource(store)?)?;\n\n agent_config\n", "file_path": "server/src/appstate.rs", "rank": 83, "score": 91888.05827214824 }, { "content": "/// Sorts a vector or resources by some property.\n\nfn sort_resources(\n\n mut resources: ResourceCollection,\n\n sort_by: &str,\n\n sort_desc: bool,\n\n) -> ResourceCollection {\n\n resources.sort_by(|a, b| {\n\n let val_a = a.get(sort_by);\n\n let val_b = b.get(sort_by);\n\n if val_a.is_err() || val_b.is_err() {\n\n return std::cmp::Ordering::Equal;\n\n }\n\n if val_b.unwrap().to_string() > val_a.unwrap().to_string() {\n\n if sort_desc {\n\n std::cmp::Ordering::Greater\n\n } else {\n\n std::cmp::Ordering::Less\n\n }\n\n } else if sort_desc {\n\n std::cmp::Ordering::Less\n\n } else {\n", "file_path": "lib/src/collections.rs", "rank": 84, "score": 91560.25882061456 }, 
{ "content": "/// Writes keys to disk using LetsEncrypt\n\npub fn request_cert(config: &crate::config::Config) -> Result<(), Error> {\n\n // Use DirectoryUrl::LetsEncrypStaging for dev/testing.\n\n let url = if config.opts.development {\n\n DirectoryUrl::LetsEncryptStaging\n\n } else {\n\n DirectoryUrl::LetsEncrypt\n\n };\n\n\n\n fs::create_dir_all(PathBuf::from(&config.https_path))?;\n\n\n\n // Save/load keys and certificates to current dir.\n\n let persist = FilePersist::new(&config.https_path);\n\n\n\n // Create a directory entrypoint.\n\n let dir = Directory::from_url(persist, url)?;\n\n\n\n // Reads the private account key from persistence, or\n\n // creates a new one before accessing the API to establish\n\n // that it's there.\n\n let email = config\n", "file_path": "server/src/https.rs", "rank": 85, "score": 88773.35900434192 }, { "content": "/// Signs a string using a base64 encoded ed25519 private key. Outputs a base64 encoded ed25519 signature.\n\nfn sign_message(message: &str, private_key: &str, public_key: &str) -> String {\n\n let private_key_vec: Vec<u8> = decode_hex(private_key).expect(\"Invalid Hex String\");\n\n let public_key_vec: Vec<u8> = decode_hex(public_key).expect(\"Invalid Hex String\");\n\n // I'm using PKCS8 in my actual Rust implementation, but I think this might make things more complicated in this example.\n\n // I can't get it to match signatures with either.\n\n // let key_pair = ring::signature::Ed25519KeyPair::from_pkcs8(&private_key_vec).unwrap();\n\n let key_pair = ring::signature::Ed25519KeyPair::from_seed_and_public_key(\n\n &private_key_vec,\n\n &public_key_vec,\n\n )\n\n .unwrap();\n\n let signature = key_pair.sign(&message.as_bytes());\n\n let signature_bytes = signature.as_ref();\n\n encode_hex(signature_bytes)\n\n}\n\n\n\nuse std::{fmt::Write, num::ParseIntError};\n\n\n", "file_path": "lib/examples/compare_ring.rs", "rank": 86, "score": 87075.08774390089 }, { "content": "/// Parses a JSON-AD object, converts it to an 
Atomic Resource\n\nfn json_ad_object_to_resource(\n\n json: Map<String, serde_json::Value>,\n\n store: &impl crate::Storelike,\n\n) -> AtomicResult<Resource> {\n\n let mut resource = Resource::new(get_id(json.clone())?);\n\n let propvals = parse_json_ad_map_to_propvals(json, store)?;\n\n for (prop, val) in propvals {\n\n resource.set_propval(prop, val, store)?\n\n }\n\n Ok(resource)\n\n}\n\n\n", "file_path": "lib/src/parse.rs", "rank": 87, "score": 86837.50354160534 }, { "content": "pub fn decode_hex(s: &str) -> Result<Vec<u8>, ParseIntError> {\n\n (0..s.len())\n\n .step_by(2)\n\n .map(|i| u8::from_str_radix(&s[i..i + 2], 16))\n\n .collect()\n\n}\n\n\n", "file_path": "lib/examples/compare_ring.rs", "rank": 88, "score": 85771.7170330471 }, { "content": "/// Signs a string using a base64 encoded ed25519 private key. Outputs a base64 encoded ed25519 signature.\n\nfn sign_message(message: &str, private_key: &str, public_key: &str) -> AtomicResult<String> {\n\n let private_key_bytes = base64::decode(private_key.to_string()).map_err(|e| {\n\n format!(\n\n \"Failed decoding private key {}: {}\",\n\n private_key.to_string(),\n\n e\n\n )\n\n })?;\n\n let public_key_bytes = base64::decode(public_key.to_string()).map_err(|e| {\n\n format!(\n\n \"Failed decoding public key {}: {}\",\n\n public_key.to_string(),\n\n e\n\n )\n\n })?;\n\n let key_pair = ring::signature::Ed25519KeyPair::from_seed_and_public_key(\n\n &private_key_bytes,\n\n &public_key_bytes,\n\n )\n\n .map_err(|_| \"Can't create Ed25519 keypair from Agent's Private Key.\")?;\n", "file_path": "lib/src/commit.rs", "rank": 89, "score": 84103.48622488548 }, { "content": "pub fn json_error_handler(err: error::JsonPayloadError, _req: &HttpRequest) -> error::Error {\n\n use actix_web::error::JsonPayloadError;\n\n\n\n let detail = err.to_string();\n\n let resp = match &err {\n\n JsonPayloadError::ContentType => HttpResponse::UnsupportedMediaType().body(detail),\n\n JsonPayloadError::Deserialize(json_err) if 
json_err.is_data() => {\n\n HttpResponse::UnprocessableEntity().body(detail)\n\n }\n\n _ => HttpResponse::BadRequest().body(detail),\n\n };\n\n error::InternalError::from_response(err, resp).into()\n\n}\n", "file_path": "server/src/jsonerrors.rs", "rank": 90, "score": 80301.56880033942 }, { "content": "// RUSTLS\n\npub fn get_https_config(config: &crate::config::Config) -> Result<rustls::ServerConfig, Error> {\n\n use rustls::internal::pemfile::{certs, pkcs8_private_keys};\n\n let mut https_config = rustls::ServerConfig::new(rustls::NoClientAuth::new());\n\n let cert_file =\n\n &mut BufReader::new(File::open(config.cert_path.clone()).expect(\"No HTTPS TLS key found.\"));\n\n let key_file = &mut BufReader::new(File::open(&config.key_path).unwrap());\n\n let cert_chain = certs(cert_file).unwrap();\n\n let mut keys = pkcs8_private_keys(key_file).unwrap();\n\n if keys.is_empty() {\n\n panic!(\"No key found. Consider deleting the `.https` directory and restart to create new keys.\")\n\n }\n\n https_config\n\n .set_single_cert(cert_chain, keys.remove(0))\n\n .unwrap();\n\n Ok(https_config)\n\n}\n\n\n", "file_path": "server/src/https.rs", "rank": 91, "score": 80107.72074810258 }, { "content": "/// Finds the extension\n\nfn try_extension(path: &str) -> Option<(ContentType, &str)> {\n\n let items: Vec<&str> = path.split('.').collect();\n\n if items.len() == 2 {\n\n let path = items[0];\n\n let content_type = match items[1] {\n\n \"json\" => ContentType::Json,\n\n \"jsonld\" => ContentType::JsonLd,\n\n \"jsonad\" => ContentType::JsonAd,\n\n \"html\" => ContentType::Html,\n\n \"ttl\" => ContentType::Turtle,\n\n _ => return None,\n\n };\n\n return Some((content_type, path));\n\n }\n\n None\n\n}\n", "file_path": "server/src/handlers/resource.rs", "rank": 92, "score": 71425.26127397452 }, { "content": "/// Signs a CommitBuilder at a specific unix timestamp.\n\nfn sign_at(\n\n commitbuilder: CommitBuilder,\n\n agent: &crate::agents::Agent,\n\n sign_date: i64,\n\n store: &impl 
Storelike,\n\n) -> AtomicResult<Commit> {\n\n let mut commit = Commit {\n\n subject: commitbuilder.subject,\n\n signer: agent.subject.clone(),\n\n set: Some(commitbuilder.set),\n\n remove: Some(commitbuilder.remove.into_iter().collect()),\n\n destroy: Some(commitbuilder.destroy),\n\n created_at: sign_date,\n\n signature: None,\n\n url: None,\n\n };\n\n let stringified = commit\n\n .serialize_deterministically_json_ad(store)\n\n .map_err(|e| format!(\"Failed serializing commit: {}\", e))?;\n\n let private_key = agent.private_key.clone().ok_or(\"No private key in agent\")?;\n\n let signature = sign_message(&stringified, &private_key, &agent.public_key).map_err(|e| {\n\n format!(\n\n \"Failed to sign message for resource {} with agent {}: {}\",\n\n commit.subject, agent.subject, e\n\n )\n\n })?;\n\n commit.signature = Some(signature);\n\n Ok(commit)\n\n}\n\n\n", "file_path": "lib/src/commit.rs", "rank": 93, "score": 58562.2842242202 }, { "content": "fn main() {\n\n // Import the `Storelike` trait to get access to most functions\n\n use atomic_lib::Storelike;\n\n // Start with initializing the in-memory store\n\n let store = atomic_lib::Store::init().unwrap();\n\n // Pre-load the default Atomic Data Atoms (from atomicdata.dev),\n\n // this is not necessary, but will probably make your project a bit faster\n\n store.populate().unwrap();\n\n // We can create a new Resource, linked to the store.\n\n // Note that since this store only exists in memory, it's data cannot be accessed from the internet.\n\n // Let's make a new Property instance! 
Let's create \"age\".\n\n let mut new_property =\n\n atomic_lib::Resource::new_instance(\"https://atomicdata.dev/classes/Property\", &store)\n\n .unwrap();\n\n // And add a description for that Property\n\n new_property\n\n .set_propval_shortname(\"description\", \"the age of a person\", &store)\n\n .unwrap();\n\n // A subject URL for the new resource has been created automatically.\n\n let subject = new_property.get_subject().clone();\n", "file_path": "lib/examples/basic.rs", "rank": 94, "score": 58562.2842242202 }, { "content": "fn main() {\n\n use ring::{\n\n rand,\n\n signature::{self, KeyPair},\n\n };\n\n\n\n // Generate a key pair in PKCS#8 (v2) format.\n\n let rng = rand::SystemRandom::new();\n\n let pkcs8_bytes = signature::Ed25519KeyPair::generate_pkcs8(&rng).unwrap();\n\n\n\n // Normally the application would store the PKCS#8 file persistently. Later\n\n // it would read the PKCS#8 file from persistent storage to use it.\n\n\n\n let key_pair = signature::Ed25519KeyPair::from_pkcs8(pkcs8_bytes.as_ref()).unwrap();\n\n\n\n // Sign the message \"hello, world\".\n\n const MESSAGE: &[u8] = b\"hello, world\";\n\n let sig = key_pair.sign(MESSAGE);\n\n\n\n let pubkey_b64 = base64::encode(key_pair.public_key());\n", "file_path": "lib/examples/signing.rs", "rank": 95, "score": 58562.2842242202 }, { "content": "/// Storelike provides many useful methods for interacting with an Atomic Store.\n\n/// It serves as a basic store Trait, agnostic of how it functions under the hood.\n\n/// This is useful, because we can create methods for Storelike that will work with either in-memory\n\n/// stores, as well as with persistend on-disk stores.\n\npub trait Storelike: Sized {\n\n /// Adds Atoms to the store.\n\n /// Will replace existing Atoms that share Subject / Property combination.\n\n /// Validates datatypes and required props presence.\n\n fn add_atoms(&self, atoms: Vec<Atom>) -> AtomicResult<()>;\n\n\n\n /// Adds an Atom to the PropSubjectMap. 
Overwrites if already present.\n\n /// The default implementation for this does not do anything, so overwrite it if your store needs indexing.\n\n fn add_atom_to_index(&self, _atom: &Atom) -> AtomicResult<()> {\n\n Ok(())\n\n }\n\n\n\n /// Adds a Resource to the store.\n\n /// Replaces existing resource with the contents.\n\n /// Does not build indexes or save versions.\n\n /// In most cases, you should use `resource.save()` instead, which uses Commits.\n\n fn add_resource(&self, resource: &Resource) -> AtomicResult<()>;\n\n\n\n /// Adds a Resource to the store.\n\n /// Replaces existing resource with the contents.\n", "file_path": "lib/src/storelike.rs", "rank": 96, "score": 57763.01388056114 }, { "content": "// Checks the property and its datatype, and issues a prompt that performs validation.\n\nfn prompt_field(\n\n property: &Property,\n\n optional: bool,\n\n context: &Context,\n\n) -> AtomicResult<Option<String>> {\n\n let mut input: Option<String> = None;\n\n let msg_appendix: &str = if optional {\n\n \" (optional)\"\n\n } else {\n\n \" (required)\"\n\n };\n\n match &property.data_type {\n\n DataType::String | DataType::Markdown => {\n\n let msg = format!(\"string{}\", msg_appendix);\n\n input = prompt_opt(&msg)?;\n\n return Ok(input);\n\n }\n\n DataType::Slug => {\n\n let msg = format!(\"slug{}\", msg_appendix);\n\n input = prompt_opt(&msg)?;\n", "file_path": "cli/src/new.rs", "rank": 97, "score": 57269.534103345446 }, { "content": "fn main() {\n\n // Values to validate output from https://paulmillr.com/ecc/\n\n let hex_private_key = \"0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef\";\n\n let hex_public_key = \"207a067892821e25d770f1fba0c47c11ff4b813e54162ece9eb839e076231ab6\";\n\n // The site seems to sign a SHA512 hash of the value...\n\n let _input_message = \"val\";\n\n // ... and this is the SHA-512 hash of that string. 
Or am I doing something wrong here?\n\n let input_sha = \"08055e8edd362ae08565bba339e41f0dfb61ad30527bf5ed5985c72ec565ccfd4fce93f6590a4fb85f7819371229a9681d9b2c1f134ad8d16ee82d73dfb0919d\";\n\n // This is the signature that is outputted on paulmillr.com\n\n let signature_correct =\n\n \"8fca64f1e6476ae5e4e978b4f50710bb8d5b86329ce4083247bb254eeb2f4ce18086def1923dbb30c2b758e89c8795a39d68179ed66a4f1799709c37bbb2ff05\";\n\n let signature = sign_message(input_sha, hex_private_key, hex_public_key);\n\n\n\n assert_eq!(signature, signature_correct)\n\n}\n\n\n", "file_path": "lib/examples/compare_ring.rs", "rank": 98, "score": 57261.1966998268 }, { "content": "/// Adds a file to the .https folder to indicate age of certificates\n\nfn add_certs_created_at() {\n\n let now_string = chrono::Utc::now();\n\n fs::write(CERTS_CREATED_AT, now_string.to_string())\n\n .expect(&*format!(\"Unable to write {}\", CERTS_CREATED_AT));\n\n}\n\n\n", "file_path": "server/src/https.rs", "rank": 99, "score": 56053.985660881226 } ]
Rust
2019/intcode.rs
yishn/adventofcode2016
4aa4d7c36921f88fbfc617293372b3b814fd9912
#[derive(Clone)] pub struct ProgramState(Vec<i64>, usize, usize); impl ProgramState { pub fn new(program: Vec<i64>) -> ProgramState { ProgramState(program, 0, 0) } pub fn get_mut(&mut self) -> (&mut Vec<i64>, &mut usize, &mut usize) { (&mut self.0, &mut self.1, &mut self.2) } } #[derive(Debug, Copy, Clone)] pub enum ProgramResult { Output(i64), WaitForInput, Halt, } #[derive(Debug)] enum ParameterMode { Position, Immediate, Relative } #[derive(Debug)] enum OperationType { Add, Multiply, Input, Output, JumpIfTrue, JumpIfFalse, LessThan, Equals, SetRelativeBase, Halt } #[derive(Debug)] struct Instruction { operation: OperationType, inputs: Vec<(ParameterMode, i64)> } fn parse_instruction(numbers: &[i64]) -> Instruction { let instruction_code = numbers[0]; let op_code = instruction_code % 100; let (operation, inputs_count) = match op_code { 1 => (OperationType::Add, 3), 2 => (OperationType::Multiply, 3), 3 => (OperationType::Input, 1), 4 => (OperationType::Output, 1), 5 => (OperationType::JumpIfTrue, 2), 6 => (OperationType::JumpIfFalse, 2), 7 => (OperationType::LessThan, 3), 8 => (OperationType::Equals, 3), 9 => (OperationType::SetRelativeBase, 1), 99 => (OperationType::Halt, 0), _ => panic!("Unsupported operation code {}", instruction_code) }; let get_parameter_mode = |i| { match (instruction_code - op_code) / 10i64.pow(2 + i as u32) % 10 { 0 => ParameterMode::Position, 1 => ParameterMode::Immediate, 2 => ParameterMode::Relative, _ => panic!() } }; Instruction { operation, inputs: (0..inputs_count) .map(|i| (get_parameter_mode(i), numbers[i + 1])) .collect() } } pub fn run_program(state: &mut ProgramState, input: Option<i64>) -> ProgramResult { let (program, pointer, relative_base) = state.get_mut(); let mut input = input; fn extend_memory(program: &mut Vec<i64>, index: usize) { while index >= program.len() { program.push(0); } } fn get_instruction_input_index( program: &mut Vec<i64>, instruction: &Instruction, relative_base: usize, index: usize ) -> usize { match 
instruction.inputs[index] { (ParameterMode::Relative, d) => { let j = (relative_base as i64 + d) as usize; extend_memory(program, j); j }, (_, j) => { extend_memory(program, j as usize); j as usize } } } fn get_instruction_input( program: &mut Vec<i64>, instruction: &Instruction, relative_base: usize, index: usize ) -> i64 { match instruction.inputs[index] { (ParameterMode::Immediate, value) => value, _ => { let j = get_instruction_input_index(program, instruction, relative_base, index); program[j] } } } while *pointer < program.len() { let init_pointer = *pointer; let instruction = parse_instruction(&program[*pointer..]); let (target_value, target_index) = { let mut get_input = |i| { get_instruction_input(program, &instruction, *relative_base, i) }; let (target_value, output_index) = match instruction.operation { OperationType::Add => (Some(get_input(0) + get_input(1)), Some(2)), OperationType::Multiply => (Some(get_input(0) * get_input(1)), Some(2)), OperationType::Input => match input { Some(x) => { input = None; (Some(x), Some(0)) }, _ => return ProgramResult::WaitForInput }, OperationType::Output => (Some(get_input(0)), None), OperationType::LessThan => (Some((get_input(0) < get_input(1)) as i64), Some(2)), OperationType::Equals => (Some((get_input(0) == get_input(1)) as i64), Some(2)), OperationType::JumpIfTrue => { if get_input(0) != 0 { *pointer = get_input(1) as usize; } (None, None) }, OperationType::JumpIfFalse => { if get_input(0) == 0 { *pointer = get_input(1) as usize; } (None, None) }, OperationType::SetRelativeBase => { *relative_base = (*relative_base as i64 + get_input(0)) as usize; (None, None) } OperationType::Halt => break }; ( target_value, output_index.map(|i| { get_instruction_input_index(program, &instruction, *relative_base, i) }) ) }; if *pointer == init_pointer { *pointer += instruction.inputs.len() + 1; } if let Some(target_value) = target_value { if let Some(target_index) = target_index { extend_memory(program, target_index); 
program[target_index] = target_value; } else { return ProgramResult::Output(target_value); } } } ProgramResult::Halt } pub fn run_program_with_inputs<I>(state: &mut ProgramState, inputs: I) -> (Vec<i64>, ProgramResult) where I: Iterator<Item = i64> { let mut inputs = inputs; let mut outputs = vec![]; let mut result = run_program(state, None); loop { if let ProgramResult::Output(x) = result { outputs.push(x); } result = run_program(state, match result { ProgramResult::WaitForInput => match inputs.next() { Some(x) => Some(x), None => break }, _ => None }); if let ProgramResult::Halt = result { break; } } (outputs, result) } pub fn run_ascii_program_with_input(state: &mut ProgramState, input: &str) -> (String, ProgramResult) { let inputs = input.chars().map(|c| c as i64); let (outputs, result) = run_program_with_inputs(state, inputs); let output = outputs.into_iter() .map(|x| x as u8 as char) .fold(String::new(), |mut acc, x| { acc.push(x); acc }); (output, result) }
#[derive(Clone)] pub struct ProgramState(Vec<i64>, usize, usize); impl ProgramState { pub fn new(program: Vec<i64>) -> ProgramState { ProgramState(program, 0, 0) } pub fn get_mut(&mut self) -> (&mut Vec<i64>, &mut usize, &mut usize) { (&mut self.0, &mut self.1, &mut self.2) } } #[derive(Debug, Copy, Clone)] pub enum ProgramResult { Output(i64), WaitForInput, Halt, } #[derive(Debug)] enum ParameterMode { Position, Immediate, Relative } #[derive(Debug)] enum OperationType { Add, Multiply, Input, Output, JumpIfTrue, JumpIfFalse, LessThan, Equals, SetRelativeBase, Halt } #[derive(Debug)] struct Instruction { operation: OperationType, inputs: Vec<(ParameterMode, i64)> } fn parse_instruction(numbers: &[i64]) -> Instruction { let instruction_code = numbers[0]; let op_code = instruction_code % 100; let (operation, inputs_count) = match op_code { 1 => (OperationType::Add, 3), 2 => (OperationType::Multiply, 3), 3 => (OperationType::Input, 1), 4 => (OperationType::Output, 1), 5 => (OperationType::JumpIfTrue, 2), 6 => (OperationType::JumpIfFalse, 2), 7 => (OperationType::LessThan, 3), 8 => (OperationType::Equals, 3), 9 => (OperationType::SetRelativeBase, 1), 99 => (OperationType::Halt, 0), _ => panic!("Unsupported operation code {}", instruction_code) }; let get_parameter_mode = |i| { match (instruction_code - op_code) / 10i64.pow(2 + i as u32) % 10 { 0 => ParameterMode::Position, 1 => ParameterMode::Immediate, 2 => ParameterMode::Relative, _ => panic!() } }; Instruction { operation, inputs: (0..inputs_count) .map(|i| (get_parameter_mode(i), numbers[i + 1])) .collect() } } pub fn run_program(state: &mut ProgramState, input: Option<i64>) -> ProgramResult { let (program, pointer, relative_base) = state.get_mut(); let mut input = input; fn extend_memory(program: &mut Vec<i64>, index: usize) { while index >= program.len() { program.push(0); } } fn get_instruction_input_index( program: &mut Vec<i64>, instruction: &Instruction, relative_base: usize, index: usize ) -> usize { match 
instruction.inputs[index] { (ParameterMode::Relative, d) => { let j = (relative_base as i64 + d) as usize; extend_memory(program, j); j }, (_, j) => { extend_memory(program, j as usize); j as usize } } } fn get_instruction_input( program: &mut Vec<i64>, instruction: &Instruction, relative_base: usize, index: usize ) -> i64 { match instruction.inputs[index] { (ParameterMode::Immediate, value) => value, _ => { let j = get_instruction_input_index(program, instruction, relative_base, index); program[j] } } } while *pointer < program.len() { let init_pointer = *pointer; let instruction = parse_instruction(&program[*pointer..]); let (target_value, target_index) = { let mut get_input = |i| { get_instruction_input(program, &instruction, *relative_base, i) }; let (target_value, output_index) = match instruction.operation { OperationType::Add => (Some(get_input(0) + get_input(1)), Some(2)), OperationType::Multiply => (Some(get_input(0) * get_input(1)), Some(2)), OperationType::Input => match input { Some(x) => { input = None; (Some(x), Some(0)) }, _ => return ProgramResult::WaitForInput }, OperationType::Output => (Some(get_input(0)), None), OperationType::LessThan => (Some((get_input(0) < get_input(1)) as i64), Some(2)), OperationType::Equals => (Some((get_input(0) == get_input(1)) as i64), Some(2)), OperationType::JumpIfTrue => { if get_input(0) != 0 { *pointer = get_input(1) as usize; } (None, None) }, OperationType::JumpIfFalse => { if get_input(0) == 0 { *pointer = get_input(1) as usize; } (None, None) }, OperationType::SetRelativeBase => { *relative_base = (*relative_base as i64 + get_input(0)) as usize; (None, None) } OperationType::Halt => break }; ( target_value, output_index.map(|i| { get_instruction_input_index(program, &instruction, *relative_base, i) }) ) }; if *pointer == init_pointer { *pointer += instruction.inputs.len() + 1; } if let Some(target_value) = target_value { if let Some(target_index) = target_index { extend_memory(program, target_index); 
program[target_index] = target_value; } else { return ProgramResult::Output(target_value); } } } ProgramResult::Halt } pub fn run_program_with_inputs<I>(state: &mut ProgramState, inputs: I) -> (Vec<i64>, ProgramResult) where I: Iterator<Item = i64> { let mut inputs = inputs; let mut outputs = vec![]; let mut result = run_program(state, None); loop { if let ProgramResult::Output(x) = result { outputs.push(x); } result = run_program(state, match result { ProgramResult::WaitForInput => match inputs.next() { Some(x) => Some(x), None => break }, _ => None }); if let ProgramResult::Halt = result { break; } } (outputs, result) }
pub fn run_ascii_program_with_input(state: &mut ProgramState, input: &str) -> (String, ProgramResult) { let inputs = input.chars().map(|c| c as i64); let (outputs, result) = run_program_with_inputs(state, inputs); let output = outputs.into_iter() .map(|x| x as u8 as char) .fold(String::new(), |mut acc, x| { acc.push(x); acc }); (output, result) }
function_block-full_function
[ { "content": "fn run_program(state: (&mut Vec<i64>, &mut usize, &mut usize), input: Option<i64>) -> ProgramResult {\n\n let (program, pointer, relative_base) = state;\n\n let mut input = input;\n\n\n\n fn extend_memory(program: &mut Vec<i64>, index: usize) {\n\n while index >= program.len() {\n\n program.push(0);\n\n }\n\n }\n\n\n\n fn get_instruction_input_index(\n\n program: &mut Vec<i64>,\n\n instruction: &Instruction,\n\n relative_base: usize,\n\n index: usize\n\n ) -> usize {\n\n match instruction.inputs[index] {\n\n (ParameterMode::Relative, d) => {\n\n let j = (relative_base as i64 + d) as usize;\n\n extend_memory(program, j);\n", "file_path": "2019/11.rs", "rank": 0, "score": 414900.5547089303 }, { "content": "fn calculate_output(mut program: Vec<usize>, input: (usize, usize)) -> usize {\n\n program[1] = input.0;\n\n program[2] = input.1;\n\n\n\n run_program(&mut program);\n\n\n\n program[0]\n\n}\n\n\n", "file_path": "2019/02.rs", "rank": 3, "score": 359260.2881600182 }, { "content": "fn run_program(state: (&mut Vec<i32>, &mut usize), input: Option<i32>) -> ProgramResult {\n\n let (program, pointer) = state;\n\n let mut input = input;\n\n\n\n while *pointer < program.len() {\n\n let init_pointer = *pointer;\n\n let instruction = parse_instruction(&program[*pointer..]);\n\n\n\n let (target_value, target_index) = {\n\n let get_input = |i| match instruction.inputs[i] {\n\n (ParameterMode::Position, j) => program[j as usize],\n\n (ParameterMode::Immediate, value) => value\n\n };\n\n\n\n let mut jump = |condition, i| {\n\n if condition {\n\n *pointer = get_input(i) as usize;\n\n }\n\n\n\n (None, None)\n", "file_path": "2019/07.rs", "rank": 4, "score": 340482.2284852813 }, { "content": "fn run_program(program: &mut Vec<i32>, input: i32) -> Vec<i32> {\n\n let mut result = vec![];\n\n let mut pointer = 0;\n\n\n\n while pointer < program.len() {\n\n let init_pointer = pointer;\n\n let instruction = parse_instruction(&program[pointer..]);\n\n\n\n let (target_value, 
target_index) = {\n\n let get_input = |i| match instruction.inputs[i] {\n\n (ParameterMode::Position, j) => program[j as usize],\n\n (ParameterMode::Immediate, value) => value\n\n };\n\n\n\n let mut jump = |condition, i| {\n\n if condition {\n\n pointer = get_input(i) as usize;\n\n }\n\n\n\n (None, None)\n", "file_path": "2019/05.rs", "rank": 5, "score": 312158.1251755512 }, { "content": "fn run_program(program: &mut Vec<usize>) {\n\n let mut pointer = 0;\n\n\n\n loop {\n\n let target_value = {\n\n let get = |p| program[program[p]];\n\n\n\n match program[pointer] {\n\n 1 => get(pointer + 1) + get(pointer + 2),\n\n 2 => get(pointer + 1) * get(pointer + 2),\n\n 99 => break,\n\n _ => panic!()\n\n }\n\n };\n\n\n\n let target_index = program[pointer + 3];\n\n program[target_index] = target_value;\n\n\n\n pointer += 4;\n\n }\n\n}\n\n\n", "file_path": "2019/02.rs", "rank": 6, "score": 311614.7236904838 }, { "content": "fn paint_emergency_hull(hull: &mut Hull, program: &mut Vec<i64>) {\n\n let mut position = (0, 0);\n\n let mut direction = (0, -1);\n\n let mut state = (0, 0);\n\n\n\n fn gauss_mul((a, b): (i32, i32), (c, d): (i32, i32)) -> (i32, i32) {\n\n (a * c - b * d, a * d + b * c)\n\n }\n\n\n\n loop {\n\n let input = match hull.get(&position) {\n\n Some(&Color::White) => 1,\n\n _ => 0\n\n };\n\n\n\n let color = match run_program((program, &mut state.0, &mut state.1), Some(input)) {\n\n ProgramResult::Halt => break,\n\n ProgramResult::WaitForInput => panic!(),\n\n ProgramResult::Output(x) => match x {\n\n 0 => Color::Black,\n", "file_path": "2019/11.rs", "rank": 8, "score": 308010.91334057774 }, { "content": "fn is_pulling_with_cache(program: &[i64], position: (u32, u32), cache: &mut HashMap<(u32, u32), bool>) -> bool {\n\n if let Some(&result) = cache.get(&position) {\n\n return result;\n\n }\n\n\n\n let result = is_pulling(program, position);\n\n cache.insert(position, result);\n\n\n\n result\n\n}\n\n\n", "file_path": "2019/19.rs", "rank": 9, "score": 
306069.6647690281 }, { "content": "fn transform(buffer: &mut [u32; 4], input: &[u32; 16]) {\n\n let (mut a, mut b, mut c, mut d) = (buffer[0], buffer[1], buffer[2], buffer[3]);\n\n\n\n macro_rules! add(\n\n ($a:expr, $b:expr) => ($a.wrapping_add($b));\n\n );\n\n macro_rules! rotate(\n\n ($x:expr, $n:expr) => (($x << $n) | ($x >> (32 - $n)));\n\n );\n\n\n\n {\n\n macro_rules! F(\n\n ($x:expr, $y:expr, $z:expr) => (($x & $y) | (!$x & $z));\n\n );\n\n macro_rules! T(\n\n ($a:expr, $b:expr, $c:expr, $d:expr, $x:expr, $s:expr, $ac:expr) => ({\n\n $a = add!(add!(add!($a, F!($b, $c, $d)), $x), $ac);\n\n $a = rotate!($a, $s);\n\n $a = add!($a, $b);\n\n });\n", "file_path": "2015/md5.rs", "rank": 10, "score": 292014.89173866593 }, { "content": "fn output_screen(state: &mut ProgramState, input: Option<i64>) -> (TileGrid, Option<i64>, bool) {\n\n let mut result = TileGrid::new();\n\n let mut score = None;\n\n let mut input_iter = input.into_iter();\n\n let mut halted = false;\n\n\n\n loop {\n\n let mut get = |input| run_program(state, input);\n\n\n\n match (get(input_iter.next()), get(None), get(None)) {\n\n (ProgramResult::Output(x), ProgramResult::Output(y), ProgramResult::Output(value)) => {\n\n if (x, y) == (-1, 0) {\n\n score = Some(value);\n\n } else {\n\n result.insert((x, y), match value {\n\n 0 => Tile::Empty,\n\n 1 => Tile::Wall,\n\n 2 => Tile::Block,\n\n 3 => Tile::HorizontalPaddle,\n\n 4 => Tile::Ball,\n", "file_path": "2019/13.rs", "rank": 11, "score": 273333.31597204285 }, { "content": "fn is_pulling(program: &[i64], (x, y): (u32, u32)) -> bool {\n\n let mut state = ProgramState::new(program.to_vec());\n\n let (result, _) = run_program_with_inputs(&mut state, vec![x, y].into_iter().map(|x| x as i64));\n\n\n\n match result.last() {\n\n Some(&x) => x == 1,\n\n _ => panic!()\n\n }\n\n}\n\n\n", "file_path": "2019/19.rs", "rank": 12, "score": 270296.2185523538 }, { "content": "fn parse(input: &str) -> (usize, Vec<Operation>) {\n\n let mut ip_register = 0;\n\n let 
program = input.lines()\n\n .filter_map(|line| {\n\n if line.starts_with(\"#ip \") {\n\n line[4..].parse::<usize>().ok()\n\n .map(|x| ip_register = x);\n\n }\n\n\n\n let mut tokens = line.split(' ');\n\n let op_type = tokens.next();\n\n\n\n op_type.map(|op_type| (\n\n op_type,\n\n tokens\n\n .filter_map(|x| x.parse::<usize>().ok())\n\n .collect::<Vec<_>>()\n\n ))\n\n })\n\n .filter(|(_, v)| v.len() == 3)\n\n .map(|(op_type, v)| (op_type, v[0], v[1], v[2]))\n\n .collect();\n\n\n\n (ip_register, program)\n\n}\n\n\n", "file_path": "2018/19.rs", "rank": 13, "score": 261180.96343598166 }, { "content": "fn parse_instructions(input: &str) -> Vec<ShuffleOperation> {\n\n input.lines()\n\n .filter_map(|line| {\n\n if line == \"deal into new stack\" {\n\n Some(ShuffleOperation::DealIntoNewStack)\n\n } else if &line[..\"cut\".len()] == \"cut\" {\n\n line[\"cut\".len()..].trim().parse::<isize>().ok()\n\n .map(|n| ShuffleOperation::CutNCards(n))\n\n } else if &line[..\"deal with increment\".len()] == \"deal with increment\" {\n\n line[\"deal with increment\".len()..].trim().parse::<usize>().ok()\n\n .map(|n| ShuffleOperation::DealWithIncrement(n))\n\n } else {\n\n None\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2019/22.rs", "rank": 14, "score": 259565.17008595954 }, { "content": "fn parse(input: &str) -> (Vec<Sample>, Vec<Operation>) {\n\n let input = input.replace('\\r', \"\");\n\n let mut parts = input.split(\"\\n\\n\\n\\n\");\n\n\n\n match (parts.next(), parts.next()) {\n\n (Some(samples_content), Some(program_content)) => {\n\n let samples = samples_content.split(\"\\n\\n\")\n\n .map(|chunk| chunk.lines().collect::<Vec<_>>())\n\n .filter(|v| v.len() == 3)\n\n .map(|v| (\n\n [v[0], v[2]],\n\n v[1].split(' ')\n\n .filter_map(|x| x.parse::<usize>().ok())\n\n .collect::<Vec<_>>()\n\n ))\n\n .map(|(v, op)| (\n\n v.iter()\n\n .filter_map(|line| line.split(|c| c == '[' || c == ']').nth(1))\n\n .map(|line| {\n\n line.split(\", \")\n", "file_path": "2018/16.rs", "rank": 
15, "score": 238668.14547400957 }, { "content": "fn parse(input: &str) -> Vec<Instruction> {\n\n input.lines().filter_map(|line| {\n\n if line.len() < 4 {\n\n return None;\n\n }\n\n\n\n let action = &line[0..3];\n\n let args = &line[4..].split(\", \").collect::<Vec<_>>();\n\n\n\n if args.len() < 1 {\n\n return None;\n\n }\n\n\n\n let register_name = match args[0] {\n\n \"a\" => Some(RegisterA),\n\n \"b\" => Some(RegisterB),\n\n _ => None\n\n };\n\n\n\n if let Some(register_name) = register_name {\n", "file_path": "2015/23.rs", "rank": 16, "score": 237467.01652132405 }, { "content": "fn list_combinations(sum: u32, numbers: &Vec<u32>) -> Vec<Vec<u32>> {\n\n match (sum, numbers.len()) {\n\n (0, _) => vec![vec![]],\n\n (_, 0) => vec![],\n\n\n\n _ => {\n\n numbers.iter().cloned()\n\n .enumerate()\n\n .filter(|&(_, x)| x <= sum)\n\n .flat_map(|(i, x)| {\n\n list_combinations(\n\n sum - x,\n\n &numbers.iter().cloned()\n\n .skip(i + 1)\n\n .collect()\n\n ).into_iter().map(move |mut vec| {\n\n vec.push(x);\n\n vec\n\n })\n\n })\n\n .collect()\n\n }\n\n }\n\n}\n\n\n", "file_path": "2015/17.rs", "rank": 17, "score": 235444.33240058465 }, { "content": "fn combine_recipes(scores: &mut Vec<usize>, elf1: &mut usize, elf2: &mut usize) {\n\n let score1 = scores.get(*elf1).cloned().unwrap_or(0);\n\n let score2 = scores.get(*elf2).cloned().unwrap_or(0);\n\n let score_sum = (score1 + score2).to_string();\n\n let new_scores = score_sum.chars()\n\n .filter_map(|c| c.to_digit(10))\n\n .map(|x| x as usize);\n\n\n\n scores.extend(new_scores);\n\n\n\n *elf1 = (*elf1 + 1 + score1) % scores.len();\n\n *elf2 = (*elf2 + 1 + score2) % scores.len();\n\n}\n\n\n", "file_path": "2018/14.rs", "rank": 18, "score": 234008.46480183734 }, { "content": "fn run_program(mut state: State, ip_register: usize, program: &[Operation]) -> State {\n\n if ip_register >= state.0.len() {\n\n return state;\n\n }\n\n\n\n while let Some(&operation) = program.get(state.0[ip_register]) {\n\n state.op(operation);\n\n 
state.0[ip_register] += 1;\n\n }\n\n\n\n state\n\n}\n\n\n", "file_path": "2018/19.rs", "rank": 19, "score": 232096.57889050167 }, { "content": "fn invert_instructions(count: usize, instructions: &[ShuffleOperation]) -> Vec<ShuffleOperation> {\n\n let mut inverse_instructions = instructions.to_vec();\n\n\n\n inverse_instructions.reverse();\n\n inverse_instructions.into_iter()\n\n .map(|instruction| match instruction {\n\n ShuffleOperation::DealIntoNewStack => ShuffleOperation::DealIntoNewStack,\n\n ShuffleOperation::CutNCards(n) => ShuffleOperation::CutNCards(-n),\n\n ShuffleOperation::DealWithIncrement(n) => {\n\n let (_, inverse_n, _) = extended_gcd(count as isize, n as isize);\n\n let inverse_n = if inverse_n < 0 { count - (-inverse_n as usize) } else { inverse_n as usize };\n\n ShuffleOperation::DealWithIncrement(inverse_n)\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2019/22.rs", "rank": 20, "score": 226498.39573446702 }, { "content": "fn parse_input(input: &str) -> Option<(u32, u32)> {\n\n let vec = input.split('-')\n\n .filter_map(|x| x.trim().parse::<u32>().ok())\n\n .collect::<Vec<_>>();\n\n\n\n if vec.len() != 2 {\n\n None\n\n } else {\n\n Some((vec[0], vec[1]))\n\n }\n\n}\n\n\n", "file_path": "2019/04.rs", "rank": 21, "score": 222060.90846884466 }, { "content": "#[derive(Debug, PartialEq, Copy, Clone)]\n\nenum ProgramResult {\n\n Output(i64),\n\n WaitForInput,\n\n Halt,\n\n}\n\n\n", "file_path": "2019/11.rs", "rank": 22, "score": 211354.30257870862 }, { "content": "#[derive(Debug, PartialEq, Copy, Clone)]\n\nenum ProgramResult {\n\n Output(i32),\n\n WaitForInput,\n\n Halt,\n\n}\n\n\n", "file_path": "2019/07.rs", "rank": 23, "score": 211354.30257870862 }, { "content": "fn start_thruster_amplifier_feedback_loop(program: &Vec<i32>, phase_setting: &Vec<i32>) -> i32 {\n\n let count = phase_setting.len();\n\n let mut program_results = (0..count)\n\n .map(|_| ProgramResult::WaitForInput)\n\n .collect::<Vec<_>>();\n\n let mut program_states = 
(0..count)\n\n .map(|_| program.clone())\n\n .collect::<Vec<_>>();\n\n let mut program_pointers = (0..count)\n\n .map(|_| 0)\n\n .collect::<Vec<_>>();\n\n let mut input_queues = phase_setting.iter()\n\n .map(|&phase| {\n\n let mut queue = VecDeque::new();\n\n queue.push_back(phase);\n\n queue\n\n })\n\n .collect::<Vec<_>>();\n\n let mut outputs = (0..count)\n\n .map(|_| vec![])\n", "file_path": "2019/07.rs", "rank": 24, "score": 210148.89359968764 }, { "content": "fn list_subsets(numbers: &Vec<usize>, sum: usize, start_index: usize) -> Vec<Vec<usize>> {\n\n if sum == 0 {\n\n return vec![vec![]];\n\n } else if start_index >= numbers.len() {\n\n return vec![];\n\n }\n\n\n\n numbers\n\n .iter()\n\n .enumerate()\n\n .skip(start_index)\n\n .filter(|&(_, &x)| x <= sum)\n\n .flat_map(|(i, &x)| {\n\n list_subsets(numbers, sum - x, i + 1)\n\n .into_iter()\n\n .map(move |mut subset| {\n\n subset.push(x);\n\n subset\n\n })\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2015/24.rs", "rank": 25, "score": 209939.8218785255 }, { "content": "fn play_game(state: &mut ProgramState, print: bool) -> i64 {\n\n fn get_tile_position(screen: &TileGrid, tile: Tile) -> (i64, i64) {\n\n screen.iter()\n\n .find(|&(_, &t)| t == tile)\n\n .map(|(&position, _)| position)\n\n .unwrap()\n\n }\n\n\n\n let (mut screen, mut score, _) = output_screen(state, None);\n\n\n\n loop {\n\n let paddle_position = get_tile_position(&screen, Tile::HorizontalPaddle);\n\n let ball_position = get_tile_position(&screen, Tile::Ball);\n\n let joystick = (ball_position.0 - paddle_position.0).signum();\n\n let (screen_update, score_update, halted) = output_screen(state, Some(joystick));\n\n\n\n for (&(x, y), &tile) in screen_update.iter() {\n\n screen.insert((x, y), tile);\n\n }\n\n\n", "file_path": "2019/13.rs", "rank": 26, "score": 208113.7774300266 }, { "content": "fn parse_input(input: &str) -> Vec<Rect> {\n\n input.lines()\n\n .filter_map(|line| {\n\n let tokens: Vec<&str> = line.split(' ').collect();\n\n\n\n if 
tokens.len() < 4 {\n\n return None;\n\n }\n\n\n\n let mut position = tokens[2][..tokens[2].len() - 1]\n\n .split(',')\n\n .filter_map(|x| x.parse::<usize>().ok());\n\n\n\n let mut size = tokens[3]\n\n .split('x')\n\n .filter_map(|x| x.parse::<usize>().ok());\n\n\n\n Some(Rect {\n\n id: match tokens[0][1..].parse::<usize>() {\n\n Ok(x) => x,\n", "file_path": "2018/03.rs", "rank": 27, "score": 207705.56757132232 }, { "content": "fn get_input() -> io::Result<String> {\n\n let mut file = File::open(\"14.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/14.rs", "rank": 28, "score": 207080.11849466973 }, { "content": "fn get_attack_order(groups: &mut [GroupInfo]) -> Vec<(usize, Option<usize>)> {\n\n let mut result = Vec::new();\n\n let mut selected = Vec::new();\n\n\n\n groups.sort_by_key(|g| (get_effective_power(g), g.initiative));\n\n groups.reverse();\n\n\n\n for group in groups.iter() {\n\n let targets = groups.iter()\n\n .filter(|target| !selected.contains(&target.id))\n\n .collect::<Vec<_>>();\n\n\n\n let target = select_target(group, &targets);\n\n\n\n target.map(|t| selected.push(t));\n\n result.push((group.initiative, group.id, target));\n\n }\n\n\n\n result.sort();\n\n result.reverse();\n\n\n\n result.into_iter()\n\n .map(|(_, id, target)| (id, target))\n\n .collect()\n\n}\n\n\n", "file_path": "2018/24.rs", "rank": 29, "score": 206076.38889342116 }, { "content": "fn do_phases(n: u32, signal: &[u8], skip: usize) -> Vec<u8> {\n\n let mut numbers = signal.to_vec();\n\n\n\n for _ in 0..n {\n\n numbers = do_phase(&numbers, skip);\n\n }\n\n\n\n numbers\n\n}\n\n\n", "file_path": "2019/16.rs", "rank": 30, "score": 203922.25821947356 }, { "content": "fn parse_input(input: &str) -> Vec<MoonState> {\n\n input.lines()\n\n .filter_map(|line| line.get(1..line.len() - 1))\n\n .filter_map(|line| {\n\n let mut coords = line.split(\", \")\n\n .filter_map(|token| {\n\n match 
token.split('=').nth(1) {\n\n Some(x) => x.parse::<i32>().ok(),\n\n _ => None\n\n }\n\n });\n\n\n\n match (coords.nth(0), coords.nth(0), coords.nth(0)) {\n\n (Some(x), Some(y), Some(z)) => Some((x, y, z)),\n\n _ => None\n\n }\n\n })\n\n .map(|position| MoonState {\n\n position,\n\n velocity: (0, 0, 0)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2019/12.rs", "rank": 31, "score": 203757.74642928515 }, { "content": "fn parse_layers(input: &str, width: usize, height: usize) -> LayerIter<impl Iterator<Item = u8> + '_> {\n\n let pixels = input.chars()\n\n .filter(|c| c.is_ascii_digit())\n\n .filter_map(|c| c.to_digit(10).map(|d| d as u8));\n\n\n\n LayerIter {\n\n pixels,\n\n width,\n\n height\n\n }\n\n}\n\n\n", "file_path": "2019/08.rs", "rank": 32, "score": 202244.00017229578 }, { "content": "fn get_digits(n: u32) -> Vec<u8> {\n\n let mut result = Vec::new();\n\n let mut n = n;\n\n\n\n while n > 0 {\n\n let last_digit = (n - n / 10 * 10) as u8;\n\n n /= 10;\n\n\n\n result.push(last_digit);\n\n }\n\n\n\n result.reverse();\n\n result\n\n}\n\n\n", "file_path": "2019/04.rs", "rank": 33, "score": 202111.91831227686 }, { "content": "fn parse(input: &str) -> Vec<Point> {\n\n input.lines()\n\n .filter_map(|line| {\n\n let tokens: Vec<isize> = line\n\n .split(|c| c == '<' || c == '>' || c == ',')\n\n .filter_map(|token| token.trim().parse::<isize>().ok())\n\n .collect();\n\n\n\n if tokens.len() < 4 {\n\n None\n\n } else {\n\n Some(Point::new(tokens[0], tokens[1], tokens[2], tokens[3]))\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2018/10.rs", "rank": 34, "score": 201045.5107734648 }, { "content": "fn parse(input: &str) -> Vec<Nanobot> {\n\n input.lines()\n\n .map(|line| line.split(|c| ['<', '>', ',', '='].contains(&c)))\n\n .map(|mut tokens| [tokens.nth(2), tokens.next(), tokens.next(), tokens.nth(2)])\n\n .map(|tokens| {\n\n tokens.into_iter()\n\n .filter_map(|token| token.and_then(|x| x.parse::<isize>().ok()))\n\n .collect::<Vec<_>>()\n\n })\n\n .filter(|tokens| 
tokens.len() == 4)\n\n .map(|tokens| Nanobot {\n\n position: (tokens[0], tokens[1], tokens[2]),\n\n radius: tokens[3] as usize\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "2018/23.rs", "rank": 35, "score": 201045.5107734648 }, { "content": "fn parse(input: &str) -> Vec<Coordinate> {\n\n input.lines()\n\n .map(|line| line.split(',').filter_map(|x| x.parse::<isize>().ok()).collect::<Vec<_>>())\n\n .filter(|v| v.len() == 4)\n\n .map(|v| (v[0], v[1], v[2], v[3]))\n\n .collect()\n\n}\n\n\n", "file_path": "2018/25.rs", "rank": 36, "score": 201045.5107734648 }, { "content": "fn get_permutations<T: Clone>(list: &Vec<T>) -> Vec<Vec<T>> {\n\n let mut result = Vec::new();\n\n\n\n if list.len() == 0 {\n\n result.push(Vec::new());\n\n return result;\n\n }\n\n\n\n for i in 0..list.len() {\n\n let mut rest = list.clone();\n\n rest.remove(i);\n\n\n\n for mut permutation in get_permutations(&rest) {\n\n permutation.push(list[i].clone());\n\n result.push(permutation);\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "2015/13.rs", "rank": 37, "score": 200165.72195361496 }, { "content": "fn get_permutations<T: Clone>(list: &Vec<T>) -> Vec<Vec<T>> {\n\n let mut result = Vec::new();\n\n\n\n if list.len() == 0 {\n\n result.push(Vec::new());\n\n return result;\n\n }\n\n\n\n for i in 0..list.len() {\n\n let mut rest = list.clone();\n\n rest.remove(i);\n\n\n\n for mut permutation in get_permutations(&rest) {\n\n permutation.push(list[i].clone());\n\n result.push(permutation);\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "2015/09.rs", "rank": 38, "score": 200165.72195361496 }, { "content": "fn get_present_counts(max: u32, presents: u32, limit: Option<u32>) -> Vec<u32> {\n\n let len = max / presents;\n\n let mut houses = Vec::with_capacity(len as usize);\n\n\n\n for _ in 0..len {\n\n houses.push(0);\n\n }\n\n\n\n for i in 1..len {\n\n let mut count = 0;\n\n let mut j = i;\n\n\n\n while j < len {\n\n houses[j as usize] += i * presents;\n\n \n\n count += 1;\n\n j += match limit 
{\n\n Some(x) if count >= x => break,\n\n _ => i\n\n };\n\n }\n\n }\n\n\n\n houses\n\n}\n\n\n", "file_path": "2015/20.rs", "rank": 39, "score": 199624.28756884218 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"20.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/20.rs", "rank": 40, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"14.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/14.rs", "rank": 41, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"06.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/06.rs", "rank": 42, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"13.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/13.rs", "rank": 43, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"12.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/12.rs", "rank": 44, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"05.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/05.rs", "rank": 45, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"19.txt\")?;\n\n let mut contents 
= String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/19.rs", "rank": 46, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"05.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/05.rs", "rank": 47, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"09.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/09.rs", "rank": 48, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"17.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/17.rs", "rank": 49, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"17.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/17.rs", "rank": 50, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"07.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/07.rs", "rank": 51, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"08.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/08.rs", "rank": 52, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"18.txt\")?;\n\n let mut contents = 
String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/18.rs", "rank": 53, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"18.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/18.rs", "rank": 54, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"14.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/14.rs", "rank": 55, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"04.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/04.rs", "rank": 56, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"07.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/07.rs", "rank": 57, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"13.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/13.rs", "rank": 58, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"10.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/10.rs", "rank": 59, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"09.txt\")?;\n\n let mut contents = 
String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/09.rs", "rank": 60, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"16.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/16.rs", "rank": 61, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"15.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/15.rs", "rank": 62, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"12.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/12.rs", "rank": 63, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"10.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/10.rs", "rank": 64, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"15.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/15.rs", "rank": 65, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"19.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/19.rs", "rank": 66, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"23.txt\")?;\n\n let mut contents = 
String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/23.rs", "rank": 67, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"25.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/25.rs", "rank": 68, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"24.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/24.rs", "rank": 69, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"16.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/16.rs", "rank": 70, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"11.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/11.rs", "rank": 71, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"15.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/15.rs", "rank": 72, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"07.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/07.rs", "rank": 73, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"06.txt\")?;\n\n let mut contents = 
String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/06.rs", "rank": 74, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"17.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/17.rs", "rank": 75, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"22.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/22.rs", "rank": 76, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"08.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/08.rs", "rank": 77, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"06.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/06.rs", "rank": 78, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"19.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/19.rs", "rank": 79, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"03.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/03.rs", "rank": 80, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"02.txt\")?;\n\n let mut contents = 
String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/02.rs", "rank": 81, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"01.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/01.rs", "rank": 82, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"03.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/03.rs", "rank": 83, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"08.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/08.rs", "rank": 84, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"24.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/24.rs", "rank": 85, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"18.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/18.rs", "rank": 86, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"01.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/01.rs", "rank": 87, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"02.txt\")?;\n\n let mut contents = 
String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/02.rs", "rank": 88, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"01.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/01.rs", "rank": 89, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"12.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/12.rs", "rank": 90, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"13.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/13.rs", "rank": 91, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"09.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/09.rs", "rank": 92, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"16.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/16.rs", "rank": 93, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"23.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/23.rs", "rank": 94, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"05.txt\")?;\n\n let mut contents = 
String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/05.rs", "rank": 95, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"04.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/04.rs", "rank": 96, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"11.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2018/11.rs", "rank": 97, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"03.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2019/03.rs", "rank": 98, "score": 196819.65806679614 }, { "content": "fn get_input() -> std::io::Result<String> {\n\n let mut file = File::open(\"02.txt\")?;\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents)?;\n\n\n\n Ok(contents)\n\n}\n\n\n", "file_path": "2015/02.rs", "rank": 99, "score": 196819.65806679614 } ]
Rust
src/cpu.rs
ZacJoffe/chip8-emulator
2ab341a403f204ccbf3ad8dffecaf4f1f7ae0c75
extern crate rand; use rand::Rng; use crate::keypad::Keypad; use crate::graphics::Graphics; pub struct Cpu { i: u16, v: [u8; 16], pc: u16, sp: u16, stack: [u16; 16], mem: [u8; 4096], sound_timer: u8, delay_timer: u8, opcode: u16, pub key: Keypad, pub graphics: Graphics } impl Cpu { pub fn new() -> Cpu { let mut cpu = Cpu { i: 0x200, v: [0; 16], pc: 0x200, sp: 0, stack: [0; 16], mem: [0; 4096], sound_timer: 0, delay_timer: 0, opcode: 0, key: Keypad::new(), graphics: Graphics::new() }; for i in 0..80 { cpu.mem[0x50 + i] = FONTSET[i]; } cpu } pub fn load_game(&mut self, game: Vec<u8>) { let mut data = Vec::new(); for byte in game { data.push(byte); } for (i, &byte) in data.iter().enumerate() { self.mem[i + 0x200] = byte; } } pub fn emulate_cycle(&mut self) { self.opcode = (self.mem[self.pc as usize] as u16) << 8 | (self.mem[(self.pc as usize) + 1] as u16); println!("{:x}", self.opcode); match self.opcode & 0xf000 { 0x0000 => self.instr_0(), 0x1000 => self.instr_1(), 0x2000 => self.instr_2(), 0x3000 => self.instr_3(), 0x4000 => self.instr_4(), 0x5000 => self.instr_5(), 0x6000 => self.instr_6(), 0x7000 => self.instr_7(), 0x8000 => self.instr_8(), 0x9000 => self.instr_9(), 0xa000 => self.instr_a(), 0xb000 => self.instr_b(), 0xc000 => self.instr_c(), 0xd000 => self.instr_d(), 0xe000 => self.instr_e(), 0xf000 => self.instr_f(), _ => self.nop() } if self.delay_timer > 0 { self.delay_timer -= 1; } if self.sound_timer > 0 { if self.sound_timer == 1 { println!("BEEP!"); } self.sound_timer -= 1; } } fn instr_0(&mut self) { match self.opcode & 0x00ff { 0xe0 => self.graphics.clear(), 0xee => { self.sp -= 1; self.pc = self.stack[self.sp as usize]; } _ => self.nop() } self.pc += 2; } fn instr_1(&mut self) { self.pc = self.opcode_nnn(); } fn instr_2(&mut self) { self.stack[self.sp as usize] = self.pc; self.sp += 1; self.pc = self.opcode_nnn(); } fn instr_3(&mut self) { if self.v[self.opcode_x()] == self.opcode_nn() { self.pc += 4; } else { self.pc += 2; } } fn instr_4(&mut self) { 
if self.v[self.opcode_x()] != self.opcode_nn() { self.pc += 4; } else { self.pc += 2; } } fn instr_5(&mut self) { if self.v[self.opcode_x()] == self.v[self.opcode_y()] { self.pc += 4; } else { self.pc += 2; } } fn instr_6(&mut self) { self.v[self.opcode_x()] = self.opcode_nn(); self.pc += 2; } fn instr_7(&mut self) { self.v[self.opcode_x()] = self.v[self.opcode_x()].wrapping_add(self.opcode_nn()); self.pc += 2; } fn instr_8(&mut self) { match self.opcode & 0x000f { 0 => self.v[self.opcode_x()] = self.v[self.opcode_y()], 1 => self.v[self.opcode_x()] = self.v[self.opcode_x()] | self.v[self.opcode_y()], 2 => self.v[self.opcode_x()] = self.v[self.opcode_x()] & self.v[self.opcode_y()], 3 => self.v[self.opcode_x()] = self.v[self.opcode_x()] ^ self.v[self.opcode_y()], 4 => { if self.v[self.opcode_x()] > 0xff - self.v[self.opcode_y()] { self.v[0xf] = 1; } else { self.v[0xf] = 0; } self.v[self.opcode_x()] = self.v[self.opcode_x()].wrapping_add(self.v[self.opcode_y()]); } 5 => { if self.v[self.opcode_y()] > self.v[self.opcode_x()] { self.v[0xf] = 0; } else { self.v[0xf] = 1; } self.v[self.opcode_x()] = self.v[self.opcode_x()].wrapping_sub(self.v[self.opcode_y()]); } 6 => { self.v[0xf] = self.v[self.opcode_x()] & 0x1; self.v[self.opcode_x()] >>= 1; } 7 => { if self.v[self.opcode_x()] > self.v[self.opcode_y()] { self.v[0xf] = 0; } else { self.v[0xf] = 1; } self.v[self.opcode_x()] = self.v[self.opcode_y()].wrapping_sub(self.v[self.opcode_x()]); } 0xe => { self.v[0xf] = self.v[self.opcode_x()] >> 7; self.v[self.opcode_x()] <<= 1; } _ => self.nop() } self.pc += 2; } fn instr_9(&mut self) { if self.v[self.opcode_x()] != self.v[self.opcode_y()] { self.pc += 4; } else { self.pc += 2; } } fn instr_a(&mut self) { self.i = self.opcode_nnn(); self.pc += 2; } fn instr_b(&mut self) { self.pc = (self.v[0] as u16) + self.opcode_nnn(); } fn instr_c(&mut self) { let mut rng = rand::thread_rng(); let random_num: u8 = rng.gen(); self.v[self.opcode_x()] = random_num & self.opcode_nn(); self.pc 
+= 2; } fn instr_d(&mut self) { let x = self.opcode_x(); let y = self.opcode_y(); let n = self.opcode_n(); self.v[15] = self.graphics.update(self.v[x] as usize, self.v[y] as usize, n, self.i, self.mem); self.pc += 2; } fn instr_e(&mut self) { match (self.opcode & 0x00ff) as u8 { 0x9e => { if self.key.is_pressed(self.v[self.opcode_x()] as usize) { self.pc += 4; } else { self.pc += 2; } } 0xa1 => { if !self.key.is_pressed(self.v[self.opcode_x()] as usize) { self.pc += 4; } else { self.pc += 2; } } _ => self.nop() } } fn instr_f(&mut self) { match self.opcode & 0x00ff { 0x07 => self.v[self.opcode_x()] = self.delay_timer, 0x0a => { let mut pressed: bool = false; for i in 0..16 { if self.key.is_pressed(i as usize) { self.v[self.opcode_x()] = i; pressed = true; } } if !pressed { self.pc -= 2; } } 0x15 => self.delay_timer = self.v[self.opcode_x()], 0x18 => self.sound_timer = self.v[self.opcode_x()], 0x1e => { if self.v[self.opcode_x()] as u16 + self.i > 0x0fff { self.v[0xf] = 1; } else { self.v[0xf] = 0; } self.i = self.i.wrapping_add(self.v[self.opcode_x()] as u16); } 0x29 => self.i = (self.v[self.opcode_x()] as u16 * 5) + 0x50, 0x33 => { self.mem[self.i as usize] = self.v[self.opcode_x() as usize] / 100; self.mem[(self.i + 1) as usize] = (self.v[self.opcode_x() as usize] / 10) % 10; self.mem[(self.i + 2) as usize] = self.v[self.opcode_x() as usize] % 10; } 0x55 => { for i in 0..self.opcode_x() { self.mem[i + self.i as usize] = self.v[i]; } self.i += self.opcode_x() as u16 + 1; } 0x65 => { for i in 0..(self.opcode_x() + 1) { self.v[i] = self.mem[i + self.i as usize]; } self.i += self.opcode_x() as u16 + 1; } _ => self.nop() } self.pc += 2; } fn nop(&self) { println!("Nop instruction"); } fn opcode_x(&self) -> usize { ((self.opcode & 0x0f00) >> 8) as usize } fn opcode_y(&self) -> usize { ((self.opcode & 0x00f0) >> 4) as usize } fn opcode_n(&self) -> u8 { (self.opcode & 0x000f) as u8 } fn opcode_nn(&self) -> u8 { (self.opcode & 0x00ff) as u8 } fn opcode_nnn(&self) -> u16 { 
(self.opcode & 0x0fff) as u16 } } const FONTSET: [u8; 80] = [ 0xF0, 0x90, 0x90, 0x90, 0xF0, 0x20, 0x60, 0x20, 0x20, 0x70, 0xF0, 0x10, 0xF0, 0x80, 0xF0, 0xF0, 0x10, 0xF0, 0x10, 0xF0, 0x90, 0x90, 0xF0, 0x10, 0x10, 0xF0, 0x80, 0xF0, 0x10, 0xF0, 0xF0, 0x80, 0xF0, 0x90, 0xF0, 0xF0, 0x10, 0x20, 0x40, 0x40, 0xF0, 0x90, 0xF0, 0x90, 0xF0, 0xF0, 0x90, 0xF0, 0x10, 0xF0, 0xF0, 0x90, 0xF0, 0x90, 0x90, 0xE0, 0x90, 0xE0, 0x90, 0xE0, 0xF0, 0x80, 0x80, 0x80, 0xF0, 0xE0, 0x90, 0x90, 0x90, 0xE0, 0xF0, 0x80, 0xF0, 0x80, 0xF0, 0xF0, 0x80, 0xF0, 0x80, 0x80 ];
extern crate rand; use rand::Rng; use crate::keypad::Keypad; use crate::graphics::Graphics; pub struct Cpu { i: u16, v: [u8; 16], pc: u16, sp: u16, stack: [u16; 16], mem: [u8; 4096], sound_timer: u8, delay_timer: u8, opcode: u16, pub key: Keypad, pub graphics: Graphics } impl Cpu { pub fn new() -> Cpu {
for i in 0..80 { cpu.mem[0x50 + i] = FONTSET[i]; } cpu } pub fn load_game(&mut self, game: Vec<u8>) { let mut data = Vec::new(); for byte in game { data.push(byte); } for (i, &byte) in data.iter().enumerate() { self.mem[i + 0x200] = byte; } } pub fn emulate_cycle(&mut self) { self.opcode = (self.mem[self.pc as usize] as u16) << 8 | (self.mem[(self.pc as usize) + 1] as u16); println!("{:x}", self.opcode); match self.opcode & 0xf000 { 0x0000 => self.instr_0(), 0x1000 => self.instr_1(), 0x2000 => self.instr_2(), 0x3000 => self.instr_3(), 0x4000 => self.instr_4(), 0x5000 => self.instr_5(), 0x6000 => self.instr_6(), 0x7000 => self.instr_7(), 0x8000 => self.instr_8(), 0x9000 => self.instr_9(), 0xa000 => self.instr_a(), 0xb000 => self.instr_b(), 0xc000 => self.instr_c(), 0xd000 => self.instr_d(), 0xe000 => self.instr_e(), 0xf000 => self.instr_f(), _ => self.nop() } if self.delay_timer > 0 { self.delay_timer -= 1; } if self.sound_timer > 0 { if self.sound_timer == 1 { println!("BEEP!"); } self.sound_timer -= 1; } } fn instr_0(&mut self) { match self.opcode & 0x00ff { 0xe0 => self.graphics.clear(), 0xee => { self.sp -= 1; self.pc = self.stack[self.sp as usize]; } _ => self.nop() } self.pc += 2; } fn instr_1(&mut self) { self.pc = self.opcode_nnn(); } fn instr_2(&mut self) { self.stack[self.sp as usize] = self.pc; self.sp += 1; self.pc = self.opcode_nnn(); } fn instr_3(&mut self) { if self.v[self.opcode_x()] == self.opcode_nn() { self.pc += 4; } else { self.pc += 2; } } fn instr_4(&mut self) { if self.v[self.opcode_x()] != self.opcode_nn() { self.pc += 4; } else { self.pc += 2; } } fn instr_5(&mut self) { if self.v[self.opcode_x()] == self.v[self.opcode_y()] { self.pc += 4; } else { self.pc += 2; } } fn instr_6(&mut self) { self.v[self.opcode_x()] = self.opcode_nn(); self.pc += 2; } fn instr_7(&mut self) { self.v[self.opcode_x()] = self.v[self.opcode_x()].wrapping_add(self.opcode_nn()); self.pc += 2; } fn instr_8(&mut self) { match self.opcode & 0x000f { 0 => 
self.v[self.opcode_x()] = self.v[self.opcode_y()], 1 => self.v[self.opcode_x()] = self.v[self.opcode_x()] | self.v[self.opcode_y()], 2 => self.v[self.opcode_x()] = self.v[self.opcode_x()] & self.v[self.opcode_y()], 3 => self.v[self.opcode_x()] = self.v[self.opcode_x()] ^ self.v[self.opcode_y()], 4 => { if self.v[self.opcode_x()] > 0xff - self.v[self.opcode_y()] { self.v[0xf] = 1; } else { self.v[0xf] = 0; } self.v[self.opcode_x()] = self.v[self.opcode_x()].wrapping_add(self.v[self.opcode_y()]); } 5 => { if self.v[self.opcode_y()] > self.v[self.opcode_x()] { self.v[0xf] = 0; } else { self.v[0xf] = 1; } self.v[self.opcode_x()] = self.v[self.opcode_x()].wrapping_sub(self.v[self.opcode_y()]); } 6 => { self.v[0xf] = self.v[self.opcode_x()] & 0x1; self.v[self.opcode_x()] >>= 1; } 7 => { if self.v[self.opcode_x()] > self.v[self.opcode_y()] { self.v[0xf] = 0; } else { self.v[0xf] = 1; } self.v[self.opcode_x()] = self.v[self.opcode_y()].wrapping_sub(self.v[self.opcode_x()]); } 0xe => { self.v[0xf] = self.v[self.opcode_x()] >> 7; self.v[self.opcode_x()] <<= 1; } _ => self.nop() } self.pc += 2; } fn instr_9(&mut self) { if self.v[self.opcode_x()] != self.v[self.opcode_y()] { self.pc += 4; } else { self.pc += 2; } } fn instr_a(&mut self) { self.i = self.opcode_nnn(); self.pc += 2; } fn instr_b(&mut self) { self.pc = (self.v[0] as u16) + self.opcode_nnn(); } fn instr_c(&mut self) { let mut rng = rand::thread_rng(); let random_num: u8 = rng.gen(); self.v[self.opcode_x()] = random_num & self.opcode_nn(); self.pc += 2; } fn instr_d(&mut self) { let x = self.opcode_x(); let y = self.opcode_y(); let n = self.opcode_n(); self.v[15] = self.graphics.update(self.v[x] as usize, self.v[y] as usize, n, self.i, self.mem); self.pc += 2; } fn instr_e(&mut self) { match (self.opcode & 0x00ff) as u8 { 0x9e => { if self.key.is_pressed(self.v[self.opcode_x()] as usize) { self.pc += 4; } else { self.pc += 2; } } 0xa1 => { if !self.key.is_pressed(self.v[self.opcode_x()] as usize) { self.pc += 4; } 
else { self.pc += 2; } } _ => self.nop() } } fn instr_f(&mut self) { match self.opcode & 0x00ff { 0x07 => self.v[self.opcode_x()] = self.delay_timer, 0x0a => { let mut pressed: bool = false; for i in 0..16 { if self.key.is_pressed(i as usize) { self.v[self.opcode_x()] = i; pressed = true; } } if !pressed { self.pc -= 2; } } 0x15 => self.delay_timer = self.v[self.opcode_x()], 0x18 => self.sound_timer = self.v[self.opcode_x()], 0x1e => { if self.v[self.opcode_x()] as u16 + self.i > 0x0fff { self.v[0xf] = 1; } else { self.v[0xf] = 0; } self.i = self.i.wrapping_add(self.v[self.opcode_x()] as u16); } 0x29 => self.i = (self.v[self.opcode_x()] as u16 * 5) + 0x50, 0x33 => { self.mem[self.i as usize] = self.v[self.opcode_x() as usize] / 100; self.mem[(self.i + 1) as usize] = (self.v[self.opcode_x() as usize] / 10) % 10; self.mem[(self.i + 2) as usize] = self.v[self.opcode_x() as usize] % 10; } 0x55 => { for i in 0..self.opcode_x() { self.mem[i + self.i as usize] = self.v[i]; } self.i += self.opcode_x() as u16 + 1; } 0x65 => { for i in 0..(self.opcode_x() + 1) { self.v[i] = self.mem[i + self.i as usize]; } self.i += self.opcode_x() as u16 + 1; } _ => self.nop() } self.pc += 2; } fn nop(&self) { println!("Nop instruction"); } fn opcode_x(&self) -> usize { ((self.opcode & 0x0f00) >> 8) as usize } fn opcode_y(&self) -> usize { ((self.opcode & 0x00f0) >> 4) as usize } fn opcode_n(&self) -> u8 { (self.opcode & 0x000f) as u8 } fn opcode_nn(&self) -> u8 { (self.opcode & 0x00ff) as u8 } fn opcode_nnn(&self) -> u16 { (self.opcode & 0x0fff) as u16 } } const FONTSET: [u8; 80] = [ 0xF0, 0x90, 0x90, 0x90, 0xF0, 0x20, 0x60, 0x20, 0x20, 0x70, 0xF0, 0x10, 0xF0, 0x80, 0xF0, 0xF0, 0x10, 0xF0, 0x10, 0xF0, 0x90, 0x90, 0xF0, 0x10, 0x10, 0xF0, 0x80, 0xF0, 0x10, 0xF0, 0xF0, 0x80, 0xF0, 0x90, 0xF0, 0xF0, 0x10, 0x20, 0x40, 0x40, 0xF0, 0x90, 0xF0, 0x90, 0xF0, 0xF0, 0x90, 0xF0, 0x10, 0xF0, 0xF0, 0x90, 0xF0, 0x90, 0x90, 0xE0, 0x90, 0xE0, 0x90, 0xE0, 0xF0, 0x80, 0x80, 0x80, 0xF0, 0xE0, 0x90, 0x90, 0x90, 
0xE0, 0xF0, 0x80, 0xF0, 0x80, 0xF0, 0xF0, 0x80, 0xF0, 0x80, 0x80 ];
let mut cpu = Cpu { i: 0x200, v: [0; 16], pc: 0x200, sp: 0, stack: [0; 16], mem: [0; 4096], sound_timer: 0, delay_timer: 0, opcode: 0, key: Keypad::new(), graphics: Graphics::new() };
assignment_statement
[ { "content": "fn main() {\n\n let mut cpu = Cpu::new();\n\n let args: Vec<String> = env::args().collect();\n\n\n\n // if no arg is given, then default to pong2.c8\n\n let mut rom = if args.len() < 2 { String::from(\"pong2.c8\") } else { String::from(&args[1]) };\n\n rom = format!(\"roms/{}\", rom);\n\n\n\n // if the rom isn't fond, then load pong2.c8\n\n let game = fs::read(rom);\n\n let game = match game {\n\n Ok(g) => g,\n\n Err(_) => {\n\n println!(\"Couldn't find file! Loading pong2.c8...\");\n\n\n\n // attempt to load pong2.c8, panic if not found\n\n let pong = fs::read(\"roms/pong2.c8\");\n\n match pong {\n\n Ok(p) => p,\n\n Err(err) => {\n", "file_path": "src/main.rs", "rank": 0, "score": 19836.245858114053 }, { "content": "use sdl2::keyboard::Keycode;\n\n\n\npub struct Keypad {\n\n key: [bool; 16]\n\n}\n\n\n\nimpl Keypad {\n\n pub fn new() -> Keypad {\n\n Keypad {\n\n key: [false; 16]\n\n }\n\n }\n\n\n\n // press down a key\n\n pub fn set(&mut self, key: Keycode) {\n\n match key {\n\n Keycode::Num1 => self.key[1] = true,\n\n Keycode::Num2 => self.key[2] = true,\n\n Keycode::Num3 => self.key[3] = true,\n\n Keycode::Num4 => self.key[12] = true,\n", "file_path": "src/keypad.rs", "rank": 1, "score": 17536.586454691205 }, { "content": " Keycode::Q => self.key[4] = true,\n\n Keycode::W => self.key[5] = true,\n\n Keycode::E => self.key[6] = true,\n\n Keycode::R => self.key[13] = true,\n\n Keycode::A => self.key[7] = true,\n\n Keycode::S => self.key[8] = true,\n\n Keycode::D => self.key[9] = true,\n\n Keycode::F => self.key[14] = true,\n\n Keycode::Z => self.key[10] = true,\n\n Keycode::X => self.key[0] = true,\n\n Keycode::C => self.key[11] = true,\n\n Keycode::V => self.key[15] = true,\n\n _ => {}\n\n }\n\n }\n\n\n\n // unpress a key\n\n pub fn reset(&mut self, key: Keycode) {\n\n println!{\"Reset key: {}\", key};\n\n match key {\n", "file_path": "src/keypad.rs", "rank": 2, "score": 17530.81267129596 }, { "content": " Keycode::Num1 => self.key[1] = false,\n\n 
Keycode::Num2 => self.key[2] = false,\n\n Keycode::Num3 => self.key[3] = false,\n\n Keycode::Num4 => self.key[12] = false,\n\n Keycode::Q => self.key[4] = false,\n\n Keycode::W => self.key[5] = false,\n\n Keycode::E => self.key[6] = false,\n\n Keycode::R => self.key[13] = false,\n\n Keycode::A => self.key[7] = false,\n\n Keycode::S => self.key[8] = false,\n\n Keycode::D => self.key[9] = false,\n\n Keycode::F => self.key[14] = false,\n\n Keycode::Z => self.key[10] = false,\n\n Keycode::X => self.key[0] = false,\n\n Keycode::C => self.key[11] = false,\n\n Keycode::V => self.key[15] = false,\n\n _ => {}\n\n }\n\n }\n\n\n\n // returns true if the key at the given index is pressed\n\n pub fn is_pressed(&self, i: usize) -> bool { self.key[i] }\n\n}\n", "file_path": "src/keypad.rs", "rank": 3, "score": 17530.544823608663 }, { "content": "use sdl2::pixels::Color;\n\nuse sdl2::rect::Rect;\n\nuse sdl2::render::WindowCanvas;\n\n\n\npub struct Graphics {\n\n gfx: [[u8; 64]; 32], // represent graphics as a 2d array\n\n draw_flag: bool\n\n}\n\n\n\nimpl Graphics {\n\n pub fn new() -> Graphics {\n\n Graphics {\n\n gfx: [[0; 64]; 32],\n\n draw_flag: true\n\n }\n\n }\n\n\n\n // reset to original state\n\n pub fn clear(&mut self) {\n\n self.gfx = [[0; 64]; 32];\n", "file_path": "src/graphics.rs", "rank": 4, "score": 17371.453588300425 }, { "content": " canvas.fill_rect(Rect::new((x * 10) as i32, (y * 10) as i32, 10, 10)).unwrap();\n\n }\n\n }\n\n\n\n canvas.present();\n\n\n\n // reset flag\n\n self.draw_flag = false;\n\n }\n\n }\n\n\n\n // used for opcode 0xDXYN\n\n pub fn update(&mut self, x: usize, y: usize, height: u8, i: u16, mem: [u8; 4096]) -> u8 {\n\n let mut pixel: u8;\n\n\n\n // return value\n\n let mut v15: u8 = 0;\n\n\n\n for yline in 0..height as usize {\n\n pixel = mem[i as usize + yline];\n", "file_path": "src/graphics.rs", "rank": 5, "score": 17370.88620938254 }, { "content": " self.draw_flag = true;\n\n }\n\n\n\n // draws the graphics to the canvas\n\n pub fn 
draw(&mut self, canvas: &mut WindowCanvas) {\n\n // only draw if the flag is set\n\n if self.draw_flag {\n\n canvas.clear();\n\n for y in 0..32 {\n\n for x in 0..64 {\n\n // if unset then draw black, otherwise white\n\n if self.gfx[y][x] == 0 {\n\n // black\n\n canvas.set_draw_color(Color::RGB(0, 0, 0));\n\n } else {\n\n // white\n\n canvas.set_draw_color(Color::RGB(255, 255, 255));\n\n }\n\n\n\n // fill rect with the appropriate color from above\n", "file_path": "src/graphics.rs", "rank": 6, "score": 17363.605644075964 }, { "content": "\n\n for xline in 0..8 as usize {\n\n if pixel & (0x80 >> xline) != 0 {\n\n if self.gfx[(y + yline) % 32][(x + xline) % 64] == 1 {\n\n v15 = 1;\n\n }\n\n\n\n self.gfx[(y + yline) % 32][(x + xline) % 64] ^= 1;\n\n }\n\n }\n\n }\n\n\n\n self.draw_flag = true;\n\n\n\n v15\n\n }\n\n}\n", "file_path": "src/graphics.rs", "rank": 7, "score": 17361.43952440924 }, { "content": "extern crate sdl2;\n\n\n\nuse std::env;\n\nuse std::fs;\n\n\n\nuse cpu::Cpu;\n\n\n\nuse sdl2::event::Event;\n\nuse sdl2::keyboard::Keycode;\n\n\n\nuse std::time::Duration;\n\nuse std::thread;\n\n\n\nmod cpu;\n\nmod keypad;\n\nmod graphics;\n\n\n", "file_path": "src/main.rs", "rank": 27, "score": 10.202789978760814 }, { "content": " Event::KeyUp { keycode: Some(Keycode::Z), .. } => cpu.key.reset(Keycode::Z),\n\n Event::KeyUp { keycode: Some(Keycode::X), .. } => cpu.key.reset(Keycode::X),\n\n Event::KeyUp { keycode: Some(Keycode::C), .. } => cpu.key.reset(Keycode::C),\n\n Event::KeyUp { keycode: Some(Keycode::V), .. } => cpu.key.reset(Keycode::V),\n\n _ => {}\n\n }\n\n }\n\n\n\n // emulate, draw, and sleep\n\n cpu.emulate_cycle();\n\n cpu.graphics.draw(&mut canvas);\n\n thread::sleep(Duration::new(0, 1_000_000_000u32 / 60)); // 60 Hz\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 28, "score": 8.166850911171432 }, { "content": " Event::KeyDown { keycode: Some(Keycode::D), .. } => cpu.key.set(Keycode::D),\n\n Event::KeyDown { keycode: Some(Keycode::F), .. 
} => cpu.key.set(Keycode::F),\n\n Event::KeyDown { keycode: Some(Keycode::Z), .. } => cpu.key.set(Keycode::Z),\n\n Event::KeyDown { keycode: Some(Keycode::X), .. } => cpu.key.set(Keycode::X),\n\n Event::KeyDown { keycode: Some(Keycode::C), .. } => cpu.key.set(Keycode::C),\n\n Event::KeyDown { keycode: Some(Keycode::V), .. } => cpu.key.set(Keycode::V),\n\n\n\n // key lifted (reset)\n\n Event::KeyUp { keycode: Some(Keycode::Num1), .. } => cpu.key.reset(Keycode::Num1),\n\n Event::KeyUp { keycode: Some(Keycode::Num2), .. } => cpu.key.reset(Keycode::Num2),\n\n Event::KeyUp { keycode: Some(Keycode::Num3), .. } => cpu.key.reset(Keycode::Num3),\n\n Event::KeyUp { keycode: Some(Keycode::Num4), .. } => cpu.key.reset(Keycode::Num4),\n\n Event::KeyUp { keycode: Some(Keycode::Q), .. } => cpu.key.reset(Keycode::Q),\n\n Event::KeyUp { keycode: Some(Keycode::W), .. } => cpu.key.reset(Keycode::W),\n\n Event::KeyUp { keycode: Some(Keycode::E), .. } => cpu.key.reset(Keycode::E),\n\n Event::KeyUp { keycode: Some(Keycode::R), .. } => cpu.key.reset(Keycode::R),\n\n Event::KeyUp { keycode: Some(Keycode::A), .. } => cpu.key.reset(Keycode::A),\n\n Event::KeyUp { keycode: Some(Keycode::S), .. } => cpu.key.reset(Keycode::S),\n\n Event::KeyUp { keycode: Some(Keycode::D), .. } => cpu.key.reset(Keycode::D),\n\n Event::KeyUp { keycode: Some(Keycode::F), .. } => cpu.key.reset(Keycode::F),\n", "file_path": "src/main.rs", "rank": 29, "score": 5.734985095500074 }, { "content": "\n\n // game loop, each iteration represents a cycle of the cpu\n\n 'running: loop {\n\n // match events\n\n for event in event_pump.poll_iter() {\n\n match event {\n\n // quit\n\n Event::Quit {..} | Event::KeyDown { keycode: Some(Keycode::Escape), .. } => break 'running,\n\n\n\n // key pressed (set)\n\n Event::KeyDown { keycode: Some(Keycode::Num1), .. } => cpu.key.set(Keycode::Num1),\n\n Event::KeyDown { keycode: Some(Keycode::Num2), .. } => cpu.key.set(Keycode::Num2),\n\n Event::KeyDown { keycode: Some(Keycode::Num3), .. 
} => cpu.key.set(Keycode::Num3),\n\n Event::KeyDown { keycode: Some(Keycode::Num4), .. } => cpu.key.set(Keycode::Num4),\n\n Event::KeyDown { keycode: Some(Keycode::Q), .. } => cpu.key.set(Keycode::Q),\n\n Event::KeyDown { keycode: Some(Keycode::W), .. } => cpu.key.set(Keycode::W),\n\n Event::KeyDown { keycode: Some(Keycode::E), .. } => cpu.key.set(Keycode::E),\n\n Event::KeyDown { keycode: Some(Keycode::R), .. } => cpu.key.set(Keycode::R),\n\n Event::KeyDown { keycode: Some(Keycode::A), .. } => cpu.key.set(Keycode::A),\n\n Event::KeyDown { keycode: Some(Keycode::S), .. } => cpu.key.set(Keycode::S),\n", "file_path": "src/main.rs", "rank": 30, "score": 4.793412873992214 }, { "content": " panic!(\"Couldn't find pong2.c8: {}\", err);\n\n }\n\n }\n\n }\n\n };\n\n\n\n // load the game into the cpu's ram\n\n cpu.load_game(game);\n\n\n\n // initialize sdl2\n\n let sdl_context = sdl2::init().unwrap();\n\n let video_subsystem = sdl_context.video().unwrap();\n\n\n\n let window = video_subsystem.window(\"chip8\", 640, 320).position_centered().build().unwrap();\n\n\n\n let mut canvas = window.into_canvas().build().unwrap();\n\n canvas.clear();\n\n canvas.present();\n\n\n\n let mut event_pump = sdl_context.event_pump().unwrap();\n", "file_path": "src/main.rs", "rank": 31, "score": 1.4978020462323733 } ]
Rust
src/timers.rs
david-sawatzke/swm050-hal
95cec73c1b15475ded314bd4ed1f517085838f9a
use core::ops::Deref; use embedded_hal::timer::{Cancel, CountDown, Periodic}; use void::Void; use crate::delay::Delay; use crate::syscon::{ClockEnable, Clocks, Syscon}; use crate::time::Hertz; pub(crate) type TimerRegisterBlock = swm050::tmrse0::RegisterBlock; pub struct Timer<TIMER> { clocks: Clocks, pub(crate) timer: TIMER, } impl<TIMER> Timer<TIMER> where TIMER: Deref<Target = TimerRegisterBlock> + ClockEnable, { pub fn timer<T>(timer: TIMER, timeout: T, syscon: &mut Syscon) -> Timer<TIMER> where T: Into<Hertz>, { TIMER::enable(syscon); timer.intctrl.write(|w| w.ena().set_bit()); let mut timer = Timer { timer: timer, clocks: syscon.clocks, }; timer.start(timeout); timer } pub fn release(self) -> TIMER { self.timer } } impl<TIMER> CountDown for Timer<TIMER> where TIMER: Deref<Target = TimerRegisterBlock>, { type Time = Hertz; fn start<T>(&mut self, timeout: T) where T: Into<Hertz>, { self.timer.ctrl.write(|w| w.ena().clear_bit()); self.timer.intoflag.write(|w| unsafe { w.bits(0) }); let frequency = timeout.into().0; let ticks = self.clocks.timsclk().0 / frequency / 2; self.timer.tarval.write(|w| unsafe { w.bits(ticks) }); self.timer.curval.write(|w| unsafe { w.bits(0) }); self.timer.ctrl.write(|w| w.ena().set_bit()); } fn wait(&mut self) -> nb::Result<(), Void> { if self.timer.intoflag.read().bits() == 0 { Err(nb::Error::WouldBlock) } else { self.timer.curval.read(); Ok(()) } } } impl<TIMER> Periodic for Timer<TIMER> where TIMER: Deref<Target = TimerRegisterBlock> {} impl CountDown for Delay { type Time = Hertz; fn start<T>(&mut self, timeout: T) where T: Into<Hertz>, { let frequency = timeout.into().0; let ticks = self.scale * 1_000_000 / frequency; self.countdown = Some((ticks, unsafe { (*self.timer).curval.read().bits() })); } fn wait(&mut self) -> nb::Result<(), Void> { if let Some((ref ticks, ref start_count)) = self.countdown { if unsafe { (*(self.timer)) .curval .read() .bits() .wrapping_sub(*start_count) } < *ticks { Err(nb::Error::WouldBlock) } else { 
let ticks = *ticks; self.countdown = Some((ticks, unsafe { (*self.timer).curval.read().bits() })); Ok(()) } } else { Err(nb::Error::WouldBlock) } } } impl Periodic for Delay {} impl Cancel for Delay { type Error = (); fn cancel(&mut self) -> Result<(), ()> { self.countdown = None; Ok(()) } }
use core::ops::Deref; use embedded_hal::timer::{Cancel, CountDown, Periodic}; use void::Void; use crate::delay::Delay; use crate::syscon::{ClockEnable, Clocks, Syscon}; use crate::time::Hertz; pub(crate) type TimerRegisterBlock = swm050::tmrse0::RegisterBlock; pub struct Timer<TIMER> { clocks: Clocks, pub(crate) timer: TIMER, } impl<TIMER> Timer<TIMER> where TIMER: Deref<Target = TimerRegisterBlock> + ClockEnable, { pub fn timer<T>(timer: TIMER, timeout: T, syscon: &mut Syscon) -> Timer<TIMER> where T: Into<Hertz>, { TIMER::enable(syscon);
pub fn release(self) -> TIMER { self.timer } } impl<TIMER> CountDown for Timer<TIMER> where TIMER: Deref<Target = TimerRegisterBlock>, { type Time = Hertz; fn start<T>(&mut self, timeout: T) where T: Into<Hertz>, { self.timer.ctrl.write(|w| w.ena().clear_bit()); self.timer.intoflag.write(|w| unsafe { w.bits(0) }); let frequency = timeout.into().0; let ticks = self.clocks.timsclk().0 / frequency / 2; self.timer.tarval.write(|w| unsafe { w.bits(ticks) }); self.timer.curval.write(|w| unsafe { w.bits(0) }); self.timer.ctrl.write(|w| w.ena().set_bit()); } fn wait(&mut self) -> nb::Result<(), Void> { if self.timer.intoflag.read().bits() == 0 { Err(nb::Error::WouldBlock) } else { self.timer.curval.read(); Ok(()) } } } impl<TIMER> Periodic for Timer<TIMER> where TIMER: Deref<Target = TimerRegisterBlock> {} impl CountDown for Delay { type Time = Hertz; fn start<T>(&mut self, timeout: T) where T: Into<Hertz>, { let frequency = timeout.into().0; let ticks = self.scale * 1_000_000 / frequency; self.countdown = Some((ticks, unsafe { (*self.timer).curval.read().bits() })); } fn wait(&mut self) -> nb::Result<(), Void> { if let Some((ref ticks, ref start_count)) = self.countdown { if unsafe { (*(self.timer)) .curval .read() .bits() .wrapping_sub(*start_count) } < *ticks { Err(nb::Error::WouldBlock) } else { let ticks = *ticks; self.countdown = Some((ticks, unsafe { (*self.timer).curval.read().bits() })); Ok(()) } } else { Err(nb::Error::WouldBlock) } } } impl Periodic for Delay {} impl Cancel for Delay { type Error = (); fn cancel(&mut self) -> Result<(), ()> { self.countdown = None; Ok(()) } }
timer.intctrl.write(|w| w.ena().set_bit()); let mut timer = Timer { timer: timer, clocks: syscon.clocks, }; timer.start(timeout); timer }
function_block-function_prefix_line
[ { "content": "pub trait ClockEnable {\n\n fn enable(syscon: &mut Syscon);\n\n}\n\nmacro_rules! clock_enable {\n\n ($PERIPH: ident, $field:ident) => {\n\n impl ClockEnable for swm050::$PERIPH {\n\n fn enable(syscon: &mut Syscon) {\n\n syscon.regs.pclk_en.modify(|_, w| w.$field().set_bit());\n\n }\n\n }\n\n };\n\n}\n\nclock_enable!(TMRSE0, tmrse0_clk);\n\nclock_enable!(TMRSE1, tmrse1_clk);\n\nclock_enable!(WDT, wdt_clk);\n", "file_path": "src/syscon.rs", "rank": 0, "score": 59331.63959480604 }, { "content": "/// Extension trait that sets up the `SYSCON` peripheral\n\npub trait SysconExt {\n\n /// Configure the clocks of the SYSCON peripheral\n\n fn configure(self) -> CFGR;\n\n}\n\n\n\nimpl SysconExt for SYS {\n\n fn configure(self) -> CFGR {\n\n CFGR {\n\n timsclk: None,\n\n sclk: None,\n\n syscon: self,\n\n }\n\n }\n\n}\n\n\n\n/// Constrained syscon peripheral\n\npub struct Syscon {\n\n pub clocks: Clocks,\n\n pub(crate) regs: SYS,\n\n}\n", "file_path": "src/syscon.rs", "rank": 1, "score": 48546.25125262455 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n if let (Some(p), Some(_cp)) = (swm050::Peripherals::take(), Peripherals::take()) {\n\n cortex_m::interrupt::free(move |cs| {\n\n let gpioa = p.GPIOA.split();\n\n\n\n let mut syscon = p.SYS.configure().freeze();\n\n\n\n /* (Re-)configure PA5 as output */\n\n let mut led = gpioa.pa_5.into_push_pull_output(&cs);\n\n\n\n /* Get timer */\n\n let mut timer = Timer::timer(p.TMRSE0, Hertz(1), &mut syscon);\n\n loop {\n\n timer.start(Hertz(1));\n\n led.set_high().ok();\n\n nb::block!(timer.wait()).unwrap();\n\n led.set_low().ok();\n\n nb::block!(timer.wait()).unwrap();\n\n // Do \"pwm\"\n\n timer.start(Hertz(600));\n", "file_path": "examples/blinky_timer.rs", "rank": 2, "score": 45949.295661359356 }, { "content": "#[entry]\n\nfn main() -> ! 
{\n\n if let Some(p) = swm050::Peripherals::take() {\n\n cortex_m::interrupt::free(move |cs| {\n\n let gpioa = p.GPIOA.split();\n\n\n\n /* (Re-)configure PA5 as output */\n\n let mut led = gpioa.pa_5.into_push_pull_output(&cs);\n\n\n\n loop {\n\n /* Turn PA5 on a million times in a row */\n\n for _ in 0..1_000_000 {\n\n led.set_high().ok();\n\n }\n\n /* Then turn PA5 off a million times in a row */\n\n for _ in 0..1_000_000 {\n\n led.set_low().ok();\n\n }\n\n }\n\n });\n\n }\n\n\n\n loop {\n\n continue;\n\n }\n\n}\n", "file_path": "examples/blinky.rs", "rank": 3, "score": 29516.159557024235 }, { "content": "#[entry]\n\nfn main() -> ! {\n\n if let Some(p) = swm050::Peripherals::take() {\n\n cortex_m::interrupt::free(move |cs| {\n\n let gpioa = p.GPIOA.split();\n\n\n\n let mut syscon = p.SYS.configure().freeze();\n\n /* (Re-)configure PA5 as output */\n\n let mut led = gpioa.pa_5.into_push_pull_output(&cs);\n\n\n\n /* Get delay provider */\n\n let mut delay = Delay::new(p.TMRSE1, &mut syscon);\n\n loop {\n\n led.set_high().ok();\n\n delay.delay_ms(1_000_u16);\n\n led.set_low().ok();\n\n delay.delay_ms(1_000_u16);\n\n }\n\n });\n\n }\n\n\n\n loop {\n\n continue;\n\n }\n\n}\n", "file_path": "examples/blinky_delay.rs", "rank": 4, "score": 28219.79207206607 }, { "content": "/// Extension trait that adds convenience methods to the `u32` type\n\npub trait U32Ext {\n\n /// Wrap in `Bps`\n\n fn bps(self) -> Bps;\n\n\n\n /// Wrap in `Hertz`\n\n fn hz(self) -> Hertz;\n\n\n\n /// Wrap in `KiloHertz`\n\n fn khz(self) -> KiloHertz;\n\n\n\n /// Wrap in `MegaHertz`\n\n fn mhz(self) -> MegaHertz;\n\n}\n\n\n\nimpl U32Ext for u32 {\n\n fn bps(self) -> Bps {\n\n Bps(self)\n\n }\n\n\n\n fn hz(self) -> Hertz {\n", "file_path": "src/time.rs", "rank": 5, "score": 24870.758030728866 }, { "content": "// TODO Implement marker for af with PushPull or OpenDrain\n\n/// Extension trait to split a GPIO peripheral in independent pins and registers\n\npub trait GpioExt {\n\n /// The parts to split 
the GPIO into\n\n type Parts;\n\n\n\n /// Splits the GPIO block into independent pins and registers\n\n // NOTE We don't need an rcc parameter because it's enabled by default\n\n fn split(self) -> Self::Parts;\n\n}\n\n\n", "file_path": "src/gpio.rs", "rank": 6, "score": 24868.174617925975 }, { "content": " self.sclk = Some(freq.into().0);\n\n self\n\n }\n\n\n\n pub fn freeze(self) -> Syscon {\n\n let sclk = self.sclk.map(|_| unimplemented!()).unwrap_or(18000000);\n\n let timsclk = self.timsclk.map(|_| unimplemented!()).unwrap_or(18000000);\n\n Syscon {\n\n clocks: Clocks {\n\n timsclk: Hertz(timsclk),\n\n sclk: Hertz(sclk),\n\n },\n\n regs: self.syscon,\n\n }\n\n }\n\n}\n\n\n\n/// Frozen clock frequencies\n\n///\n\n/// The existence of this value indicates that the clock configuration can no longer be changed\n", "file_path": "src/syscon.rs", "rank": 7, "score": 20881.38664515801 }, { "content": "\n\npub struct CFGR {\n\n timsclk: Option<u32>,\n\n sclk: Option<u32>,\n\n syscon: SYS,\n\n}\n\n\n\nimpl CFGR {\n\n pub fn timsclk<F>(mut self, freq: F) -> Self\n\n where\n\n F: Into<Hertz>,\n\n {\n\n self.timsclk = Some(freq.into().0);\n\n self\n\n }\n\n\n\n pub fn sclk<F>(mut self, freq: F) -> Self\n\n where\n\n F: Into<Hertz>,\n\n {\n", "file_path": "src/syscon.rs", "rank": 8, "score": 20880.901455779196 }, { "content": "#[derive(Clone, Copy)]\n\npub struct Clocks {\n\n timsclk: Hertz,\n\n sclk: Hertz,\n\n}\n\n\n\nimpl Clocks {\n\n /// Returns the frequency of the sysclock\n\n pub fn sclk(&self) -> Hertz {\n\n self.sclk\n\n }\n\n\n\n /// Returns the frequency of the timerclock\n\n pub fn timsclk(&self) -> Hertz {\n\n self.timsclk\n\n }\n\n}\n\n\n", "file_path": "src/syscon.rs", "rank": 9, "score": 20879.752238004952 }, { "content": "use crate::swm050::SYS;\n\nuse crate::time::Hertz;\n\n\n\n/// Extension trait that sets up the `SYSCON` peripheral\n", "file_path": "src/syscon.rs", "rank": 10, "score": 20879.54961447561 }, { "content": 
"#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_halt;\n\n\n\nuse swm050_hal as hal;\n\n\n\nuse crate::hal::prelude::*;\n\nuse crate::hal::swm050;\n\nuse crate::hal::time::Hertz;\n\nuse crate::hal::timers::*;\n\n\n\nuse cortex_m::peripheral::Peripherals;\n\nuse cortex_m_rt::entry;\n\n\n\n#[entry]\n", "file_path": "examples/blinky_timer.rs", "rank": 18, "score": 18982.349276902823 }, { "content": " for _ in 0..200 {\n\n led.set_high().ok();\n\n nb::block!(timer.wait()).unwrap();\n\n nb::block!(timer.wait()).unwrap();\n\n led.set_low().ok();\n\n nb::block!(timer.wait()).unwrap();\n\n }\n\n }\n\n });\n\n }\n\n\n\n loop {\n\n continue;\n\n }\n\n}\n", "file_path": "examples/blinky_timer.rs", "rank": 19, "score": 18980.518713558064 }, { "content": "use crate::swm050::WDT;\n\nuse crate::syscon::{ClockEnable, Clocks, Syscon};\n\nuse crate::time::Hertz;\n\nuse embedded_hal::watchdog;\n\n\n\n/// Watchdog instance\n\npub struct Watchdog {\n\n clocks: Clocks,\n\n wdt: WDT,\n\n}\n\n\n\nimpl Watchdog {\n\n pub fn new(wdt: WDT, syscon: &mut Syscon) -> Self {\n\n WDT::enable(syscon);\n\n Self {\n\n wdt,\n\n clocks: syscon.clocks,\n\n }\n\n }\n\n}\n", "file_path": "src/watchdog.rs", "rank": 20, "score": 11.390587491816534 }, { "content": "//! API for delays with the timer\n\n//!\n\n//! Please be aware of potential overflows.\n\n//!\n\n//! # Example\n\n//!\n\n//! 
TODO Look in the `examples/` directory\n\n\n\nuse cast::{u16, u32};\n\nuse core::ops::Deref;\n\n\n\nuse crate::syscon::{ClockEnable, Syscon};\n\nuse crate::timers::TimerRegisterBlock;\n\nuse embedded_hal::blocking::delay::{DelayMs, DelayUs};\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Delay {\n\n pub(crate) scale: u32,\n\n pub(crate) timer: *const TimerRegisterBlock,\n\n pub(crate) countdown: Option<(u32, u32)>,\n", "file_path": "src/delay.rs", "rank": 21, "score": 11.158777396565753 }, { "content": " pub fn new<T>(\n\n timer: TMRSE1,\n\n pin: gpio::gpioa::PA_7<Output>,\n\n period: T,\n\n port: &mut PORT,\n\n syscon: &mut Syscon,\n\n ) -> Self\n\n where\n\n T: Into<Hertz>,\n\n {\n\n TMRSE1::enable(syscon);\n\n\n\n let frequency = period.into().0;\n\n let ticks = syscon.clocks.timsclk().0 / frequency;\n\n assert!(ticks < 0x10000);\n\n\n\n // Enable output\n\n port.porta_sel.modify(|_, w| w.pa07().tmrse1_out());\n\n timer.ctrl.write(|w| w.ena().set_bit().wmod().pwm());\n\n let mut pwm_pin = Pwm {\n", "file_path": "src/pwm.rs", "rank": 22, "score": 10.591634992125066 }, { "content": "}\n\n\n\n// NOTE(unsafe) This only reads\n\nunsafe impl Sync for Delay {}\n\n// NOTE(unsafe) This only reads\n\nunsafe impl Send for Delay {}\n\n\n\nimpl Delay {\n\n pub fn new<TIMER>(timer: TIMER, syscon: &mut Syscon) -> Delay\n\n where\n\n TIMER: Deref<Target = TimerRegisterBlock> + ClockEnable,\n\n {\n\n TIMER::enable(syscon);\n\n assert!(syscon.clocks.timsclk().0 >= 1_000_000);\n\n let scale = syscon.clocks.timsclk().0 / 1_000_000;\n\n\n\n // Count to the highest possible value\n\n unsafe { timer.tarval.write(|w| w.bits(0xFFFFFFFF)) };\n\n // Start counting\n\n timer.ctrl.write(|w| w.ena().set_bit());\n", "file_path": "src/delay.rs", "rank": 23, "score": 9.559576643107425 }, { "content": "#![no_std]\n\n#![allow(non_camel_case_types)]\n\n\n\npub use swm050;\n\n\n\npub mod delay;\n\npub mod gpio;\n\npub mod prelude;\n\npub mod pwm;\n\npub mod syscon;\n\npub mod time;\n\npub mod 
timers;\n\npub mod watchdog;\n", "file_path": "src/lib.rs", "rank": 24, "score": 9.302189383335946 }, { "content": "use core::ops::Deref;\n\n\n\nuse embedded_hal::PwmPin;\n\nuse swm050::{PORT, TMRSE1};\n\n\n\nuse crate::gpio;\n\nuse crate::gpio::Output;\n\nuse crate::syscon::{ClockEnable, Syscon};\n\nuse crate::time::Hertz;\n\nuse crate::timers::TimerRegisterBlock;\n\n\n\npub struct Pwm<TIMER, PIN> {\n\n timer: TIMER,\n\n pin: PIN,\n\n ticks: u16,\n\n}\n\n\n\n// TODO TMRSE0 isn't supported yet, since it shares pins with swd\n\n// Not sure how to do this while ensuring that swd isn't accidentally disabled\n\nimpl Pwm<TMRSE1, gpio::gpioa::PA_7<Output>> {\n", "file_path": "src/pwm.rs", "rank": 25, "score": 8.832417508782573 }, { "content": "pub use crate::gpio::GpioExt as _swm050_hal_gpio_GpioExt;\n\npub use crate::syscon::SysconExt as _swm050_hal_syscon_SysconExt;\n\n\n\npub use embedded_hal::digital::v2::InputPin as _embedded_hal_gpio_InputPin;\n\npub use embedded_hal::digital::v2::OutputPin as _embedded_hal_gpio_OutputPin;\n\npub use embedded_hal::digital::v2::StatefulOutputPin as _embedded_hal_gpio_StatefulOutputPin;\n\npub use embedded_hal::digital::v2::ToggleableOutputPin as _embedded_hal_gpio_ToggleableOutputPin;\n\n\n\npub use embedded_hal::prelude::*;\n", "file_path": "src/prelude.rs", "rank": 26, "score": 7.086070349308198 }, { "content": " timer: timer,\n\n pin: pin,\n\n ticks: ticks as u16,\n\n };\n\n pwm_pin.set_duty(0);\n\n pwm_pin\n\n }\n\n}\n\nimpl Pwm<TMRSE1, gpio::gpioa::PA_7<Output>> {\n\n pub fn release(self, port: &mut PORT) -> (TMRSE1, gpio::gpioa::PA_7<Output>) {\n\n port.porta_sel.modify(|_, w| w.pa07().gpio());\n\n (self.timer, self.pin)\n\n }\n\n}\n\n\n\n// The pwm implementation is a bit curious.\n\n// You can seperately define the high & low time, so the total period can be up to 2 * 2^16,\n\n// but only 2^16 for the high/low time\n\nimpl<TIMER, GPIO> PwmPin for Pwm<TIMER, GPIO>\n\nwhere\n", "file_path": "src/pwm.rs", "rank": 27, "score": 
5.7368105639213205 }, { "content": "\n\nimpl watchdog::Watchdog for Watchdog {\n\n /// Feed the watchdog, so that at least one `period` goes by before the next\n\n /// reset\n\n fn feed(&mut self) {\n\n self.wdt.crr.write(|w| w.crr().reset());\n\n }\n\n}\n\n\n\nimpl watchdog::WatchdogEnable for Watchdog {\n\n type Time = Hertz;\n\n fn start<T>(&mut self, period: T)\n\n where\n\n T: Into<Hertz>,\n\n {\n\n let time = period.into();\n\n // TODO Verify function\n\n // As far as i understand the data sheet, it's basically like this:\n\n // time2 is used for mode 0, after the interrupt the counter is set\n\n // to timer2, otherwise timer1\n", "file_path": "src/watchdog.rs", "rank": 28, "score": 5.229176602404573 }, { "content": " Delay {\n\n timer: &(*timer),\n\n scale,\n\n countdown: None,\n\n }\n\n }\n\n}\n\n\n\nimpl DelayMs<u32> for Delay {\n\n // At 48 MHz, calling delay_us with ms * 1_000 directly overflows at 0x15D868 (just over the max u16 value)\n\n fn delay_ms(&mut self, mut ms: u32) {\n\n const MAX_MS: u32 = 0x0000_FFFF;\n\n while ms != 0 {\n\n let current_ms = if ms <= MAX_MS { ms } else { MAX_MS };\n\n self.delay_us(current_ms * 1_000);\n\n ms -= current_ms;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/delay.rs", "rank": 29, "score": 4.870747050353877 }, { "content": " TIMER: Deref<Target = TimerRegisterBlock>,\n\n{\n\n type Duty = u16;\n\n\n\n fn disable(&mut self) {\n\n self.timer.ctrl.write(|w| w.ena().clear_bit());\n\n }\n\n\n\n fn enable(&mut self) {\n\n // pause\n\n self.timer.ctrl.write(|w| w.ena().set_bit());\n\n }\n\n\n\n fn get_duty(&self) -> Self::Duty {\n\n (self.timer.tarval.read().bits() & 0xFFFF) as u16\n\n }\n\n\n\n fn get_max_duty(&self) -> Self::Duty {\n\n self.ticks\n\n }\n\n\n\n fn set_duty(&mut self, duty: Self::Duty) {\n\n let low = duty - self.get_max_duty();\n\n self.timer\n\n .tarval\n\n .write(|w| unsafe { w.bits(duty as u32 | ((low as u32) << 16)) });\n\n }\n\n}\n", "file_path": "src/pwm.rs", "rank": 30, "score": 4.5865429743274735 }, 
{ "content": "impl DelayMs<u16> for Delay {\n\n fn delay_ms(&mut self, ms: u16) {\n\n self.delay_us(u32::from(ms) * 1_000);\n\n }\n\n}\n\n\n\nimpl DelayMs<u8> for Delay {\n\n fn delay_ms(&mut self, ms: u8) {\n\n self.delay_ms(u16(ms));\n\n }\n\n}\n\n\n\nimpl DelayUs<u32> for Delay {\n\n fn delay_us(&mut self, us: u32) {\n\n let ticks = us * self.scale;\n\n\n\n let start_count = unsafe { (*(self.timer)).curval.read().bits() };\n\n\n\n while (unsafe {\n\n (*(self.timer))\n", "file_path": "src/delay.rs", "rank": 31, "score": 3.9682809830808994 }, { "content": " let ticks = self.clocks.sclk().0 / time.0;\n\n let mut timerticks = (ticks >> 16).next_power_of_two();\n\n let mut timer1 = 0;\n\n while timerticks != 0 {\n\n timer1 += 1;\n\n timerticks >>= 1;\n\n }\n\n // This shouldn't happen, but let's make sure\n\n assert!(timer1 < 16);\n\n self.wdt.torr.write(|w| w.top_init().bits(timer1 as u8));\n\n self.wdt.cr.write(|w| w.en().set_bit());\n\n }\n\n}\n", "file_path": "src/watchdog.rs", "rank": 32, "score": 3.6334006456572654 }, { "content": "\n\n/// Floating input (type state)\n\npub struct Floating;\n\n\n\n/// Pulled up input (type state)\n\npub struct PullUp;\n\n\n\n/// Output mode\n\npub struct Output;\n\n\n\nuse embedded_hal::digital::v2::{toggleable, InputPin, OutputPin, StatefulOutputPin};\n\n\n\n/// Fully erased pin\n\npub struct Pin<MODE> {\n\n i: u8,\n\n port: *const dyn GpioRegExt,\n\n _mode: PhantomData<MODE>,\n\n}\n\n\n\n// NOTE(unsafe) The only write acess is to BSRR, which is thread safe\n", "file_path": "src/gpio.rs", "rank": 33, "score": 3.6004162168557774 }, { "content": " }\n\n }\n\n };\n\n}\n\n\n\ngpio_trait!(gpioa);\n\n\n\n#[allow(unused)]\n\nmacro_rules! 
gpio {\n\n ($GPIOX:ident, $gpiox:ident, [\n\n $($PXi:ident: ($pxi:ident, $i:expr, $MODE:ty),)+\n\n ]) => {\n\n /// GPIO\n\n pub mod $gpiox {\n\n use core::marker::PhantomData;\n\n\n\n use crate::swm050::$GPIOX;\n\n use embedded_hal::digital::v2::{toggleable, InputPin, OutputPin, StatefulOutputPin};\n\n\n\n use cortex_m::interrupt::CriticalSection;\n", "file_path": "src/gpio.rs", "rank": 34, "score": 3.593871623742155 }, { "content": "\n\n use super::{Floating, GpioExt, GpioRegExt, Input, Output, PullUp, Pin};\n\n\n\n /// GPIO parts\n\n pub struct Parts {\n\n $(\n\n /// Pin\n\n pub $pxi: $PXi<$MODE>,\n\n )+\n\n }\n\n\n\n impl GpioExt for $GPIOX {\n\n type Parts = Parts;\n\n\n\n fn split(self) -> Parts {\n\n Parts {\n\n $(\n\n $pxi: $PXi { _mode: PhantomData },\n\n )+\n\n }\n", "file_path": "src/gpio.rs", "rank": 35, "score": 3.4424756053820684 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_halt;\n\n\n\nuse swm050_hal as hal;\n\n\n\nuse crate::hal::delay::Delay;\n\nuse crate::hal::prelude::*;\n\nuse crate::hal::swm050;\n\n\n\nuse cortex_m_rt::entry;\n\n\n\n#[entry]\n", "file_path": "examples/blinky_delay.rs", "rank": 36, "score": 2.9528586052771915 }, { "content": "#![no_main]\n\n#![no_std]\n\n\n\n#[allow(unused)]\n\nuse panic_halt;\n\n\n\nuse swm050_hal as hal;\n\n\n\nuse crate::hal::prelude::*;\n\nuse crate::hal::swm050;\n\n\n\nuse cortex_m_rt::entry;\n\n\n\n#[entry]\n", "file_path": "examples/blinky.rs", "rank": 37, "score": 2.931109787257889 }, { "content": "# swm050-hal\n\n\n\nA hal for swm050 chips. Large portions\n\nof this hal are based on the\n\n[_stm32f0xx-hal_](https://github.com/stm32-rs/stm32f0xx-hal) hal.\n\n\n\n## About\n\nThis is a *very* basic mcu. It has gpio and two timers. 
That's it.\n\nIt doesn't even implement the SysTick timer.\n\n\n\n## Flashing\n\n\n\nI use the branch from\n\nhttps://github.com/blacksphere/blackmagic/pull/401.\n\nOther options are using the jlink with some plugin by synwit or this \n\n[openocd](http://openocd.zylin.com/#/c/4927/) branch. You could also try using\n\npyOCD with the pack file from the manufacturer.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n\ndual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 38, "score": 2.8474229317554363 }, { "content": "/// Bits per second\n\n#[derive(PartialEq, PartialOrd, Clone, Copy)]\n\npub struct Bps(pub u32);\n\n\n\n#[derive(PartialEq, PartialOrd, Clone, Copy)]\n\npub struct Hertz(pub u32);\n\n\n\n#[derive(PartialEq, PartialOrd, Clone, Copy)]\n\npub struct KiloHertz(pub u32);\n\n\n\n#[derive(PartialEq, PartialOrd, Clone, Copy)]\n\npub struct MegaHertz(pub u32);\n\n\n\n/// Extension trait that adds convenience methods to the `u32` type\n", "file_path": "src/time.rs", "rank": 39, "score": 2.782272559932578 }, { "content": " }\n\n\n\n impl<MODE> $PXi<MODE> {\n\n /// Erases the pin number from the type\n\n ///\n\n /// This is useful when you want to collect the pins into an array where you\n\n /// need all the elements to have the same type\n\n pub fn downgrade(self) -> Pin<MODE> {\n\n Pin {\n\n i: $i,\n\n port: $GPIOX::ptr() as *const dyn GpioRegExt,\n\n _mode: self._mode,\n\n }\n\n }\n\n }\n\n\n\n impl StatefulOutputPin for $PXi<Output> {\n\n fn is_set_high(&self) -> Result<bool, ()> {\n\n 
self.is_set_low().map(|low| !low)\n\n }\n", "file_path": "src/gpio.rs", "rank": 40, "score": 2.5774552745946067 }, { "content": " .curval\n\n .read()\n\n .bits()\n\n .wrapping_sub(start_count)\n\n }) < ticks\n\n {}\n\n }\n\n}\n\n\n\nimpl DelayUs<u16> for Delay {\n\n fn delay_us(&mut self, us: u16) {\n\n self.delay_us(u32(us))\n\n }\n\n}\n\n\n\nimpl DelayUs<u8> for Delay {\n\n fn delay_us(&mut self, us: u8) {\n\n self.delay_us(u32(us))\n\n }\n\n}\n", "file_path": "src/delay.rs", "rank": 41, "score": 2.0278302428516235 }, { "content": " }\n\n }\n\n\n\n $(\n\n /// Pin\n\n pub struct $PXi<MODE> {\n\n _mode: PhantomData<MODE>,\n\n }\n\n\n\n impl<MODE> $PXi<MODE> {\n\n /// Configures the pin to operate as a floating input pin\n\n pub fn into_floating_input(self, _cs: &CriticalSection) -> $PXi<Input<Floating>> {\n\n unsafe {\n\n &(*$GPIOX::ptr())\n\n .dir\n\n .modify(|r, w| w.bits(r.bits() & !(1 << $i)));\n\n }\n\n $PXi { _mode: PhantomData }\n\n }\n\n\n", "file_path": "src/gpio.rs", "rank": 42, "score": 1.9211463270512832 }, { "content": "\n\n fn is_set_low(&self) -> Result<bool, ()> {\n\n Ok(unsafe { (*$GPIOX::ptr()).is_set_low($i) })\n\n }\n\n }\n\n\n\n impl OutputPin for $PXi<Output> {\n\n type Error = ();\n\n fn set_high(&mut self) -> Result<(), ()> {\n\n unsafe { (*$GPIOX::ptr()).set_high($i) }\n\n Ok(())\n\n }\n\n\n\n fn set_low(&mut self) -> Result<(), ()>{\n\n unsafe { (*$GPIOX::ptr()).set_low($i) }\n\n Ok(())\n\n }\n\n }\n\n\n\n impl toggleable::Default for $PXi<Output> {}\n", "file_path": "src/gpio.rs", "rank": 43, "score": 1.904447532781754 }, { "content": "//! 
General Purpose Input / Output\n\n\n\nuse core::marker::PhantomData;\n\n\n\n// TODO Implement marker for af with PushPull or OpenDrain\n\n/// Extension trait to split a GPIO peripheral in independent pins and registers\n", "file_path": "src/gpio.rs", "rank": 44, "score": 1.6949320071580993 }, { "content": " /// Configures the pin to operate as a pulled up input pin\n\n pub fn into_pull_up_input(self, _cs: &CriticalSection) -> $PXi<Input<PullUp>> {\n\n unimplemented!();\n\n // unsafe {\n\n // &(*$GPIOX::ptr())\n\n // .dir\n\n // .modify(|r, w| w.bits(r.bits() & !(1 << $i)));\n\n // }\n\n // $PXi { _mode: PhantomData }\n\n }\n\n\n\n /// Configures the pin to operate as an push pull output pin\n\n pub fn into_push_pull_output(self, _cs: &CriticalSection) -> $PXi<Output> {\n\n unsafe {\n\n &(*$GPIOX::ptr())\n\n .dir\n\n .modify(|r, w| w.bits(r.bits() | (1 << $i)));\n\n }\n\n $PXi { _mode: PhantomData }\n\n }\n", "file_path": "src/gpio.rs", "rank": 45, "score": 1.684783018304282 }, { "content": " Ok(())\n\n }\n\n\n\n fn set_low(&mut self) -> Result<(), Self::Error> {\n\n unsafe { (*self.port).set_low(self.i) }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl toggleable::Default for Pin<Output> {}\n\n\n\nimpl<MODE> InputPin for Pin<Input<MODE>> {\n\n type Error = ();\n\n\n\n fn is_high(&self) -> Result<bool, ()> {\n\n self.is_low().map(|low| !low)\n\n }\n\n\n\n fn is_low(&self) -> Result<bool, ()> {\n\n Ok(unsafe { (*self.port).is_low(self.i) })\n", "file_path": "src/gpio.rs", "rank": 46, "score": 1.3741525604400322 }, { "content": "unsafe impl<MODE> Sync for Pin<MODE> {}\n\n// NOTE(unsafe) this only enables read access to the same pin from multiple\n\n// threads\n\nunsafe impl<MODE> Send for Pin<MODE> {}\n\n\n\nimpl StatefulOutputPin for Pin<Output> {\n\n fn is_set_high(&self) -> Result<bool, ()> {\n\n self.is_set_low().map(|low| !low)\n\n }\n\n\n\n fn is_set_low(&self) -> Result<bool, ()> {\n\n Ok(unsafe { (*self.port).is_set_low(self.i) })\n\n }\n\n}\n\n\n\nimpl OutputPin for 
Pin<Output> {\n\n type Error = ();\n\n\n\n fn set_high(&mut self) -> Result<(), Self::Error> {\n\n unsafe { (*self.port).set_high(self.i) };\n", "file_path": "src/gpio.rs", "rank": 47, "score": 1.087649710000798 } ]
Rust
Chapter07/users-pool/src/main.rs
dominicbachmann/Hands-On-Microservices-with-Rust
6ca0a00ac8d8bf9123ce4eab2092b57408d8ed1c
use clap::{ crate_authors, crate_description, crate_name, crate_version, App, AppSettings, Arg, SubCommand, }; use postgres::{Connection, Error}; use r2d2_postgres::{TlsMode, PostgresConnectionManager}; use rayon::prelude::*; use serde_derive::Deserialize; use std::io; fn create_table(conn: &Connection) -> Result<(), Error> { conn.execute("CREATE TABLE users ( id SERIAL PRIMARY KEY, name VARCHAR NOT NULL, email VARCHAR NOT NULL )", &[]) .map(drop) } fn create_user(conn: &Connection, user: &User) -> Result<(), Error> { conn.execute("INSERT INTO users (name, email) VALUES ($1, $2)", &[&user.name, &user.email]) .map(drop) } fn list_users(conn: &Connection) -> Result<Vec<User>, Error> { let res = conn.query("SELECT name, email FROM users", &[])?.into_iter() .map(|row| { User { name: row.get(0), email: row.get(1), } }) .collect(); Ok(res) } #[derive(Deserialize, Debug)] struct User { name: String, email: String, } const CMD_CREATE: &str = "create"; const CMD_ADD: &str = "add"; const CMD_LIST: &str = "list"; const CMD_IMPORT: &str = "import"; fn main() -> Result<(), failure::Error> { let matches = App::new(crate_name!()) .version(crate_version!()) .author(crate_authors!()) .about(crate_description!()) .setting(AppSettings::SubcommandRequired) .arg( Arg::with_name("database") .short("d") .long("db") .value_name("ADDR") .help("Sets an address of db connection") .takes_value(true), ) .subcommand(SubCommand::with_name(CMD_CREATE).about("create users table")) .subcommand(SubCommand::with_name(CMD_ADD).about("add user to the table") .arg(Arg::with_name("NAME") .help("Sets the name of a user") .required(true) .index(1)) .arg(Arg::with_name("EMAIL") .help("Sets the email of a user") .required(true) .index(2))) .subcommand(SubCommand::with_name(CMD_LIST).about("print list of users")) .subcommand(SubCommand::with_name(CMD_IMPORT).about("import users from csv")) .get_matches(); let addr = matches.value_of("database") .unwrap_or("postgres://postgres@localhost:5432"); let manager = 
PostgresConnectionManager::new(addr, TlsMode::None)?; let pool = r2d2::Pool::new(manager)?; let conn = pool.get()?; match matches.subcommand() { (CMD_CREATE, _) => { create_table(&conn)?; } (CMD_ADD, Some(matches)) => { let name = matches.value_of("NAME").unwrap().to_owned(); let email = matches.value_of("EMAIL").unwrap().to_owned(); let user = User { name, email }; create_user(&conn, &user)?; } (CMD_LIST, _) => { let list = list_users(&conn)?; for user in list { println!("Name: {:20} Email: {:20}", user.name, user.email); } } (CMD_IMPORT, _) => { let mut rdr = csv::Reader::from_reader(io::stdin()); let mut users = Vec::new(); for user in rdr.deserialize() { users.push(user?); } users.par_iter() .map(|user| -> Result<(), failure::Error> { let conn = pool.get()?; create_user(&conn, &user)?; Ok(()) }) .for_each(drop); } _ => { matches.usage(); } } Ok(()) }
use clap::{ crate_authors, crate_description, crate_name, crate_version, App, AppSettings, Arg, SubCommand, }; use postgres::{Connection, Error}; use r2d2_postgres::{TlsMode, PostgresConnectionManager}; use rayon::prelude::*; use serde_derive::Deserialize; use std::io; fn create_table(conn: &Connection) -> Result<(), Error> { conn.execute("CREATE TABLE users ( id SERIAL PRIMARY KEY, name VARCHAR NOT NULL, email VARCHAR NOT NULL )", &[]) .map(drop) } fn create_user(conn: &Connection, user: &User) -> Result<(), Error> { conn.execute("INSERT INTO users (name, email) VALUES ($1, $2)", &[&user.name, &user.email]) .map(drop) }
#[derive(Deserialize, Debug)] struct User { name: String, email: String, } const CMD_CREATE: &str = "create"; const CMD_ADD: &str = "add"; const CMD_LIST: &str = "list"; const CMD_IMPORT: &str = "import"; fn main() -> Result<(), failure::Error> { let matches = App::new(crate_name!()) .version(crate_version!()) .author(crate_authors!()) .about(crate_description!()) .setting(AppSettings::SubcommandRequired) .arg( Arg::with_name("database") .short("d") .long("db") .value_name("ADDR") .help("Sets an address of db connection") .takes_value(true), ) .subcommand(SubCommand::with_name(CMD_CREATE).about("create users table")) .subcommand(SubCommand::with_name(CMD_ADD).about("add user to the table") .arg(Arg::with_name("NAME") .help("Sets the name of a user") .required(true) .index(1)) .arg(Arg::with_name("EMAIL") .help("Sets the email of a user") .required(true) .index(2))) .subcommand(SubCommand::with_name(CMD_LIST).about("print list of users")) .subcommand(SubCommand::with_name(CMD_IMPORT).about("import users from csv")) .get_matches(); let addr = matches.value_of("database") .unwrap_or("postgres://postgres@localhost:5432"); let manager = PostgresConnectionManager::new(addr, TlsMode::None)?; let pool = r2d2::Pool::new(manager)?; let conn = pool.get()?; match matches.subcommand() { (CMD_CREATE, _) => { create_table(&conn)?; } (CMD_ADD, Some(matches)) => { let name = matches.value_of("NAME").unwrap().to_owned(); let email = matches.value_of("EMAIL").unwrap().to_owned(); let user = User { name, email }; create_user(&conn, &user)?; } (CMD_LIST, _) => { let list = list_users(&conn)?; for user in list { println!("Name: {:20} Email: {:20}", user.name, user.email); } } (CMD_IMPORT, _) => { let mut rdr = csv::Reader::from_reader(io::stdin()); let mut users = Vec::new(); for user in rdr.deserialize() { users.push(user?); } users.par_iter() .map(|user| -> Result<(), failure::Error> { let conn = pool.get()?; create_user(&conn, &user)?; Ok(()) }) .for_each(drop); } _ => { 
matches.usage(); } } Ok(()) }
fn list_users(conn: &Connection) -> Result<Vec<User>, Error> { let res = conn.query("SELECT name, email FROM users", &[])?.into_iter() .map(|row| { User { name: row.get(0), email: row.get(1), } }) .collect(); Ok(res) }
function_block-full_function
[ { "content": "fn create_user(conn: &Connection, name: &str, email: &str) -> Result<(), Error> {\n\n conn.execute(\"INSERT INTO users (name, email) VALUES ($1, $2)\",\n\n &[&name, &email])\n\n .map(drop)\n\n}\n\n\n", "file_path": "Chapter07/users/src/main.rs", "rank": 0, "score": 334152.71837063413 }, { "content": "-- Sets up a trigger for the given table to automatically set a column called\n", "file_path": "Chapter08/chat/migrations/00000000000000_diesel_initial_setup/up.sql", "rank": 1, "score": 283659.7752070629 }, { "content": "fn create_table(conn: &Connection) -> Result<(), Error> {\n\n conn.execute(\"CREATE TABLE users (\n\n id SERIAL PRIMARY KEY,\n\n name VARCHAR NOT NULL,\n\n email VARCHAR NOT NULL\n\n )\", &[])\n\n .map(drop)\n\n}\n\n\n", "file_path": "Chapter07/users/src/main.rs", "rank": 3, "score": 277425.64369433676 }, { "content": "fn create_user(conn: &mut Conn, user: &User) -> Result<(), Error> {\n\n conn.prep_exec(\"INSERT INTO users (name, email) VALUES (?, ?)\",\n\n (&user.name, &user.email))\n\n .map(drop)\n\n}\n\n\n", "file_path": "Chapter07/users-mysql/src/main.rs", "rank": 6, "score": 241363.4371889955 }, { "content": "fn create_table(conn: &mut Conn) -> Result<(), Error> {\n\n conn.query(\"CREATE TABLE users (\n\n id INT(6) UNSIGNED AUTO_INCREMENT PRIMARY KEY,\n\n name VARCHAR(50) NOT NULL,\n\n email VARCHAR(50) NOT NULL\n\n )\")\n\n .map(drop)\n\n}\n\n\n", "file_path": "Chapter07/users-mysql/src/main.rs", "rank": 7, "score": 231825.2850157528 }, { "content": "fn main() -> Result<(), Error> {\n\n\n\n let matches = App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .setting(AppSettings::SubcommandRequired)\n\n .arg(\n\n Arg::with_name(\"database\")\n\n .short(\"d\")\n\n .long(\"db\")\n\n .value_name(\"ADDR\")\n\n .help(\"Sets an address of db connection\")\n\n .takes_value(true),\n\n )\n\n .subcommand(SubCommand::with_name(CMD_CREATE).about(\"create users table\"))\n\n 
.subcommand(SubCommand::with_name(CMD_ADD).about(\"add user to the table\")\n\n .arg(Arg::with_name(\"NAME\")\n\n .help(\"Sets the name of a user\")\n\n .required(true)\n", "file_path": "Chapter07/users/src/main.rs", "rank": 8, "score": 231739.85152171436 }, { "content": "fn main() -> Result<(), Error> {\n\n\n\n let matches = App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .setting(AppSettings::SubcommandRequired)\n\n .arg(\n\n Arg::with_name(\"database\")\n\n .short(\"d\")\n\n .long(\"db\")\n\n .value_name(\"FILE\")\n\n .help(\"Sets a file name of a database\")\n\n .takes_value(true),\n\n )\n\n .subcommand(SubCommand::with_name(CMD_ADD).about(\"add user to the table\")\n\n .arg(Arg::with_name(\"NAME\")\n\n .help(\"Sets the name of a user\")\n\n .required(true)\n\n .index(1))\n", "file_path": "Chapter08/users/src/main.rs", "rank": 9, "score": 231739.85152171436 }, { "content": "fn list_users(conn: &Connection) -> Result<Vec<(String, String)>, Error> {\n\n let res = conn.query(\"SELECT name, email FROM users\", &[])?.into_iter()\n\n .map(|row| (row.get(0), row.get(1)))\n\n .collect();\n\n Ok(res)\n\n}\n\n\n\nconst CMD_CREATE: &str = \"create\";\n\nconst CMD_ADD: &str = \"add\";\n\nconst CMD_LIST: &str = \"list\";\n\n\n", "file_path": "Chapter07/users/src/main.rs", "rank": 10, "score": 231137.26033578906 }, { "content": "fn list_users(conn: &mut Conn) -> Result<Vec<User>, Error> {\n\n conn.query(\"SELECT name, email FROM users\")?\n\n .into_iter()\n\n .try_fold(Vec::new(), |mut vec, row| {\n\n let row = row?;\n\n let user = User {\n\n name: row.get_opt(0).unwrap()?,\n\n email: row.get_opt(1).unwrap()?,\n\n };\n\n vec.push(user);\n\n Ok(vec)\n\n })\n\n}\n\n\n", "file_path": "Chapter07/users-mysql/src/main.rs", "rank": 11, "score": 228306.16963881502 }, { "content": "fn attr_to_string(attr: &AttributeValue) -> Result<String, Error> {\n\n if let Some(value) = &attr.s {\n\n Ok(value.to_owned())\n\n 
} else {\n\n Err(format_err!(\"no string value\"))\n\n }\n\n}\n", "file_path": "Chapter07/user-location/src/main.rs", "rank": 12, "score": 228183.22097230487 }, { "content": "fn main() -> Result<(), Error> {\n\n let matches = App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .setting(AppSettings::SubcommandRequired)\n\n .arg(\n\n Arg::with_name(\"region\")\n\n .long(\"region\")\n\n .value_name(\"REGION\")\n\n .help(\"Sets a region\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"endpoint\")\n\n .long(\"endpoint-url\")\n\n .value_name(\"URL\")\n\n .help(\"Sets an endpoint url\")\n\n .takes_value(true),\n\n )\n", "file_path": "Chapter07/user-location/src/main.rs", "rank": 13, "score": 227920.92224120727 }, { "content": "fn main() -> Result<(), Error> {\n\n env_logger::init();\n\n let mut config = config::Config::default();\n\n config.merge(config::Environment::with_prefix(\"USERS\"))?;\n\n let config: Config = config.try_into()?;\n\n let bind_address = config.address.unwrap_or(\"0.0.0.0:8000\".into());\n\n let db_address = config.database.unwrap_or(\"postgres://localhost/\".into());\n\n let manager = ConnectionManager::<PgConnection>::new(db_address);\n\n let pool = Pool::builder()\n\n .build(manager)\n\n .expect(\"Failed to create pool.\");\n\n debug!(\"Starting microservice...\");\n\n rouille::start_server(bind_address, move |request| {\n\n match handler(&request, &pool) {\n\n Ok(response) => {\n\n response\n\n },\n\n Err(err) => {\n\n Response::text(err.to_string())\n\n .with_status_code(500)\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "Chapter15/deploy/microservices/users/src/main.rs", "rank": 14, "score": 224305.17983129068 }, { "content": "fn main() -> Result<(), failure::Error> {\n\n\n\n let matches = App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .setting(AppSettings::SubcommandRequired)\n\n 
.arg(\n\n Arg::with_name(\"database\")\n\n .short(\"d\")\n\n .long(\"db\")\n\n .value_name(\"ADDR\")\n\n .help(\"Sets an address of db connection\")\n\n .takes_value(true),\n\n )\n\n .subcommand(SubCommand::with_name(CMD_CRATE).about(\"create users table\"))\n\n .subcommand(SubCommand::with_name(CMD_ADD).about(\"add user to the table\")\n\n .arg(Arg::with_name(\"NAME\")\n\n .help(\"Sets the name of a user\")\n\n .required(true)\n", "file_path": "Chapter07/users-mysql/src/main.rs", "rank": 15, "score": 218974.720168699 }, { "content": "fn main() -> Result<(), failure::Error> {\n\n\n\n let matches = App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .setting(AppSettings::SubcommandRequired)\n\n .arg(\n\n Arg::with_name(\"database\")\n\n .short(\"d\")\n\n .long(\"db\")\n\n .value_name(\"ADDR\")\n\n .help(\"Sets an address of db connection\")\n\n .takes_value(true),\n\n )\n\n .subcommand(SubCommand::with_name(CMD_ADD).about(\"add user to the table\")\n\n .arg(Arg::with_name(\"USER_ID\")\n\n .help(\"Sets the id of a user\")\n\n .required(true)\n\n .index(1))\n", "file_path": "Chapter07/user-logs/src/main.rs", "rank": 16, "score": 218974.720168699 }, { "content": "fn list_locations(conn: &DynamoDbClient, user_id: String) -> Result<Vec<Location>, Error> {\n\n let expression = format!(\"Uid = :uid\");\n\n let mut values = HashMap::new();\n\n values.insert(\":uid\".into(), s_attr(user_id));\n\n let query = QueryInput {\n\n table_name: \"Locations\".into(),\n\n key_condition_expression: Some(expression),\n\n expression_attribute_values: Some(values),\n\n ..Default::default()\n\n };\n\n let items = conn.query(query).sync()?\n\n .items\n\n .ok_or_else(|| format_err!(\"No Items\"))?;\n\n let mut locations = Vec::new();\n\n for item in items {\n\n let location = Location::from_map(item)?;\n\n locations.push(location);\n\n }\n\n Ok(locations)\n\n}\n\n\n", "file_path": "Chapter07/user-location/src/main.rs", 
"rank": 18, "score": 212099.00786276773 }, { "content": "fn send_impl(req: &mut Request<Data>) -> Result<(), Error> {\n\n let (to, code) = {\n\n let params = req.form_body().map_err(|_| format_err!(\"\"))?;\n\n let to = params.get(\"to\").ok_or(format_err!(\"to field not set\"))?.to_owned();\n\n let code = params.get(\"code\").ok_or(format_err!(\"code field not set\"))?.to_owned();\n\n (to, code)\n\n };\n\n let data = req.server_data();\n\n let to = EmailAddress::new(to.to_owned())?;\n\n let envelope = Envelope::new(None, vec![to])?;\n\n let mut params: HashMap<&str, &str> = HashMap::new();\n\n params.insert(\"code\", &code);\n\n let mut body: Vec<u8> = Vec::new();\n\n data.cache.render(\"templates/confirm.tpl\", &mut body, &params)?;\n\n let email = SendableEmail::new(envelope, \"Confirm email\".to_string(), Vec::new());\n\n let sender = data.sender.lock().unwrap().clone();\n\n sender.send(email).map_err(|_| format_err!(\"can't send email\"))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "Chapter09/emails-nickel/src/main.rs", "rank": 19, "score": 194346.00391998392 }, { "content": "fn main() -> Result<(), Error> {\n\n env_logger::init();\n\n let tx = spawn_worker()?;\n\n let addr: SocketAddr = env::var(\"ADDRESS\")?.parse()?;\n\n let mut io = IoHandler::default();\n\n let sender = Mutex::new(tx.clone());\n\n io.add_method(START_ROLL_CALL, move |_| {\n\n trace!(\"START_ROLL_CALL\");\n\n let tx = sender\n\n .lock()\n\n .map_err(to_internal)?;\n\n tx.send(Action::StartRollCall)\n\n .map_err(to_internal)\n\n .map(|_| Value::Bool(true))\n\n });\n\n let sender = Mutex::new(tx.clone());\n\n io.add_method(MARK_ITSELF, move |_| {\n\n trace!(\"MARK_ITSELF\");\n\n let tx = sender\n\n .lock()\n", "file_path": "Chapter06/jsonrpc-ring/src/main.rs", "rank": 20, "score": 193745.20731390052 }, { "content": "fn main() -> Result<(), Error> {\n\n let next = env::var(\"NEXT\")?.parse()?;\n\n let remote = Remote::new(next)?;\n\n remote.start_roll_call()?;\n\n Ok(())\n\n}\n", "file_path": 
"Chapter06/grpc-ring/src/client.rs", "rank": 21, "score": 193745.20731390052 }, { "content": "fn main() -> Result<(), Error> {\n\n env_logger::init();\n\n let mut sys = System::new(\"rabbit-actix-worker\");\n\n let _ = QueueActor::new(WokerHandler {}, &mut sys)?;\n\n let _ = sys.run();\n\n Ok(())\n\n}\n", "file_path": "Chapter12/rabbit-actix/src/worker.rs", "rank": 22, "score": 193745.20731390052 }, { "content": "fn main() -> Result<(), Error> {\n\n env_logger::init();\n\n let (tx, rx) = channel();\n\n let addr: SocketAddr = env::var(\"ADDRESS\")?.parse()?;\n\n let mut server = ServerBuilder::new_plain();\n\n server.http.set_addr(addr)?;\n\n let ring = RingImpl::new(tx);\n\n server.add_service(RingServer::new_service_def(ring));\n\n server.http.set_cpu_pool_threads(4);\n\n let _server = server.build()?;\n\n\n\n worker_loop(rx)\n\n}\n\n\n", "file_path": "Chapter06/grpc-ring/src/server.rs", "rank": 23, "score": 193745.20731390052 }, { "content": "fn main() -> Result<(), Error> {\n\n env_logger::init();\n\n let mut sys = System::new(\"rabbit-actix-server\");\n\n let tasks = Arc::new(Mutex::new(IndexMap::new()));\n\n let addr = QueueActor::new(\n\n ServerHandler {\n\n tasks: tasks.clone(),\n\n },\n\n &mut sys,\n\n )?;\n\n\n\n let state = State {\n\n tasks: tasks.clone(),\n\n addr,\n\n };\n\n server::new(move || {\n\n App::with_state(state.clone())\n\n .middleware(middleware::Logger::default())\n\n .resource(\"/\", |r| r.f(index_handler))\n\n .resource(\"/task\", |r| {\n", "file_path": "Chapter12/rabbit-actix/src/server.rs", "rank": 24, "score": 193745.20731390052 }, { "content": "fn add_activity(conn: &Database, activity: Activity) -> Result<(), Error> {\n\n let doc = doc! 
{\n\n \"user_id\": activity.user_id,\n\n \"activity\": activity.activity,\n\n \"datetime\": activity.datetime,\n\n };\n\n let coll = conn.collection(\"activities\");\n\n coll.insert_one(doc, None).map(drop)\n\n}\n\n\n", "file_path": "Chapter07/user-logs/src/main.rs", "rank": 25, "score": 193416.33730664506 }, { "content": "fn list_activities(conn: &Database) -> Result<Vec<Activity>, Error> {\n\n conn.collection(\"activities\").find(None, None)?\n\n .try_fold(Vec::new(), |mut vec, doc| {\n\n let doc = doc?;\n\n let activity: Activity = bson::from_bson(bson::Bson::Document(doc))?;\n\n vec.push(activity);\n\n Ok(vec)\n\n })\n\n}\n\n\n", "file_path": "Chapter07/user-logs/src/main.rs", "rank": 26, "score": 193416.33730664503 }, { "content": "fn main() -> Result<(), Error> {\n\n env_logger::init();\n\n let mut config = config::Config::default();\n\n config.merge(config::Environment::with_prefix(\"MAILS\"))?;\n\n let config: Config = config.try_into()?;\n\n let bind_address = config.address.unwrap_or(\"0.0.0.0:8000\".into());\n\n let smtp_address = config.smtp_address.unwrap_or(\"127.0.0.1:2525\".into());\n\n let smtp_login = config.smtp_login;\n\n let smtp_password = config.smtp_password;\n\n let tx = spawn_sender(smtp_address, smtp_login, smtp_password);\n\n let data = Data {\n\n sender: Mutex::new(tx),\n\n cache: TemplateCache::with_policy(ReloadPolicy::Always),\n\n };\n\n\n\n let mut server = Nickel::with_data(data);\n\n server.get(\"/\", middleware!(\"Mailer Microservice\"));\n\n server.post(\"/send\", send);\n\n server.listen(bind_address)\n\n .map_err(|err| format_err!(\"can't bind server: {}\", err))?;\n\n Ok(())\n\n}\n", "file_path": "Chapter15/deploy/microservices/mails/src/main.rs", "rank": 27, "score": 191038.62766351935 }, { "content": "fn main() -> Result<(), Error> {\n\n env_logger::init();\n\n let mut config = config::Config::default();\n\n config.merge(config::Environment::with_prefix(\"DBSYNC\"))?;\n\n let config: Config = config.try_into()?;\n\n let 
db_address = config.database.unwrap_or(\"postgres://localhost/\".into());\n\n debug!(\"Waiting for database...\");\n\n loop {\n\n let conn: Result<PgConnection, _> = Connection::establish(&db_address);\n\n if let Ok(conn) = conn {\n\n debug!(\"Database connected\");\n\n embedded_migrations::run(&conn)?;\n\n break;\n\n }\n\n }\n\n debug!(\"Database migrated\");\n\n Ok(())\n\n}\n\n\n", "file_path": "Chapter15/deploy/microservices/dbsync/src/main.rs", "rank": 28, "score": 191038.62766351935 }, { "content": "fn handler(request: &Request, pool: &Pool) -> Result<Response, Error> {\n\n debug!(\"Request: {:?}\", request);\n\n let resp = router!(request,\n\n (GET) (/) => {\n\n Response::text(\"Users Microservice\")\n\n },\n\n (POST) (/signup) => {\n\n let data = post_input!(request, {\n\n email: String,\n\n password: String,\n\n })?;\n\n debug!(\"Signup for {}\", data.email);\n\n let user_email = data.email.trim().to_lowercase();\n\n let user_password = pbkdf2_simple(&data.password, 12345)?;\n\n {\n\n use self::schema::users::dsl::*;\n\n let conn = pool.get()?;\n\n let user_exists: bool = select(exists(users.filter(email.eq(user_email.clone()))))\n\n .get_result(&conn)?;\n\n if !user_exists {\n", "file_path": "Chapter09/users-rouille/src/main.rs", "rank": 29, "score": 189943.18492203555 }, { "content": "fn alt_udp_echo() -> Result<(), Error> {\n\n let from = \"0.0.0.0:12345\".parse()?;\n\n let socket = UdpSocket::bind(&from)?;\n\n let framed = UdpFramed::new(socket, LinesCodec::new());\n\n let (sink, stream) = framed.split();\n\n let (tx, rx) = mpsc::channel(16);\n\n let rx = rx.map_err(|_| other(\"can't take a message\"))\n\n .fold(sink, |sink, frame| {\n\n sink.send(frame)\n\n });\n\n let process = stream.and_then(move |args| {\n\n tx.clone()\n\n .send(args)\n\n .map(drop)\n\n .map_err(other)\n\n }).collect();\n\n let execute_all = future::join_all(vec![\n\n to_box(rx),\n\n to_box(process),\n\n ]).map(drop);\n\n Ok(tokio::run(execute_all))\n\n}\n\n\n", "file_path": 
"Chapter05/futures-examples/src/main.rs", "rank": 30, "score": 188472.3204943939 }, { "content": "fn main() -> Result<(), failure::Error> {\n\n\n\n let matches = App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .setting(AppSettings::SubcommandRequired)\n\n .arg(\n\n Arg::with_name(\"database\")\n\n .short(\"d\")\n\n .long(\"db\")\n\n .value_name(\"ADDR\")\n\n .help(\"Sets an address of db connection\")\n\n .takes_value(true),\n\n )\n\n .subcommand(SubCommand::with_name(CMD_ADD).about(\"add a session\")\n\n .arg(Arg::with_name(\"TOKEN\")\n\n .help(\"Sets the token of a user\")\n\n .required(true)\n\n .index(1))\n", "file_path": "Chapter07/sessions/src/main.rs", "rank": 31, "score": 188414.74765130886 }, { "content": "fn add_location(conn: &DynamoDbClient, location: Location) -> Result<(), Error> {\n\n let mut key: HashMap<String, AttributeValue> = HashMap::new();\n\n key.insert(\"Uid\".into(), s_attr(location.user_id));\n\n key.insert(\"TimeStamp\".into(), s_attr(location.timestamp));\n\n let expression = format!(\"SET Longitude = :x, Latitude = :y\");\n\n let mut values = HashMap::new();\n\n values.insert(\":x\".into(), s_attr(location.longitude));\n\n values.insert(\":y\".into(), s_attr(location.latitude));\n\n let update = UpdateItemInput {\n\n table_name: \"Locations\".into(),\n\n key,\n\n update_expression: Some(expression),\n\n expression_attribute_values: Some(values),\n\n ..Default::default()\n\n };\n\n conn.update_item(update)\n\n .sync()\n\n .map(drop)\n\n .map_err(Error::from)\n\n}\n\n\n", "file_path": "Chapter07/user-location/src/main.rs", "rank": 32, "score": 187929.93845998248 }, { "content": "fn handler(request: &Request, pool: &Pool) -> Result<Response, Error> {\n\n debug!(\"Request: {:?}\", request);\n\n let resp = router!(request,\n\n (GET) (/) => {\n\n Response::text(\"Users Microservice\")\n\n },\n\n (POST) (/signup) => {\n\n let data = post_input!(request, {\n\n email: 
String,\n\n password: String,\n\n })?;\n\n debug!(\"Signup for {}\", data.email);\n\n let user_email = data.email.trim().to_lowercase();\n\n let user_password = pbkdf2_simple(&data.password, 12345)?;\n\n {\n\n use self::schema::users::dsl::*;\n\n let conn = pool.get()?;\n\n let user_exists: bool = select(exists(users.filter(email.eq(user_email.clone()))))\n\n .get_result(&conn)?;\n\n if !user_exists {\n", "file_path": "Chapter15/deploy/microservices/users/src/main.rs", "rank": 33, "score": 187135.57650350715 }, { "content": "fn ws_connect(req: &HttpRequest<State>) -> Result<HttpResponse, Error> {\n\n let repeater = req.state().repeater.clone().recipient();\n\n ws::start(req, NotifyActor::new(repeater))\n\n}\n\n\n\n\n\npub struct State {\n\n counter: RefCell<i64>,\n\n cache: CacheLink,\n\n repeater: Addr<RepeaterActor>,\n\n}\n\n\n\nimpl State {\n\n fn new(cache: CacheLink, repeater: Addr<RepeaterActor>) -> Self {\n\n Self {\n\n counter: RefCell::default(),\n\n cache,\n\n repeater,\n\n }\n\n }\n", "file_path": "Chapter11/router-cache-ws/src/main.rs", "rank": 34, "score": 186196.03844848805 }, { "content": "pub fn main() -> Result<(), Error> {\n\n let mut runtime = Runtime::new()?;\n\n\n\n let handshake = tokio_postgres::connect(\"postgres://postgres@localhost:5432\", NoTls);\n\n let (mut client, connection) = runtime.block_on(handshake)?;\n\n runtime.spawn(connection.map_err(drop));\n\n\n\n let execute = client.batch_execute(\n\n \"CREATE TABLE IF NOT EXISTS agents (\n\n agent TEXT NOT NULL,\n\n timestamp TIMESTAMPTZ NOT NULL DEFAULT NOW()\n\n );\");\n\n runtime.block_on(execute)?;\n\n\n\n let state = ConnState::new(client);\n\n let router = router(state);\n\n\n\n let addr = \"127.0.0.1:7878\";\n\n println!(\"Listening for requests at http://{}\", addr);\n\n gotham::start_on_executor(addr, router, runtime.executor());\n\n runtime\n\n .shutdown_on_idle()\n\n .wait()\n\n .map_err(|()| format_err!(\"can't wait for the runtime\"))\n\n}\n", "file_path": 
"Chapter09/logs-gotham/src/main.rs", "rank": 35, "score": 185708.1680009277 }, { "content": "fn main() -> Result<(), failure::Error> {\n\n env_logger::init();\n\n let mut config = config::Config::default();\n\n config.merge(config::Environment::with_prefix(\"ROUTER\"))?;\n\n let config: Config = config.try_into()?;\n\n debug!(\"Router config: {:?}\", config);\n\n let sys = actix::System::new(\"router\");\n\n\n\n let users = config.users.unwrap_or(\"http://127.0.0.1:8001\".into());\n\n let content = config.content.unwrap_or(\"http://127.0.0.1:8003\".into());\n\n let state = State {\n\n users: Arc::new(users),\n\n content: Arc::new(content),\n\n };\n\n let address = config.address.unwrap_or(\"127.0.0.1:8080\".into());\n\n server::new(move || {\n\n App::with_state(state.clone())\n\n .middleware(middleware::Logger::default())\n\n .middleware(IdentityService::new(\n\n CookieIdentityPolicy::new(&[0; 32])\n", "file_path": "Chapter15/deploy/microservices/router/src/main.rs", "rank": 36, "score": 183141.86083180225 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n simple_logger::init_with_level(log::Level::Debug)?;\n\n debug!(\"Starting lambda with Rust...\");\n\n lambda!(handler);\n\n Ok(())\n\n}\n\n\n", "file_path": "Chapter17/serverless/lambda_1/src/main.rs", "rank": 37, "score": 178395.2394762584 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n simple_logger::init_with_level(log::Level::Debug).unwrap();\n\n lambda!(rng_handler);\n\n Ok(())\n\n}\n\n\n", "file_path": "Chapter17/minimal-lambda/src/main.rs", "rank": 38, "score": 178395.2394762584 }, { "content": "fn serialize(format: &str, resp: &RngResponse) -> Result<Vec<u8>, Error> {\n\n match format {\n\n \"json\" => {\n\n Ok(serde_json::to_vec(resp)?)\n\n },\n\n \"cbor\" => {\n\n Ok(serde_cbor::to_vec(resp)?)\n\n },\n\n _ => {\n\n Err(format_err!(\"unsupported format {}\", format))\n\n },\n\n }\n\n}\n\n\n", "file_path": "Chapter04/random-service-with-multiple-formats/src/main.rs", "rank": 39, 
"score": 177188.92736014904 }, { "content": "fn spawn_worker() -> Result<Sender<Action>, Error> {\n\n let (tx, rx) = channel();\n\n let next: SocketAddr = env::var(\"NEXT\")?.parse()?;\n\n thread::spawn(move || {\n\n let remote = Remote::new(next);\n\n let mut in_roll_call = false;\n\n for action in rx.iter() {\n\n match action {\n\n Action::StartRollCall => {\n\n if !in_roll_call {\n\n if remote.start_roll_call().is_ok() {\n\n debug!(\"ON\");\n\n in_roll_call = true;\n\n }\n\n } else {\n\n if remote.mark_itself().is_ok() {\n\n debug!(\"OFF\");\n\n in_roll_call = false;\n\n }\n\n }\n", "file_path": "Chapter06/jsonrpc-ring/src/main.rs", "rank": 40, "score": 175958.5754857682 }, { "content": "fn worker_loop(receiver: Receiver<Action>) -> Result<(), Error> {\n\n let next = env::var(\"NEXT\")?.parse()?;\n\n let remote = Remote::new(next)?;\n\n let mut in_roll_call = false;\n\n for action in receiver.iter() {\n\n match action {\n\n Action::StartRollCall => {\n\n if !in_roll_call {\n\n if remote.start_roll_call().is_ok() {\n\n debug!(\"ON\");\n\n in_roll_call = true;\n\n }\n\n } else {\n\n if remote.mark_itself().is_ok() {\n\n debug!(\"OFF\");\n\n in_roll_call = false;\n\n }\n\n }\n\n }\n\n Action::MarkItself => {\n", "file_path": "Chapter06/grpc-ring/src/server.rs", "rank": 41, "score": 169388.24950580994 }, { "content": "fn remove_session(conn: &Connection, token: &str) -> Result<(), RedisError> {\n\n conn.hdel(SESSIONS, token)\n\n}\n\n\n", "file_path": "Chapter07/sessions/src/main.rs", "rank": 42, "score": 168452.07939952845 }, { "content": "fn send_impl(req: &mut Request<Data>) -> Result<(), Error> {\n\n let (to, code) = {\n\n let params = req.form_body().map_err(|(_, err)| format_err!(\"can't read form: {}\", err))?;\n\n let to = params.get(\"to\").ok_or(format_err!(\"to field not set\"))?.to_owned();\n\n let code = params.get(\"code\").ok_or(format_err!(\"code field not set\"))?.to_owned();\n\n (to, code)\n\n };\n\n let data = req.server_data();\n\n let to = 
EmailAddress::new(to.to_owned())?;\n\n let envelope = Envelope::new(None, vec![to])?;\n\n let mut params: HashMap<&str, &str> = HashMap::new();\n\n params.insert(\"code\", &code);\n\n let mut body: Vec<u8> = Vec::new();\n\n data.cache.render(\"templates/confirm.tpl\", &mut body, &params)?;\n\n let email = SendableEmail::new(envelope, \"Confirm email\".to_string(), Vec::new());\n\n let sender = data.sender.lock().unwrap().clone();\n\n sender.send(email).map_err(|err| format_err!(\"can't send email: {}\", err))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "Chapter15/deploy/microservices/mails/src/main.rs", "rank": 43, "score": 161247.58946681657 }, { "content": "fn list_sessions(conn: &Connection) -> Result<HashMap<String, String>, RedisError> {\n\n conn.hgetall(SESSIONS)\n\n}\n\n\n", "file_path": "Chapter07/sessions/src/main.rs", "rank": 44, "score": 160667.78230515617 }, { "content": "fn add_session(conn: &Connection, token: &str, uid: &str) -> Result<(), RedisError> {\n\n conn.hset(SESSIONS, token, uid)\n\n}\n\n\n", "file_path": "Chapter07/sessions/src/main.rs", "rank": 45, "score": 158613.2683127912 }, { "content": "pub fn spawn_client(sys: &mut SystemRunner) -> Result<Channel<TcpStream>, Error> {\n\n let addr = \"127.0.0.1:5672\".parse().unwrap();\n\n let fut = TcpStream::connect(&addr)\n\n .map_err(Error::from)\n\n .and_then(|stream| {\n\n let options = ConnectionOptions::default();\n\n Client::connect(stream, options).from_err::<Error>()\n\n });\n\n let (client, heartbeat) = sys.block_on(fut)?;\n\n actix::spawn(heartbeat.map_err(drop));\n\n let channel = sys.block_on(client.create_channel())?;\n\n Ok(channel)\n\n}\n\n\n", "file_path": "Chapter12/rabbit-actix/src/lib.rs", "rank": 46, "score": 148861.57741375352 }, { "content": "fn handler(event: Request, _: Context) -> Result<Response, HandlerError> {\n\n let region = Region::default();\n\n let client = DynamoDbClient::new(region);\n\n let username = event\n\n .request_context\n\n .authorizer\n\n .claims\n\n 
.get(\"cognito:username\")\n\n .unwrap()\n\n .to_owned();\n\n debug!(\"USERNAME: {}\", username);\n\n let ride_id = Uuid::new_v4().to_string();\n\n let request: RequestBody = serde_json::from_str(&event.body).unwrap();\n\n let unicorn = find_unicorn(&request.pickup_location);\n\n record_ride(&client, &ride_id, &username, &unicorn).unwrap();\n\n let body = ResponseBody {\n\n ride_id: ride_id.clone(),\n\n unicorn_name: unicorn.name.clone(),\n\n unicorn,\n\n eta: \"30 seconds\".into(),\n", "file_path": "Chapter17/serverless/lambda_1/src/main.rs", "rank": 47, "score": 135243.42132631014 }, { "content": "fn rng_handler(event: RngRequest, _ctx: Context) -> Result<RngResponse, HandlerError> {\n\n let mut rng = rand::thread_rng();\n\n let value = {\n\n match event {\n\n RngRequest::Uniform { range } => {\n\n rng.sample(Uniform::from(range)) as f64\n\n },\n\n RngRequest::Normal { mean, std_dev } => {\n\n rng.sample(Normal::new(mean, std_dev)) as f64\n\n },\n\n RngRequest::Bernoulli { p } => {\n\n rng.sample(Bernoulli::new(p)) as i8 as f64\n\n },\n\n }\n\n };\n\n Ok(RngResponse { value })\n\n}\n", "file_path": "Chapter17/minimal-lambda/src/main.rs", "rank": 48, "score": 127390.01413386493 }, { "content": "fn s_attr(s: String) -> AttributeValue {\n\n AttributeValue {\n\n s: Some(s),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "Chapter07/user-location/src/main.rs", "rank": 49, "score": 124710.85564304095 }, { "content": "type WorkerResponse = Result<Vec<u8>, Error>;\n\n\n", "file_path": "Chapter10/one-thread/src/main.rs", "rank": 50, "score": 121738.18194890927 }, { "content": "fn other<E>(err: E) -> Error\n\nwhere\n\n E: Into<Box<std::error::Error + Send + Sync>>,\n\n{\n\n Error::new(ErrorKind::Other, err)\n\n}\n\n\n", "file_path": "Chapter10/actors/src/main.rs", "rank": 51, "score": 119862.90220864968 }, { "content": "fn other<E>(err: E) -> Error\n\nwhere\n\n E: Into<Box<std::error::Error + Send + Sync>>,\n\n{\n\n Error::new(ErrorKind::Other, err)\n\n}\n\n\n", 
"file_path": "Chapter10/blocking/src/main.rs", "rank": 52, "score": 119862.90220864968 }, { "content": "fn count_up(state: &State, path: &str) -> impl Future<Item=(), Error=Error> {\n\n let path = path.to_string();\n\n let log = state.log.clone();\n\n state.count.send(Count(path.clone()))\n\n .and_then(move |value| {\n\n let message = format!(\"total requests for '{}' is {}\", path, value);\n\n log.send(Log(message))\n\n })\n\n .map_err(|err| other(err.compat()))\n\n}\n\n\n", "file_path": "Chapter10/actors/src/main.rs", "rank": 53, "score": 118992.32176138186 }, { "content": "fn other<E>(err: E) -> Error\n\nwhere\n\n E: Into<Box<std::error::Error + Send + Sync>>,\n\n{\n\n Error::new(ErrorKind::Other, err)\n\n}\n\n\n", "file_path": "Chapter10/one-thread/src/main.rs", "rank": 54, "score": 118080.4950602045 }, { "content": "fn other<E>(err: E) -> Error\n\nwhere\n\n E: Into<Box<std::error::Error + Send + Sync>>,\n\n{\n\n Error::new(ErrorKind::Other, err)\n\n}\n\n\n", "file_path": "Chapter10/thread-pool/src/main.rs", "rank": 55, "score": 118080.4950602045 }, { "content": "fn other<E>(err: E) -> Error\n\nwhere\n\n E: Into<Box<std::error::Error + Send + Sync>>,\n\n{\n\n Error::new(ErrorKind::Other, err)\n\n}\n\n\n", "file_path": "Chapter05/images-service/src/main.rs", "rank": 56, "score": 118080.4950602045 }, { "content": "fn get_request(url: &str) -> impl Future<Item = Vec<u8>, Error = Error> {\n\n client::ClientRequest::get(url)\n\n .finish().into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| resp.body().from_err())\n\n .map(|bytes| bytes.to_vec())\n\n })\n\n}\n\n\n", "file_path": "Chapter13/units/src/lib.rs", "rank": 57, "score": 117726.45507267848 }, { "content": "fn get_request(url: &str) -> impl Future<Item = Vec<u8>, Error = Error> {\n\n client::ClientRequest::get(url)\n\n .finish().into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| resp.body().from_err())\n\n .map(|bytes| 
bytes.to_vec())\n\n })\n\n}\n\n\n", "file_path": "Chapter11/router/src/main.rs", "rank": 58, "score": 117726.45507267848 }, { "content": "fn get_request(url: &str) -> impl Future<Item = Vec<u8>, Error = Error> {\n\n client::ClientRequest::get(url)\n\n .finish().into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| resp.body().from_err())\n\n .map(|bytes| bytes.to_vec())\n\n })\n\n}\n\n\n", "file_path": "Chapter11/router-cache/src/main.rs", "rank": 59, "score": 116508.54645591011 }, { "content": "fn get_request(url: &str) -> impl Future<Item = Vec<u8>, Error = Error> {\n\n client::ClientRequest::get(url)\n\n .finish().into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| resp.body().from_err())\n\n .map(|bytes| bytes.to_vec())\n\n })\n\n}\n\n\n", "file_path": "Chapter11/router-cache-ws/src/main.rs", "rank": 60, "score": 115335.83193756372 }, { "content": "fn get_req(url: &str) -> impl Future<Item = Vec<u8>, Error = Error> {\n\n client::ClientRequest::get(url)\n\n .finish().into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| resp.body().from_err())\n\n .map(|bytes| bytes.to_vec())\n\n })\n\n}\n\n\n", "file_path": "Chapter15/deploy/microservices/router/src/main.rs", "rank": 61, "score": 115335.83193756372 }, { "content": "fn other<E>(err: E) -> io::Error\n\nwhere\n\n E: Into<Box<std::error::Error + Send + Sync>>,\n\n{\n\n io::Error::new(io::ErrorKind::Other, err)\n\n}\n\n\n", "file_path": "Chapter05/futures-examples/src/main.rs", "rank": 62, "score": 113645.83733331664 }, { "content": "fn value(req: HttpRequest<AppState>) -> impl Responder {\n\n format!(\"Counter: {}\", req.state().0.borrow())\n\n}\n\n\n", "file_path": "Chapter11/example-11-3-json/src/main.rs", "rank": 63, "score": 113441.12084876458 }, { "content": "fn value(req: HttpRequest<AppState>) -> impl Responder {\n\n format!(\"Counter: {}\", 
req.state().0.borrow())\n\n}\n\n\n", "file_path": "Chapter11/example-11-2-routes/src/main.rs", "rank": 64, "score": 113441.12084876458 }, { "content": "#[test]\n\nfn users_healthcheck() {\n\n let mut api = WebApi::users();\n\n api.healthcheck(\"/\", \"Users Microservice\");\n\n}\n\n\n", "file_path": "Chapter13/integration/tests/users.rs", "rank": 65, "score": 110781.61460173383 }, { "content": "fn post_request<T, O>(url: &str, params: T) -> impl Future<Item = O, Error = Error>\n\nwhere\n\n T: Serialize,\n\n O: for <'de> Deserialize<'de> + 'static,\n\n{\n\n client::ClientRequest::post(url)\n\n .form(params)\n\n .into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| {\n\n if resp.status().is_success() {\n\n let fut = resp\n\n .json::<O>()\n\n .from_err();\n\n boxed(fut)\n\n } else {\n\n error!(\"Microservice error: {}\", resp.status());\n\n let fut = Err(format_err!(\"microservice error\"))\n", "file_path": "Chapter13/units/src/lib.rs", "rank": 66, "score": 109343.96772817563 }, { "content": "fn post_request<T, O>(url: &str, params: T) -> impl Future<Item = O, Error = Error>\n\nwhere\n\n T: Serialize,\n\n O: for <'de> Deserialize<'de> + 'static,\n\n{\n\n client::ClientRequest::post(url)\n\n .form(params)\n\n .into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| {\n\n if resp.status().is_success() {\n\n let fut = resp\n\n .json::<O>()\n\n .from_err();\n\n boxed(fut)\n\n } else {\n\n error!(\"Microservice error: {}\", resp.status());\n\n let fut = Err(format_err!(\"microservice error\"))\n", "file_path": "Chapter11/router/src/main.rs", "rank": 67, "score": 109343.96772817563 }, { "content": "fn request<T, O>(url: &str, params: T) -> impl Future<Item = O, Error = Error>\n\nwhere\n\n T: Serialize,\n\n O: for <'de> Deserialize<'de> + 'static,\n\n{\n\n client::ClientRequest::post(url)\n\n .form(params)\n\n .into_future()\n\n .and_then(|req| {\n\n req.send()\n\n 
.map_err(Error::from)\n\n .and_then(|resp| {\n\n if resp.status().is_success() {\n\n let fut = resp\n\n .json::<O>()\n\n .from_err();\n\n boxed(fut)\n\n } else {\n\n error!(\"Microservice error: {}\", resp.status());\n\n let fut = Err(format_err!(\"microservice error\"))\n", "file_path": "Chapter15/deploy/microservices/router/src/main.rs", "rank": 68, "score": 108254.18630161582 }, { "content": "fn post_request<T, O>(url: &str, params: T) -> impl Future<Item = O, Error = Error>\n\nwhere\n\n T: Serialize,\n\n O: for <'de> Deserialize<'de> + 'static,\n\n{\n\n client::ClientRequest::post(url)\n\n .form(params)\n\n .into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| {\n\n if resp.status().is_success() {\n\n let fut = resp\n\n .json::<O>()\n\n .from_err();\n\n boxed(fut)\n\n } else {\n\n error!(\"Microservice error: {}\", resp.status());\n\n let fut = Err(format_err!(\"microservice error\"))\n", "file_path": "Chapter11/router-cache/src/main.rs", "rank": 69, "score": 108254.18630161582 }, { "content": "fn post_request<T, O>(url: &str, params: T) -> impl Future<Item = O, Error = Error>\n\nwhere\n\n T: Serialize,\n\n O: for <'de> Deserialize<'de> + 'static,\n\n{\n\n client::ClientRequest::post(url)\n\n .form(params)\n\n .into_future()\n\n .and_then(|req| {\n\n req.send()\n\n .map_err(Error::from)\n\n .and_then(|resp| {\n\n if resp.status().is_success() {\n\n let fut = resp\n\n .json::<O>()\n\n .from_err();\n\n boxed(fut)\n\n } else {\n\n error!(\"Microservice error: {}\", resp.status());\n\n let fut = Err(format_err!(\"microservice error\"))\n", "file_path": "Chapter11/router-cache-ws/src/main.rs", "rank": 70, "score": 107202.51250834427 }, { "content": "fn tasks_handler(req: HttpRequest<State>) -> impl Future<Item = HttpResponse, Error = WebError> {\n\n let tasks: Vec<_> = req\n\n .state()\n\n .tasks\n\n .lock()\n\n .unwrap()\n\n .values()\n\n .cloned()\n\n .collect();\n\n let tmpl = Tasks { tasks };\n\n 
future::ok(HttpResponse::Ok().body(tmpl.render().unwrap()))\n\n}\n\n\n", "file_path": "Chapter12/rabbit-actix/src/server.rs", "rank": 71, "score": 103591.5940184264 }, { "content": "fn upload_handler(req: HttpRequest<State>) -> impl Future<Item = HttpResponse, Error = WebError> {\n\n req.multipart()\n\n .map(handle_multipart_item)\n\n .flatten()\n\n .into_future()\n\n .and_then(|(bytes, stream)| {\n\n if let Some(bytes) = bytes {\n\n Ok(bytes)\n\n } else {\n\n Err((MultipartError::Incomplete, stream))\n\n }\n\n })\n\n .map_err(|(err, _)| WebError::from(err))\n\n .and_then(move |image| {\n\n debug!(\"Image: {:?}\", image);\n\n let request = QrRequest { image };\n\n req.state()\n\n .addr\n\n .send(SendMessage(request))\n\n .from_err()\n", "file_path": "Chapter12/rabbit-actix/src/server.rs", "rank": 72, "score": 103591.5940184264 }, { "content": "fn main() {\n\n let tx = spawn_sender();\n\n\n\n let data = Data {\n\n sender: Mutex::new(tx),\n\n cache: TemplateCache::with_policy(ReloadPolicy::Always),\n\n };\n\n\n\n let mut server = Nickel::with_data(data);\n\n server.get(\"/\", middleware!(\"Mailer Microservice\"));\n\n server.post(\"/send\", send);\n\n server.listen(\"127.0.0.1:8002\").unwrap();\n\n}\n", "file_path": "Chapter09/emails-nickel/src/main.rs", "rank": 73, "score": 102615.10820820366 }, { "content": "fn spawn_sender() -> Sender<SendableEmail> {\n\n let (tx, rx) = channel();\n\n let smtp = SmtpClient::new(\"localhost:2525\", ClientSecurity::None)\n\n .expect(\"can't start smtp client\");\n\n let credentials = (\"[email protected]\", \"password\").into_credentials();\n\n let client = smtp.credentials(credentials);\n\n thread::spawn(move || {\n\n let mut mailer = SmtpTransport::new(client);\n\n for email in rx.iter() {\n\n let result = mailer.send(email);\n\n if let Err(err) = result {\n\n println!(\"Can't send mail: {}\", err);\n\n }\n\n }\n\n mailer.close();\n\n });\n\n tx\n\n}\n\n\n", "file_path": "Chapter09/emails-nickel/src/main.rs", "rank": 74, "score": 
101640.84727875408 }, { "content": "fn main() {\n\n env_logger::init();\n\n let manager = ConnectionManager::<SqliteConnection>::new(\"test.db\");\n\n let pool = Pool::builder()\n\n .build(manager)\n\n .expect(\"Failed to create pool.\");\n\n rouille::start_server(\"127.0.0.1:8001\", move |request| {\n\n match handler(&request, &pool) {\n\n Ok(response) => {\n\n response\n\n },\n\n Err(err) => {\n\n Response::text(err.to_string())\n\n .with_status_code(500)\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "Chapter09/users-rouille/src/main.rs", "rank": 75, "score": 101497.30609484093 }, { "content": "fn main() {\n\n dotenv().ok();\n\n env_logger::init();\n\n let matches = App::new(crate_name!())\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(crate_description!())\n\n .arg(Arg::with_name(\"config\")\n\n .short(\"c\")\n\n .long(\"config\")\n\n .value_name(\"FILE\")\n\n .help(\"Sets a custom config file\")\n\n .takes_value(true))\n\n .arg(Arg::with_name(\"address\")\n\n .short(\"a\")\n\n .long(\"address\")\n\n .value_name(\"ADDRESS\")\n\n .help(\"Sets an address\")\n\n .takes_value(true))\n\n .get_matches();\n", "file_path": "Chapter03/random-service-with-args/src/main.rs", "rank": 76, "score": 100655.18196514907 }, { "content": "#[test]\n\nfn check_signup_and_signin() {\n\n let mut api = WebApi::users();\n\n let username = utils::rand_str() + \"@example.com\";\n\n let password = utils::rand_str();\n\n let params = vec![\n\n (\"email\", username.as_ref()),\n\n (\"password\", password.as_ref()),\n\n ];\n\n let _: () = api.request(Method::POST, \"/signup\", params);\n\n\n\n let params = vec![\n\n (\"email\", username.as_ref()),\n\n (\"password\", password.as_ref()),\n\n ];\n\n let _: UserId = api.request(Method::POST, \"/signin\", params);\n\n}\n", "file_path": "Chapter13/integration/tests/users.rs", "rank": 77, "score": 99515.52706291221 }, { "content": "fn to_box<T>(fut :T) -> Box<dyn Future<Item=(), Error=()> + Send>\n\nwhere\n\n T: 
IntoFuture,\n\n T::Future: Send + 'static,\n\n T::Item: 'static,\n\n T::Error: 'static,\n\n{\n\n let fut = fut.into_future().map(drop).map_err(drop);\n\n Box::new(fut)\n\n}\n\n\n", "file_path": "Chapter05/futures-examples/src/main.rs", "rank": 78, "score": 97740.24749326322 }, { "content": "fn to_number(value: &Value, default: u16) -> u16 {\n\n value.as_str()\n\n .and_then(|x| x.parse::<u16>().ok())\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "Chapter10/actors/src/main.rs", "rank": 79, "score": 97321.1552003352 }, { "content": "fn to_number(value: &Value, default: u16) -> u16 {\n\n value.as_str()\n\n .and_then(|x| x.parse::<u16>().ok())\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "Chapter10/blocking/src/main.rs", "rank": 80, "score": 97321.1552003352 }, { "content": "fn to_number(value: &Value, default: u16) -> u16 {\n\n value.as_str()\n\n .and_then(|x| x.parse::<u16>().ok())\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "Chapter10/one-thread/src/main.rs", "rank": 81, "score": 95817.00201483391 }, { "content": "fn to_number(value: &Value, default: u16) -> u16 {\n\n value.as_str()\n\n .and_then(|x| x.parse::<u16>().ok())\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "Chapter10/thread-pool/src/main.rs", "rank": 82, "score": 95817.00201483391 }, { "content": "fn boxed<I, E, F>(fut: F) -> Box<Future<Item = I, Error = E>>\n\nwhere\n\n F: Future<Item = I, Error = E> + 'static,\n\n{\n\n Box::new(fut)\n\n}\n\n\n", "file_path": "Chapter13/units/src/lib.rs", "rank": 83, "score": 95775.07791377055 }, { "content": "fn boxed<I, E, F>(fut: F) -> Box<Future<Item = I, Error = E>>\n\nwhere\n\n F: Future<Item = I, Error = E> + 'static,\n\n{\n\n Box::new(fut)\n\n}\n\n\n", "file_path": "Chapter11/router/src/main.rs", "rank": 84, "score": 95775.07791377055 }, { "content": "fn send<'mw>(req: &mut Request<Data>, res: Response<'mw, Data>) -> MiddlewareResult<'mw, Data> {\n\n try_with!(res, send_impl(req).map_err(|_| StatusCode::BadRequest));\n\n 
res.send(\"true\")\n\n}\n\n\n", "file_path": "Chapter09/emails-nickel/src/main.rs", "rank": 85, "score": 95602.94589213662 }, { "content": "fn boxed<I, E, F>(fut: F) -> Box<Future<Item = I, Error = E>>\n\nwhere\n\n F: Future<Item = I, Error = E> + 'static,\n\n{\n\n Box::new(fut)\n\n}\n\n\n", "file_path": "Chapter11/router-cache/src/main.rs", "rank": 86, "score": 94454.54822249671 }, { "content": "fn boxed<I, E, F>(fut: F) -> Box<Future<Item = I, Error = E>>\n\nwhere\n\n F: Future<Item = I, Error = E> + 'static,\n\n{\n\n Box::new(fut)\n\n}\n\n\n", "file_path": "Chapter15/deploy/microservices/router/src/main.rs", "rank": 87, "score": 93193.24797052669 }, { "content": "fn boxed<I, E, F>(fut: F) -> Box<Future<Item = I, Error = E>>\n\nwhere\n\n F: Future<Item = I, Error = E> + 'static,\n\n{\n\n Box::new(fut)\n\n}\n\n\n", "file_path": "Chapter11/router-cache-ws/src/main.rs", "rank": 88, "score": 93193.24797052669 }, { "content": "DROP TABLE users;\n", "file_path": "Chapter08/users/migrations/2018-11-22-192300_create_tables/down.sql", "rank": 89, "score": 90440.39277996961 }, { "content": "CREATE TABLE users (\n\n id TEXT PRIMARY KEY NOT NULL,\n\n name TEXT NOT NULL,\n\n email TEXT NOT NULL\n\n);\n", "file_path": "Chapter08/users/migrations/2018-11-22-192300_create_tables/up.sql", "rank": 90, "score": 90440.39277996961 }, { "content": "fn microservice_handler(req: Request<Body>, user_db: &UserDb)\n\n -> impl Future<Item=Response<Body>, Error=Error>\n\n{\n\n let response = {\n\n match (req.method(), req.uri().path()) {\n\n (&Method::GET, \"/\") => {\n\n Response::new(INDEX.into())\n\n },\n\n (method, path) if path.starts_with(USER_PATH) => {\n\n let user_id = path.trim_start_matches(USER_PATH)\n\n .parse::<UserId>()\n\n .ok()\n\n .map(|x| x as usize);\n\n let mut users = user_db.lock().unwrap();\n\n match (method, user_id) {\n\n (&Method::GET, Some(id)) => {\n\n if let Some(data) = users.get(id) {\n\n Response::new(data.to_string().into())\n\n } else {\n\n 
response_with_code(StatusCode::NOT_FOUND)\n", "file_path": "Chapter02/hyper-microservice-rest/src/main.rs", "rank": 91, "score": 89259.4506973568 }, { "content": "CREATE TABLE users (\n\n id TEXT PRIMARY KEY NOT NULL,\n\n email TEXT NOT NULL,\n\n password TEXT NOT NULL\n\n);\n", "file_path": "Chapter09/users-rouille/migrations/2018-11-22-192300_create_tables/up.sql", "rank": 92, "score": 88854.95431402652 }, { "content": "DROP TABLE users;\n", "file_path": "Chapter09/users-rouille/migrations/2018-11-22-192300_create_tables/down.sql", "rank": 93, "score": 88854.95431402652 }, { "content": "fn unicorn_map(unicorn: &Unicorn) -> AttributeValue {\n\n let mut item = HashMap::new();\n\n item.insert(\"Name\".into(), s_attr(&unicorn.name));\n\n item.insert(\"Color\".into(), s_attr(&unicorn.color));\n\n item.insert(\"Gender\".into(), s_attr(&unicorn.gender));\n\n AttributeValue {\n\n m: Some(item),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "Chapter17/serverless/lambda_1/src/main.rs", "rank": 94, "score": 88117.16416993423 }, { "content": "fn microservice_handler(req: Request<Body>, user_db: &UserDb)\n\n -> impl Future<Item=Response<Body>, Error=Error>\n\n{\n\n let response = {\n\n let method = req.method();\n\n let path = req.uri().path();\n\n let mut users = user_db.lock().unwrap();\n\n if INDEX_PATH.is_match(path) {\n\n if method == &Method::GET {\n\n Response::new(INDEX.into())\n\n } else {\n\n response_with_code(StatusCode::METHOD_NOT_ALLOWED)\n\n }\n\n } else if USERS_PATH.is_match(path) {\n\n if method == &Method::GET {\n\n let list = users.iter()\n\n .map(|(id, _)| id.to_string())\n\n .collect::<Vec<String>>()\n\n .join(\",\");\n\n Response::new(list.into())\n", "file_path": "Chapter02/hyper-microservice-rest-regex/src/main.rs", "rank": 95, "score": 88047.23988567558 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct User {\n\n name: String,\n\n email: String,\n\n}\n\n\n\nconst CMD_CRATE: &str = \"create\";\n\nconst CMD_ADD: &str = 
\"add\";\n\nconst CMD_LIST: &str = \"list\";\n\nconst CMD_IMPORT: &str = \"import\";\n\n\n", "file_path": "Chapter07/users-mysql/src/main.rs", "rank": 96, "score": 83606.75474005709 }, { "content": "fn register_user_agent(state: State) -> Box<HandlerFuture> {\n\n let user_agent = HeaderMap::borrow_from(&state)\n\n .get(USER_AGENT)\n\n .map(|value| value.to_str().unwrap().to_string())\n\n .unwrap_or_else(|| \"<undefined>\".into());\n\n\n\n let conn = ConnState::borrow_from(&state);\n\n let client_1 = conn.client.clone();\n\n let client_2 = conn.client.clone();\n\n\n\n let res = future::ok(())\n\n .and_then(move |_| {\n\n let mut client = client_1.lock().unwrap();\n\n client.prepare(\"INSERT INTO agents (agent) VALUES ($1)\n\n RETURNING agent\")\n\n })\n\n .and_then(move |statement| {\n\n let mut client = client_2.lock().unwrap();\n\n client.query(&statement, &[&user_agent]).collect().map(|rows| {\n\n rows[0].get::<_, String>(0)\n", "file_path": "Chapter09/logs-gotham/src/main.rs", "rank": 98, "score": 82843.00491743651 }, { "content": "fn s_attr<T: AsRef<str>>(s: T) -> AttributeValue {\n\n AttributeValue {\n\n s: Some(s.as_ref().to_owned()),\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "Chapter17/serverless/lambda_1/src/main.rs", "rank": 99, "score": 82788.34623753259 } ]
Rust
src/main.rs
Steve-xmh/netease-music-tui
7b2c1f07cd679e1d44d8ea61640e37f97b0ce96e
#[macro_use] extern crate lazy_static; #[macro_use] extern crate failure; extern crate config; extern crate serde_derive; extern crate serde_json; #[macro_use] extern crate log; extern crate log_panics; use crossterm::event::{EnableMouseCapture, KeyCode, KeyEvent, KeyModifiers}; use crossterm::terminal::enable_raw_mode; use crossterm::ExecutableCommand; use dirs; use failure::err_msg; use log::LevelFilter; use std::fs; use std::io; use std::path::Path; use tui::backend::CrosstermBackend; use tui::Terminal; use util::event::{Event, Events}; mod api; mod app; mod handlers; mod model; mod player; mod ui; mod util; mod dbus_mpris; use app::{ActiveBlock, App}; use dbus_mpris::{dbus_mpris_handler, DbusMpris}; const FILE_NAME: &str = "Settings.toml"; const CONFIG_DIR: &str = ".config"; const APP_CONFIG_DIR: &str = "netease-music-tui"; #[cfg(not(target_os = "windows"))] fn get_log_path() -> String { "/var/log/ncmt.log".into() } #[cfg(target_os = "windows")] fn get_log_path() -> String { "./ncmt.log".into() } fn main() -> Result<(), failure::Error> { let config_file_path = match dirs::home_dir() { Some(home) => { let path = Path::new(&home); let home_config_dir = path.join(CONFIG_DIR); let app_config_dir = home_config_dir.join(APP_CONFIG_DIR); if !home_config_dir.exists() { fs::create_dir(&home_config_dir)?; } if !app_config_dir.exists() { fs::create_dir(&app_config_dir)?; } let config_file_path = &app_config_dir.join(FILE_NAME); config_file_path.to_path_buf() } None => return Err(err_msg("No $HOME directory found for config")), }; let mut settings = config::Config::default(); let config_string = match fs::read_to_string(&config_file_path) { Ok(data) => data, Err(_) => return Err(err_msg("Please set your account in config file")), }; settings .merge(config::File::from_str( &config_string, config::FileFormat::Toml, )) .unwrap(); match settings.get_bool("debug") { Ok(debug) => { if debug { log_panics::init(); simple_logging::log_to_file(get_log_path(), LevelFilter::Debug)?; } 
} Err(e) => error!("{}", e), } info!("start netease cloud music rust client"); let mut app = App::new(); let mut is_first_render = true; let cloud_music = app.cloud_music.to_owned().unwrap(); let profile = match cloud_music.login_status()? { Some(profile) => { app.user_id = profile.userId.unwrap(); profile } None => { let username = settings.get::<String>("username")?; let password = settings.get::<String>("password")?; match cloud_music.login(&username, &password) { Ok(profile) => profile, Err(_) => return Err(err_msg("Account/Password Error")), } } }; let mut stdout = io::stdout(); stdout.execute(EnableMouseCapture)?; enable_raw_mode()?; let backend = CrosstermBackend::new(stdout); let mut terminal = Terminal::new(backend)?; terminal.hide_cursor()?; let events = Events::new(); let dbus_mpris = DbusMpris::new(); loop { terminal.draw(|mut f| { let current_route = app.get_current_route(); match current_route.active_block { ActiveBlock::Help => { ui::draw_help_menu(&mut f); } ActiveBlock::Msg => { ui::draw_msg(&mut f, &mut app); } _ => { ui::draw_main_layout(&mut f, &mut app); } } })?; match dbus_mpris.next() { Ok(cmd) => { dbus_mpris_handler(cmd, &mut app); } Err(_) => {} } match events.next()? { Event::Input(input) => { match input { KeyEvent { code: KeyCode::Char('q'), modifiers: KeyModifiers::NONE, } => { if app.get_current_route().active_block != ActiveBlock::Search { let pop_result = app.pop_navigation_stack(); if pop_result.is_none() { break; } } } KeyEvent { code: KeyCode::Char('c'), modifiers: KeyModifiers::CONTROL, } => { break; } _ => { handlers::handle_app(input, &mut app); } } } Event::Tick => { app.update_on_tick(); } } if is_first_render { let cloud_music = app.cloud_music.to_owned().unwrap(); let playlists = cloud_music.user_playlists(&profile.userId.unwrap().to_string()); match playlists { Ok(p) => { app.playlists = Some(p); app.selected_playlist_index = Some(0); } Err(e) => { app.handle_error(e); } }; is_first_render = false; } } Ok(()) }
#[macro_use] extern crate lazy_static; #[macro_use] extern crate failure; extern crate config; extern crate serde_derive; extern crate serde_json; #[macro_use] extern crate log; extern crate log_panics; use crossterm::event::{EnableMouseCapture, KeyCode, KeyEvent, KeyModifiers}; use crossterm::terminal::enable_raw_mode; use crossterm::ExecutableCommand; use dirs; use failure::err_msg; use log::LevelFilter; use std::fs; use std::io; use std::path::Path; use tui::backend::CrosstermBackend; use tui::Terminal; use util::event::{Event, Events}; mod api; mod app; mod handlers; mod model; mod player; mod ui; mod util; mod dbus_mpris; use app::{ActiveBlock, App}; use dbus_mpris::{dbus_mpris_handler, DbusMpris}; const FILE_NAME: &str = "Settings.toml"; const CONFIG_DIR: &str = ".config"; const APP_CONFIG_DIR: &str = "netease-music-tui"; #[cfg(not(target_os = "windows"))] fn get_log_path() -> String { "/var/log/ncmt.log".into() } #[cfg(target_os = "windows")] fn get_log_path() -> String { "./ncmt.log".into() } fn main() -> Result<(), failure::Error> { let config_file_path = match dirs::home_dir() { Some(home) => { let path = Path::new(&home); let home_config_dir = path.join(CONFIG_DIR); let app_config_dir = home_config_dir.join(APP_CONFIG_DIR); if !home_config_dir.exists() { fs::create_dir(&home_config_dir)?; } if !app_config_dir.exists() { fs::create_dir(&app_config_dir)?; } let config_file_path = &app_config_dir.join(FILE_NAME); config_file_path.to_path_buf() } None => return Err(err_msg("No $HOME directory found for config")), }; let mut settings = config::Config::default(); let config_string = match fs::read_to_string(&config_file_path) { Ok(data) => data, Err(_) => return Err(err_msg("Please set your account in config file")), }; settings .merge(config::File::from_str( &config_string, config::FileFormat::Toml, )) .unwrap(); match settings.get_bool("debug") { Ok(debug) => { if debug { log_panics::init(); simple_logging::log_to_file(get_log_path(), LevelFilter::Debug)?; } 
} Err(e) => error!("{}", e), } info!("start netease cloud music rust client"); let mut app = App::new(); let mut is_first_render = true; let cloud_music = app.cloud_music.to_owned().unwrap(); let profile = match cloud_music.login_status()? { Some(profile) => { app.user_id = profile.userId.unwrap(); profile } None => { let username = settings.get::<String>("username")?; let password = settings.get::<String>("password")?; match cloud_music.login(&username, &password) { Ok(profile) => profile, Err(_) => return Err(err_msg("Account/Password Error")), } } }; let mut stdout = io::stdout(); stdout.execute(EnableMouseCapture)?; enable_raw_mode()?; let backend = CrosstermBackend::new(stdout); let mut terminal = Terminal::new(backend)?; terminal.hide_cursor()?; let events = Events::new(); let dbus_mpris = DbusMpris::new(); loop { terminal.draw(|mut f| { let current_route = app.get_current_route(); match current_route.active_block { ActiveBlock::Help => { ui::draw_help_menu(&mut f); } ActiveBlock::Msg => { ui::draw_msg(&mut f, &mut app); } _ => {
ui::draw_main_layout(&mut f, &mut app); } } })?; match dbus_mpris.next() { Ok(cmd) => { dbus_mpris_handler(cmd, &mut app); } Err(_) => {} } match events.next()? { Event::Input(input) => { match input { KeyEvent { code: KeyCode::Char('q'), modifiers: KeyModifiers::NONE, } => { if app.get_current_route().active_block != ActiveBlock::Search { let pop_result = app.pop_navigation_stack(); if pop_result.is_none() { break; } } } KeyEvent { code: KeyCode::Char('c'), modifiers: KeyModifiers::CONTROL, } => { break; } _ => { handlers::handle_app(input, &mut app); } } } Event::Tick => { app.update_on_tick(); } } if is_first_render { let cloud_music = app.cloud_music.to_owned().unwrap(); let playlists = cloud_music.user_playlists(&profile.userId.unwrap().to_string()); match playlists { Ok(p) => { app.playlists = Some(p); app.selected_playlist_index = Some(0); } Err(e) => { app.handle_error(e); } }; is_first_render = false; } } Ok(()) }
function_block-function_prefix_line
[ { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n KeyEvent {\n\n code: KeyCode::Enter,\n\n modifiers: KeyModifiers::NONE,\n\n } => {\n\n let current_hovered = app.get_current_route().hovered_block;\n\n app.set_current_route_state(Some(current_hovered), None);\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "src/handlers/home.rs", "rank": 1, "score": 212300.41441442558 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n KeyEvent {\n\n code: KeyCode::Esc,\n\n modifiers: KeyModifiers::NONE,\n\n } => {\n\n app.set_current_route_state(Some(ActiveBlock::Empty), Some(ActiveBlock::SearchResult));\n\n }\n\n k if common_events::down_event(k) => {\n\n // track tab\n\n if app.tabs.index == 0 {\n\n let next_index = common_events::on_down_press_handler(\n\n &app.search_results.tracks.as_ref().unwrap_or(&vec![]),\n\n Some(app.search_results.selected_tracks_index),\n\n );\n\n app.search_results.selected_tracks_index = next_index;\n\n } else if app.tabs.index == 1 {\n\n let next_index = common_events::on_down_press_handler(\n\n &app.search_results.artists.as_ref().unwrap_or(&vec![]),\n\n Some(app.search_results.selected_artists_index),\n", "file_path": "src/handlers/search_results.rs", "rank": 2, "score": 207146.3768898621 }, { "content": "pub fn draw_main_layout<B>(f: &mut Frame<B>, app: &mut App)\n\nwhere\n\n B: Backend,\n\n{\n\n let parent_layout = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints(\n\n [\n\n Constraint::Length(3),\n\n Constraint::Min(1),\n\n Constraint::Length(6),\n\n ]\n\n .as_ref(),\n\n )\n\n .margin(2)\n\n .split(f.size());\n\n\n\n // Search input and help\n\n draw_input_and_help_box(f, app, parent_layout[0]);\n\n\n\n // Nested main block with potential routes\n\n draw_routes(f, app, parent_layout[1]);\n\n\n\n // Currently playing\n\n draw_playing_block(f, app, parent_layout[2]);\n\n\n\n 
app.block_height = parent_layout[1].height as usize\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 4, "score": 196038.9662705895 }, { "content": "// Handle events when the search input block is active\n\npub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n KeyEvent {\n\n code: KeyCode::Char('u'),\n\n modifiers: KeyModifiers::CONTROL,\n\n } => {\n\n app.input = vec![];\n\n app.input_idx = 0;\n\n app.input_cursor_position = 0;\n\n }\n\n KeyEvent {\n\n code: KeyCode::Char('e'),\n\n modifiers: KeyModifiers::CONTROL,\n\n } => {\n\n app.input_idx = app.input.len();\n\n let input_string: String = app.input.iter().collect();\n\n app.input_cursor_position = UnicodeWidthStr::width(input_string.as_str())\n\n .try_into()\n\n .unwrap();\n\n }\n", "file_path": "src/handlers/search.rs", "rank": 5, "score": 189691.40144589532 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n k if common_events::down_event(k) => {\n\n if let Some(djprogram_list) = &mut app.program_list {\n\n let next_index = common_events::on_down_press_handler(\n\n &djprogram_list.dj_programs,\n\n Some(djprogram_list.selected_index),\n\n );\n\n djprogram_list.selected_index = next_index;\n\n }\n\n }\n\n k if common_events::up_event(k) => {\n\n if let Some(djprogram_list) = &mut app.program_list {\n\n let next_index = common_events::on_up_press_handler(\n\n &djprogram_list.dj_programs,\n\n Some(djprogram_list.selected_index),\n\n );\n\n djprogram_list.selected_index = next_index;\n\n }\n", "file_path": "src/handlers/djprogram.rs", "rank": 6, "score": 189687.59755799794 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n k if common_events::down_event(k) => {\n\n if let Some(albumlist) = &mut app.album_list {\n\n let next_index = common_events::on_down_press_handler(\n\n 
&albumlist.albums,\n\n Some(albumlist.selected_index),\n\n );\n\n albumlist.selected_index = next_index;\n\n }\n\n }\n\n k if common_events::up_event(k) => {\n\n if let Some(albumlist) = &mut app.album_list {\n\n let next_index = common_events::on_up_press_handler(\n\n &albumlist.albums,\n\n Some(albumlist.selected_index),\n\n );\n\n albumlist.selected_index = next_index;\n\n }\n", "file_path": "src/handlers/albumlist.rs", "rank": 7, "score": 189687.59755799797 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n k if common_events::down_event(k) => {\n\n if let Some(artist_albums) = &mut app.artist_albums {\n\n let next_index = common_events::on_down_press_handler(\n\n &artist_albums.albums,\n\n Some(artist_albums.selected_index),\n\n );\n\n artist_albums.selected_index = next_index;\n\n }\n\n }\n\n k if common_events::up_event(k) => {\n\n if let Some(artist_albums) = &mut app.artist_albums {\n\n let next_index = common_events::on_up_press_handler(\n\n &artist_albums.albums,\n\n Some(artist_albums.selected_index),\n\n );\n\n artist_albums.selected_index = next_index;\n\n }\n", "file_path": "src/handlers/artist.rs", "rank": 8, "score": 189687.59755799794 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::right_event(k) => common_events::handle_right_event(app),\n\n k if common_events::down_event(k) => {\n\n match &app.playlists {\n\n Some(p) => {\n\n if let Some(selected_playlist_index) = app.selected_playlist_index {\n\n let next_index =\n\n common_events::on_down_press_handler(&p, Some(selected_playlist_index));\n\n app.selected_playlist_index = Some(next_index);\n\n }\n\n }\n\n None => {}\n\n };\n\n }\n\n k if common_events::up_event(k) => {\n\n match &app.playlists {\n\n Some(p) => {\n\n let next_index =\n\n common_events::on_up_press_handler(&p, app.selected_playlist_index);\n", "file_path": 
"src/handlers/my_playlist.rs", "rank": 9, "score": 189687.59755799797 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::right_event(k) => common_events::handle_right_event(app),\n\n k if common_events::down_event(k) => {\n\n let next_index = common_events::on_down_press_handler(\n\n &RECOMMEND_OPTIONS,\n\n Some(app.recommend.selected_index),\n\n );\n\n app.recommend.selected_index = next_index;\n\n }\n\n k if common_events::up_event(k) => {\n\n let next_index = common_events::on_up_press_handler(\n\n &RECOMMEND_OPTIONS,\n\n Some(app.recommend.selected_index),\n\n );\n\n app.recommend.selected_index = next_index;\n\n }\n\n // recommend list\n\n // you can go Discover music\n\n // you can go Personal FM\n", "file_path": "src/handlers/recommend.rs", "rank": 10, "score": 189687.59755799794 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n KeyEvent {\n\n code: KeyCode::Enter,\n\n modifiers: KeyModifiers::NONE,\n\n } => {\n\n let current_hovered = app.get_current_route().hovered_block;\n\n app.set_current_route_state(Some(current_hovered), None);\n\n }\n\n k if common_events::left_event(k) => match app.get_current_route().hovered_block {\n\n ActiveBlock::Artist\n\n | ActiveBlock::AlbumList\n\n | ActiveBlock::AlbumTracks\n\n | ActiveBlock::Home\n\n | ActiveBlock::SearchResult\n\n | ActiveBlock::Playlist\n\n | ActiveBlock::PersonalFm\n\n | ActiveBlock::Playing\n\n | ActiveBlock::DjRadio\n\n | ActiveBlock::DjProgram\n", "file_path": "src/handlers/empty.rs", "rank": 11, "score": 189687.59755799794 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n k if common_events::down_event(k) => {\n\n let next_index = common_events::on_down_press_handler(\n\n &app.track_table.tracks,\n\n Some(app.track_table.selected_index),\n\n );\n\n app.track_table.selected_index = next_index;\n\n }\n\n 
k if common_events::up_event(k) => {\n\n let next_index = common_events::on_up_press_handler(\n\n &app.track_table.tracks,\n\n Some(app.track_table.selected_index),\n\n );\n\n app.track_table.selected_index = next_index;\n\n }\n\n KeyEvent {\n\n code: KeyCode::Enter,\n\n modifiers: KeyModifiers::NONE,\n", "file_path": "src/handlers/track.rs", "rank": 12, "score": 189687.59755799794 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n KeyEvent {\n\n code: KeyCode::Char('t'),\n\n modifiers: KeyModifiers::CONTROL,\n\n } => {\n\n app.fm_trash();\n\n }\n\n _ => {}\n\n }\n\n}\n", "file_path": "src/handlers/fm.rs", "rank": 13, "score": 189687.59755799797 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n k if common_events::down_event(k) => {\n\n if let Some(artistlist) = &mut app.artist_list {\n\n let next_index = common_events::on_down_press_handler(\n\n &artistlist.artists,\n\n Some(artistlist.selected_index),\n\n );\n\n artistlist.selected_index = next_index;\n\n }\n\n }\n\n k if common_events::up_event(k) => {\n\n if let Some(artistlist) = &mut app.artist_list {\n\n let next_index = common_events::on_up_press_handler(\n\n &artistlist.artists,\n\n Some(artistlist.selected_index),\n\n );\n\n artistlist.selected_index = next_index;\n\n }\n", "file_path": "src/handlers/artistlist.rs", "rank": 14, "score": 189687.59755799794 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n k if common_events::down_event(k) => {\n\n if let Some(playlist) = &mut app.playlist_list {\n\n let next_index = common_events::on_down_press_handler(\n\n &playlist.playlists,\n\n Some(playlist.selected_index),\n\n );\n\n playlist.selected_index = next_index;\n\n }\n\n }\n\n k if common_events::up_event(k) => {\n\n if let Some(playlist) = &mut 
app.playlist_list {\n\n let next_index = common_events::on_up_press_handler(\n\n &playlist.playlists,\n\n Some(playlist.selected_index),\n\n );\n\n playlist.selected_index = next_index;\n\n }\n", "file_path": "src/handlers/playlist.rs", "rank": 15, "score": 189687.59755799794 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n k if common_events::down_event(k) => {\n\n if let Some(djradio_list) = &mut app.djradio_list {\n\n let next_index = common_events::on_down_press_handler(\n\n &djradio_list.djradios,\n\n Some(djradio_list.selected_index),\n\n );\n\n djradio_list.selected_index = next_index;\n\n }\n\n }\n\n k if common_events::up_event(k) => {\n\n if let Some(djradio_list) = &mut app.djradio_list {\n\n let next_index = common_events::on_up_press_handler(\n\n &djradio_list.djradios,\n\n Some(djradio_list.selected_index),\n\n );\n\n djradio_list.selected_index = next_index;\n\n }\n", "file_path": "src/handlers/djradio.rs", "rank": 16, "score": 189687.59755799797 }, { "content": "// handle current block events\n\nfn handle_block_events(key: KeyEvent, app: &mut App) {\n\n // get current route\n\n let current_route = app.get_current_route();\n\n\n\n match current_route.active_block {\n\n ActiveBlock::MyPlaylists => {\n\n my_playlist::handler(key, app);\n\n }\n\n ActiveBlock::TrackTable => {\n\n track::handler(key, app);\n\n }\n\n ActiveBlock::Recommend => {\n\n recommend::handler(key, app);\n\n }\n\n ActiveBlock::Empty => {\n\n empty::handler(key, app);\n\n }\n\n ActiveBlock::Home => {\n\n home::handler(key, app);\n\n }\n", "file_path": "src/handlers/mod.rs", "rank": 17, "score": 189639.14128411032 }, { "content": "pub fn handler(key: KeyEvent, app: &mut App) {\n\n match key {\n\n k if common_events::left_event(k) => common_events::handle_left_event(app),\n\n k if common_events::down_event(k) => {\n\n if let Some(selected_album) = &mut app.selected_album 
{\n\n let next_index = common_events::on_down_press_handler(\n\n &selected_album.tracks,\n\n Some(selected_album.selected_index),\n\n );\n\n selected_album.selected_index = next_index;\n\n }\n\n }\n\n k if common_events::up_event(k) => {\n\n if let Some(selected_album) = &mut app.selected_album {\n\n let next_index = common_events::on_up_press_handler(\n\n &selected_album.tracks,\n\n Some(selected_album.selected_index),\n\n );\n\n selected_album.selected_index = next_index;\n\n }\n", "file_path": "src/handlers/album_tracks.rs", "rank": 18, "score": 185564.88770533845 }, { "content": "pub fn handle_app(key: KeyEvent, app: &mut App) {\n\n // get current route\n\n let current_route = app.get_current_route();\n\n match current_route.active_block {\n\n ActiveBlock::Search => match key {\n\n KeyEvent {\n\n code: KeyCode::Char('h'),\n\n modifiers: KeyModifiers::CONTROL,\n\n } => {\n\n app.hover_mode();\n\n }\n\n _ => {\n\n search::handler(key, app);\n\n }\n\n },\n\n _ => match key.modifiers {\n\n // means space\n\n KeyModifiers::CONTROL => match key.code {\n\n KeyCode::Char('y') => {\n\n app.like_current(Action::Subscribe);\n", "file_path": "src/handlers/mod.rs", "rank": 19, "score": 183431.3889147344 }, { "content": "fn draw_home<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints([Constraint::Percentage(80), Constraint::Percentage(20)].as_ref())\n\n .margin(2)\n\n .split(layout_chunk);\n\n\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::Home,\n\n current_route.hovered_block == ActiveBlock::Home,\n\n );\n\n\n\n f.render_widget(\n\n Block::default()\n\n .title(Spans::from(Span::styled(\"Welcome!\", get_color(highlight_state))))\n\n .borders(Borders::ALL)\n", "file_path": "src/ui/mod.rs", "rank": 20, "score": 180754.3338772455 }, { "content": "pub fn draw_msg<B>(f: &mut Frame<B>, 
app: &mut App)\n\nwhere\n\n B: Backend,\n\n{\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints(\n\n [\n\n Constraint::Percentage(40),\n\n Constraint::Percentage(20),\n\n Constraint::Percentage(40),\n\n ]\n\n .as_ref(),\n\n )\n\n .split(f.size());\n\n let child_chunks = Layout::default()\n\n .direction(Direction::Horizontal)\n\n .constraints(\n\n [\n\n Constraint::Percentage(40),\n", "file_path": "src/ui/mod.rs", "rank": 21, "score": 179442.68993939104 }, { "content": "// handle right key event\n\npub fn handle_right_event(app: &mut App) {\n\n match app.get_current_route().hovered_block {\n\n ActiveBlock::MyPlaylists | ActiveBlock::Recommend => match app.get_current_route().id {\n\n RouteId::TrackTable => {\n\n app.set_current_route_state(\n\n Some(ActiveBlock::Empty),\n\n Some(ActiveBlock::TrackTable),\n\n );\n\n }\n\n RouteId::Search => {\n\n app.set_current_route_state(\n\n Some(ActiveBlock::Empty),\n\n Some(ActiveBlock::SearchResult),\n\n );\n\n }\n\n RouteId::AlbumList => {\n\n app.set_current_route_state(Some(ActiveBlock::Empty), Some(ActiveBlock::AlbumList));\n\n }\n\n RouteId::Artist => {\n\n app.set_current_route_state(Some(ActiveBlock::Empty), Some(ActiveBlock::Artist));\n", "file_path": "src/handlers/common_events.rs", "rank": 22, "score": 175055.83911474285 }, { "content": "pub fn handle_left_event(app: &mut App) {\n\n app.set_current_route_state(Some(ActiveBlock::Empty), Some(ActiveBlock::Recommend));\n\n}\n\n\n", "file_path": "src/handlers/common_events.rs", "rank": 23, "score": 175051.8998545369 }, { "content": "pub fn draw_error_screen<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints([Constraint::Percentage(100)].as_ref())\n\n .split(layout_chunk);\n\n\n\n let playing_text = vec![\n\n Spans::from(Span::raw(\"Api response: \")),\n\n Spans::from(Span::styled(\n\n &app.error_msg,\n\n 
Style::default().fg(Color::LightRed),\n\n )),\n\n Spans::from(Span::styled(\n\n \"\\nPress `e` to return\",\n\n Style::default().fg(Color::Gray),\n\n )),\n\n ];\n", "file_path": "src/ui/mod.rs", "rank": 24, "score": 171043.46845921327 }, { "content": "pub fn draw_search_results<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::SearchResult,\n\n current_route.hovered_block == ActiveBlock::SearchResult,\n\n );\n\n\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints([Constraint::Length(3), Constraint::Min(0)].as_ref())\n\n .split(layout_chunk);\n\n\n\n {\n\n let songs = match &app.search_results.tracks {\n\n Some(r) => r\n\n .iter()\n\n .map(|item| {\n", "file_path": "src/ui/mod.rs", "rank": 25, "score": 170863.0088767539 }, { "content": "// draw track playing block in the bottom\n\npub fn draw_playing_block<B>(f: &mut Frame<B>, app: &mut App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints([Constraint::Percentage(50), Constraint::Percentage(50)].as_ref())\n\n .margin(1)\n\n .split(layout_chunk);\n\n\n\n let state_title = if app.player.is_playing() {\n\n \"Playing\"\n\n } else {\n\n \"Pause \"\n\n };\n\n\n\n let repeat_text = match app.fm_state {\n\n true => \"FM\",\n\n false => match app.repeat_state {\n\n RepeatState::Off => \"Off\",\n", "file_path": "src/ui/mod.rs", "rank": 26, "score": 162367.60729722123 }, { "content": "#[cfg(not(feature = \"dbus_mpris\"))]\n\n#[allow(unused)]\n\npub fn dbus_mpris_handler(r: PlayerCommand, app: &mut App) {}\n\n\n", "file_path": "src/dbus_mpris.rs", "rank": 27, "score": 157260.65801451713 }, { "content": "#[cfg(feature = \"dbus_mpris\")]\n\npub fn dbus_mpris_handler(r: PlayerCommand, app: &mut App) {\n\n match r {\n\n PlayerCommand::Next => {\n\n 
app.skip_track(TrackState::Forword);\n\n }\n\n PlayerCommand::Previous => {\n\n app.skip_track(TrackState::Backword);\n\n }\n\n PlayerCommand::Pause => {\n\n app.player.pause();\n\n }\n\n PlayerCommand::PlayPause => {\n\n app.player.pause();\n\n }\n\n PlayerCommand::Stop => {\n\n app.player.stop();\n\n }\n\n PlayerCommand::Play => {\n\n app.player.play();\n\n }\n", "file_path": "src/dbus_mpris.rs", "rank": 28, "score": 157260.65801451713 }, { "content": "fn draw_personal_fm<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::PersonalFm,\n\n current_route.hovered_block == ActiveBlock::PersonalFm,\n\n );\n\n let display_block = Block::default()\n\n .title(Spans::from(Span::styled(\"PERSONAL FM\", get_color(highlight_state))))\n\n .borders(Borders::ALL)\n\n .border_style(get_color(highlight_state));\n\n\n\n f.render_widget(\n\n Paragraph::new(\"Your Personal FM\")\n\n .style(Style::default().fg(Color::White))\n\n .block(display_block)\n\n .wrap(Wrap { trim: true }),\n\n layout_chunk,\n\n );\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 29, "score": 157159.67782077365 }, { "content": "pub fn draw_routes<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let chunks = Layout::default()\n\n .direction(Direction::Horizontal)\n\n .constraints([Constraint::Percentage(20), Constraint::Percentage(80)].as_ref())\n\n .split(layout_chunk);\n\n\n\n draw_user_block(f, app, chunks[0]);\n\n\n\n let current_route = app.get_current_route();\n\n // info!(\"{:?}\", current_route);\n\n\n\n match current_route.id {\n\n RouteId::Error => {\n\n draw_error_screen(f, app, chunks[1]);\n\n } // This is handled as a \"full screen\" route in main.rs\n\n RouteId::TrackTable => {\n\n draw_track_table(f, &app, chunks[1]);\n", "file_path": "src/ui/mod.rs", "rank": 30, "score": 155171.5039472617 }, { "content": 
"pub fn draw_djradio_list<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::DjRadio,\n\n current_route.hovered_block == ActiveBlock::DjRadio,\n\n );\n\n\n\n let header = [\n\n TableHeader {\n\n text: \"\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: \"DjRadio Name\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n ];\n", "file_path": "src/ui/mod.rs", "rank": 31, "score": 152058.9370607234 }, { "content": "pub fn draw_playlist_table<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::Playlist,\n\n current_route.hovered_block == ActiveBlock::Playlist,\n\n );\n\n\n\n let header = [\n\n TableHeader {\n\n text: \"\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: \"Title\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n TableHeader {\n", "file_path": "src/ui/mod.rs", "rank": 32, "score": 152058.9370607234 }, { "content": "pub fn draw_user_block<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints([Constraint::Percentage(30), Constraint::Percentage(70)].as_ref())\n\n .split(layout_chunk);\n\n\n\n draw_recommend_block(f, app, chunks[0]);\n\n draw_playlist_block(f, app, chunks[1]);\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 33, "score": 152058.93706072343 }, { "content": "pub fn draw_album_list<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == 
ActiveBlock::AlbumList,\n\n current_route.hovered_block == ActiveBlock::AlbumList,\n\n );\n\n\n\n let header = [\n\n TableHeader {\n\n text: \"\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: \"Album Name\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n TableHeader {\n", "file_path": "src/ui/mod.rs", "rank": 34, "score": 152058.9370607234 }, { "content": "// dtaw album table\n\npub fn draw_album_table<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::AlbumTracks,\n\n current_route.hovered_block == ActiveBlock::AlbumTracks,\n\n );\n\n\n\n let header = [\n\n TableHeader {\n\n text: \"\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: \"Title\",\n\n width: get_percentage_width(layout_chunk.width, 0.8),\n\n },\n\n ];\n", "file_path": "src/ui/mod.rs", "rank": 35, "score": 152058.93706072343 }, { "content": "pub fn draw_playlist_block<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let playlist_items = match &app.playlists {\n\n Some(p) => p\n\n .iter()\n\n .map(|item| item.name.as_ref().unwrap().to_owned())\n\n .collect(),\n\n None => vec![],\n\n };\n\n\n\n let current_route = app.get_current_route();\n\n\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::MyPlaylists,\n\n current_route.hovered_block == ActiveBlock::MyPlaylists,\n\n );\n\n\n\n draw_selectable_list(\n\n f,\n\n layout_chunk,\n\n \"Playlists\",\n\n &playlist_items,\n\n highlight_state,\n\n app.selected_playlist_index,\n\n );\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 36, "score": 152058.93706072343 }, { "content": "pub fn draw_recommend_block<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = 
app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::Recommend,\n\n current_route.hovered_block == ActiveBlock::Recommend,\n\n );\n\n draw_selectable_list(\n\n f,\n\n layout_chunk,\n\n \"Recommend\",\n\n &RECOMMEND_OPTIONS,\n\n highlight_state,\n\n Some(app.recommend.selected_index),\n\n );\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 37, "score": 152058.93706072343 }, { "content": "pub fn draw_artist_albums<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::Artist,\n\n current_route.hovered_block == ActiveBlock::Artist,\n\n );\n\n\n\n let header = [\n\n TableHeader {\n\n text: \"\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: \"Album Name\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n TableHeader {\n", "file_path": "src/ui/mod.rs", "rank": 38, "score": 152058.9370607234 }, { "content": "pub fn draw_artist_list<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::ArtistList,\n\n current_route.hovered_block == ActiveBlock::ArtistList,\n\n );\n\n\n\n let header = [\n\n TableHeader {\n\n text: \"\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: \"Artist\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n ];\n", "file_path": "src/ui/mod.rs", "rank": 39, "score": 152058.9370607234 }, { "content": "// draw track table\n\npub fn draw_track_table<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let header = [\n\n TableHeader {\n\n text: \"ID\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: 
\"Title\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n TableHeader {\n\n text: \"Artist\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n TableHeader {\n\n text: \"Album\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n", "file_path": "src/ui/mod.rs", "rank": 40, "score": 152058.93706072343 }, { "content": "pub fn draw_playing_detail<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::Playing,\n\n current_route.hovered_block == ActiveBlock::Playing,\n\n );\n\n\n\n let chunks = Layout::default()\n\n .direction(Direction::Horizontal)\n\n .constraints([Constraint::Percentage(50), Constraint::Percentage(50)].as_ref())\n\n // .margin(2)\n\n .split(layout_chunk);\n\n\n\n f.render_widget(\n\n Canvas::default()\n\n .block(\n\n Block::default()\n", "file_path": "src/ui/mod.rs", "rank": 41, "score": 152058.93706072343 }, { "content": "pub fn draw_dj_program_list<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::DjProgram,\n\n current_route.hovered_block == ActiveBlock::DjProgram,\n\n );\n\n\n\n let header = [\n\n TableHeader {\n\n text: \"\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: \"Dj Program Name\",\n\n width: get_percentage_width(layout_chunk.width, 0.5),\n\n },\n\n TableHeader {\n", "file_path": "src/ui/mod.rs", "rank": 42, "score": 149130.24487440794 }, { "content": "pub fn draw_input_and_help_box<B>(f: &mut Frame<B>, app: &App, layout_chunk: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n let chunks = Layout::default()\n\n .direction(Direction::Horizontal)\n\n .constraints([Constraint::Percentage(90), Constraint::Percentage(10)].as_ref())\n\n 
.split(layout_chunk);\n\n\n\n let current_route = app.get_current_route();\n\n\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::Search,\n\n current_route.hovered_block == ActiveBlock::Search,\n\n );\n\n\n\n let input: String = app.input.iter().collect();\n\n\n\n f.render_widget(\n\n Paragraph::new(input)\n", "file_path": "src/ui/mod.rs", "rank": 43, "score": 149130.24487440794 }, { "content": "#[cfg(target_os = \"windows\")]\n\nfn get_cookie_path() -> String {\n\n let mut tmp = temp_dir();\n\n tmp.push(\"ncmt_cookie\");\n\n tmp.as_os_str().to_str().unwrap().to_string()\n\n}\n\n\n\nimpl CloudMusic {\n\n pub fn default() -> CloudMusic {\n\n CloudMusic {\n\n prefix: \"https://music.163.com\".to_owned(),\n\n cookie_path: get_cookie_path(),\n\n }\n\n }\n\n\n\n ///send get request\n\n fn get(\n\n &self,\n\n url: &str,\n\n params: &mut HashMap<String, String>,\n\n ) -> Result<String, failure::Error> {\n", "file_path": "src/api.rs", "rank": 44, "score": 142733.01625705726 }, { "content": "pub fn create_tag_string(tags: &[String]) -> String {\n\n tags.iter()\n\n .map(|tag| tag.to_string())\n\n .collect::<Vec<String>>()\n\n .join(\"|\")\n\n}\n\n\n", "file_path": "src/ui/util.rs", "rank": 45, "score": 141421.04459655273 }, { "content": "pub fn convert_map_to_string<K: Debug + Eq + Hash + ToString, V: Debug + ToString>(\n\n map: &HashMap<K, V>,\n\n) -> String {\n\n let mut string: String = String::new();\n\n for (key, value) in map.iter() {\n\n string.push_str(&key.to_string());\n\n string.push_str(\"=\");\n\n string.push_str(&value.to_string());\n\n string.push_str(\"&\");\n\n }\n\n string\n\n}\n", "file_path": "src/api.rs", "rank": 46, "score": 134621.05665096143 }, { "content": "pub fn create_datetime_string(timestamp: u64) -> String {\n\n let d = UNIX_EPOCH + Duration::from_millis(timestamp);\n\n let datetime = DateTime::<Utc>::from(d);\n\n datetime.format(\"%Y-%m-%d\").to_string()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n 
#[test]\n\n fn test_create_datatime_string() {\n\n assert_eq!(create_datetime_string(1576472814620), \"2019-12-16\");\n\n }\n\n}\n", "file_path": "src/ui/util.rs", "rank": 47, "score": 129030.56616053965 }, { "content": "pub fn create_artist_string(artists: &[Artist]) -> String {\n\n artists\n\n .iter()\n\n .map(|artist| artist.name.to_string())\n\n .collect::<Vec<String>>()\n\n .join(\"/ \")\n\n}\n\n\n", "file_path": "src/ui/util.rs", "rank": 48, "score": 129030.56616053965 }, { "content": "pub fn draw_help_menu<B>(f: &mut Frame<B>)\n\nwhere\n\n B: Backend,\n\n{\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .constraints([Constraint::Percentage(100)].as_ref())\n\n .margin(2)\n\n .split(f.size());\n\n\n\n let white = Style::default().fg(Color::White);\n\n let gray = Style::default().fg(Color::White);\n\n let header = vec![\"Description\", \"Event\", \"Context\"];\n\n\n\n let help_doc: Vec<[&str; 3]> = vec![\n\n [\"Increase volume\", \"+\", \"General\"],\n\n [\"Decrease volume\", \"-\", \"General\"],\n\n [\"Skip to next track\", \"n\", \"General\"],\n\n [\"Skip to previous track\", \"p\", \"General\"],\n\n [\"Toggle repeat mode\", \"r\", \"General\"],\n", "file_path": "src/ui/mod.rs", "rank": 49, "score": 125906.34190655084 }, { "content": "pub fn millis_to_minutes(millis: u64) -> String {\n\n let minutes = millis / 60000;\n\n let seconds = (millis % 60000) / 1000;\n\n let seconds_display = if seconds < 10 {\n\n format!(\"0{}\", seconds)\n\n } else {\n\n format!(\"{}\", seconds)\n\n };\n\n\n\n if seconds == 60 {\n\n format!(\"{}:00\", minutes + 1)\n\n } else {\n\n format!(\"{}:{}\", minutes, seconds_display)\n\n }\n\n}\n\n\n", "file_path": "src/ui/util.rs", "rank": 50, "score": 122831.78072262286 }, { "content": "#[cfg(feature = \"dbus_mpris\")]\n\npub fn dbus_mpris_server(tx: Sender<PlayerCommand>) -> Result<(), Box<dyn Error>> {\n\n // Let's start by starting up a connection to the session bus and request a name.\n\n let mut c = 
Connection::new_session()?;\n\n c.request_name(\"org.mpris.MediaPlayer2.ncmt\", false, true, false)?;\n\n\n\n // The choice of factory tells us what type of tree we want,\n\n // and if we want any extra data inside. We pick the simplest variant.\n\n let f = Factory::new_fnmut::<()>();\n\n let tx = Arc::new(tx);\n\n\n\n let method_raise = f.method(\"Raise\", (), move |m| {\n\n let mret = m.msg.method_return();\n\n Ok(vec![mret])\n\n });\n\n\n\n let method_quit = {\n\n // let local_tx = tx.clone();\n\n f.method(\"Quit\", (), move |m| {\n\n // local_spirc.shutdown();\n\n let mret = m.msg.method_return();\n", "file_path": "src/dbus_mpris.rs", "rank": 51, "score": 117849.76049918678 }, { "content": "// display track progress for progress bar\n\npub fn display_track_progress(progress: u64, track_duration: u64) -> String {\n\n let duration = millis_to_minutes(u64::from(track_duration));\n\n let progress_display = millis_to_minutes(progress);\n\n let remaining = millis_to_minutes(u64::from(track_duration) - progress);\n\n\n\n format!(\"{}/{} (-{})\", progress_display, duration, remaining,)\n\n}\n\n\n", "file_path": "src/ui/util.rs", "rank": 52, "score": 106502.85928893159 }, { "content": "fn send_msg(tx: Sender<String>) {\n\n tx.send(\"ok\".to_owned()).expect(\"send error\");\n\n}\n", "file_path": "src/player/fetch.rs", "rank": 53, "score": 103950.88003828809 }, { "content": "pub fn up_event(key: KeyEvent) -> bool {\n\n if key.modifiers == KeyModifiers::NONE {\n\n match key.code {\n\n KeyCode::Up | KeyCode::Char('k') => true,\n\n _ => false,\n\n }\n\n } else if key.modifiers == KeyModifiers::CONTROL {\n\n key.code == KeyCode::Char('p')\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/handlers/common_events.rs", "rank": 54, "score": 89320.41049406292 }, { "content": "pub fn down_event(key: KeyEvent) -> bool {\n\n if key.modifiers == KeyModifiers::NONE {\n\n match key.code {\n\n KeyCode::Down | KeyCode::Char('j') => true,\n\n _ => false,\n\n }\n\n } else if 
key.modifiers == KeyModifiers::CONTROL {\n\n key.code == KeyCode::Char('n')\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/handlers/common_events.rs", "rank": 55, "score": 89320.41049406292 }, { "content": "pub fn on_down_press_handler<T>(selection_data: &[T], selection_index: Option<usize>) -> usize {\n\n match selection_index {\n\n Some(selection_index) => {\n\n if !selection_data.is_empty() {\n\n let next_index = selection_index + 1;\n\n if next_index > selection_data.len() - 1 {\n\n return 0;\n\n } else {\n\n return next_index;\n\n }\n\n }\n\n 0\n\n }\n\n None => 0,\n\n }\n\n}\n\n\n", "file_path": "src/handlers/common_events.rs", "rank": 56, "score": 87932.94645093216 }, { "content": "pub fn on_up_press_handler<T>(selection_data: &[T], selection_index: Option<usize>) -> usize {\n\n match selection_index {\n\n Some(selection_index) => {\n\n if !selection_data.is_empty() {\n\n if selection_index > 0 {\n\n return selection_index - 1;\n\n } else {\n\n return selection_data.len() - 1;\n\n }\n\n }\n\n 0\n\n }\n\n None => 0,\n\n }\n\n}\n\n\n", "file_path": "src/handlers/common_events.rs", "rank": 57, "score": 87932.94645093216 }, { "content": "fn draw_table<B>(\n\n f: &mut Frame<B>,\n\n app: &App,\n\n layout_chunk: Rect,\n\n table_layout: (&str, &[TableHeader]), // (title, header colums)\n\n items: &[TableItem], // The nested vector must have the same length as the `header_columns`\n\n selected_index: usize,\n\n highlight_state: (bool, bool),\n\n) where\n\n B: Backend,\n\n{\n\n let selected_style = get_color(highlight_state).add_modifier(Modifier::BOLD);\n\n\n\n // caculate index and row\n\n let interval = (layout_chunk.height / 2) as usize;\n\n let (row_items, margin) = if !items.is_empty() {\n\n let count = (layout_chunk.height - 4) as usize;\n\n let total = items.len();\n\n if selected_index >= count - interval && total > count {\n\n if selected_index >= total - interval {\n", "file_path": "src/ui/mod.rs", "rank": 58, "score": 87145.74329868656 }, { 
"content": "pub fn right_event(key: KeyEvent) -> bool {\n\n if key.modifiers == KeyModifiers::NONE {\n\n match key.code {\n\n KeyCode::Right | KeyCode::Char('l') => true,\n\n _ => false,\n\n }\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/handlers/common_events.rs", "rank": 59, "score": 86757.79266169896 }, { "content": "pub fn left_event(key: KeyEvent) -> bool {\n\n if key.modifiers == KeyModifiers::NONE {\n\n match key.code {\n\n KeyCode::Left | KeyCode::Char('h') => true,\n\n _ => false,\n\n }\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/handlers/common_events.rs", "rank": 60, "score": 86757.79266169896 }, { "content": "// draw selectable list\n\nfn draw_selectable_list<B, S>(\n\n f: &mut Frame<B>,\n\n layout_chunk: Rect,\n\n title: &str,\n\n items: &[S],\n\n highlight_state: (bool, bool),\n\n selected_index: Option<usize>,\n\n) where\n\n B: Backend,\n\n S: std::convert::AsRef<str>,\n\n{\n\n let list =\n\n List::new::<Vec<ListItem>>(items.iter().map(|v| ListItem::new(v.as_ref())).collect())\n\n .block(\n\n Block::default()\n\n .title(Spans::from(Span::styled(title, get_color(highlight_state))))\n\n .borders(Borders::ALL)\n\n .border_style(get_color(highlight_state)),\n\n )\n\n .style(Style::default().fg(Color::White))\n\n .highlight_style(get_color(highlight_state).add_modifier(Modifier::BOLD));\n\n let mut state = ListState::default();\n\n state.select(selected_index);\n\n f.render_stateful_widget(list, layout_chunk, &mut state)\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 61, "score": 79762.75170336138 }, { "content": "#[inline(always)]\n\nfn bres_circle(painter: &mut Painter, color: Color, cx: isize, cy: isize, radius: isize) {\n\n let mut x = 0;\n\n let mut y = radius;\n\n let mut d = 3 - 2 * radius as isize;\n\n draw_circle(painter, color, cx, cy, x, y);\n\n while y >= x {\n\n x += 1;\n\n if d > 0 {\n\n y -= 1;\n\n d = d + 4 * (x - y) + 10;\n\n } else {\n\n d = d + 4 * x + 6;\n\n }\n\n draw_circle(painter, color, cx, cy, 
x, y)\n\n }\n\n}\n\n\n\nimpl<'a> Shape for Circle {\n\n fn draw(&self, painter: &mut Painter) {\n\n bres_circle(painter, self.color, self.x, self.y, self.radius)\n\n }\n\n}\n", "file_path": "src/ui/circle.rs", "rank": 62, "score": 71512.0559282132 }, { "content": "pub fn get_color((is_active, is_hovered): (bool, bool)) -> Style {\n\n match (is_active, is_hovered) {\n\n (true, _) => Style::default().fg(Color::LightCyan),\n\n (false, true) => Style::default().fg(Color::Magenta),\n\n _ => Style::default().fg(Color::Gray),\n\n }\n\n}\n\n\n", "file_path": "src/ui/util.rs", "rank": 63, "score": 71032.89277320802 }, { "content": "#[inline(always)]\n\nfn draw_circle(painter: &mut Painter, color: Color, cx: isize, cy: isize, x: isize, y: isize) {\n\n painter.paint(max(cx + x, 0) as usize, max(cy + y, 0) as usize, color);\n\n painter.paint(max(cx - x, 0) as usize, max(cy + y, 0) as usize, color);\n\n painter.paint(max(cx + x, 0) as usize, max(cy - y, 0) as usize, color);\n\n painter.paint(max(cx - x, 0) as usize, max(cy - y, 0) as usize, color);\n\n painter.paint(max(cx + y, 0) as usize, max(cy + x, 0) as usize, color);\n\n painter.paint(max(cx - y, 0) as usize, max(cy + x, 0) as usize, color);\n\n painter.paint(max(cx + y, 0) as usize, max(cy - x, 0) as usize, color);\n\n painter.paint(max(cx - y, 0) as usize, max(cy - x, 0) as usize, color);\n\n}\n\n\n", "file_path": "src/ui/circle.rs", "rank": 64, "score": 70298.31208865183 }, { "content": "// `percentage` param needs to be between 0 and 1\n\npub fn get_percentage_width(width: u16, percentage: f32) -> u16 {\n\n let padding = 3;\n\n let width = width - padding;\n\n (f32::from(width) * percentage) as u16\n\n}\n\n\n", "file_path": "src/ui/util.rs", "rank": 65, "score": 68598.56225904041 }, { "content": "// list ui struct\n\nstruct ListUI {\n\n selected_index: usize,\n\n items: Vec<TableItem>,\n\n title: String,\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 66, "score": 65346.69501854062 }, { "content": " loop {\n\n 
tx.send(Event::Tick).unwrap();\n\n thread::sleep(config.tick_rate);\n\n }\n\n })\n\n };\n\n Events { rx }\n\n }\n\n\n\n pub fn next(&self) -> Result<Event<KeyEvent>, mpsc::RecvError> {\n\n self.rx.recv()\n\n }\n\n}\n", "file_path": "src/util/event.rs", "rank": 67, "score": 64629.75034093431 }, { "content": "use std::sync::mpsc;\n\nuse std::thread;\n\nuse std::time::Duration;\n\n\n\nuse crossterm::event::{read, Event as TermEvent, KeyCode, KeyEvent, KeyModifiers};\n\n\n\npub enum Event<I> {\n\n Input(I),\n\n Tick,\n\n}\n\n\n\n/// A small event handler that wrap termion input and tick events. Each event\n\n/// type is handled in its own thread and returned to a common `Receiver`\n\npub struct Events {\n\n rx: mpsc::Receiver<Event<KeyEvent>>,\n\n // input_handle: thread::JoinHandle<()>,\n\n // tick_handle: thread::JoinHandle<()>,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n", "file_path": "src/util/event.rs", "rank": 68, "score": 64623.05458586596 }, { "content": " let (tx, rx) = mpsc::channel();\n\n let _input_handle = {\n\n let tx = tx.clone();\n\n thread::spawn(move || loop {\n\n if let Ok(k) = read() {\n\n if let TermEvent::Key(ke) = k {\n\n if let Err(_) = tx.send(Event::Input(ke)) {\n\n return;\n\n }\n\n if ke == config.exit_key {\n\n return;\n\n }\n\n }\n\n }\n\n })\n\n };\n\n let _tick_handle = {\n\n let tx = tx.clone();\n\n thread::spawn(move || {\n\n let tx = tx.clone();\n", "file_path": "src/util/event.rs", "rank": 69, "score": 64621.416302341335 }, { "content": "pub struct Config {\n\n pub exit_key: KeyEvent,\n\n pub tick_rate: Duration,\n\n}\n\n\n\nimpl Default for Config {\n\n fn default() -> Config {\n\n Config {\n\n exit_key: KeyEvent::new(KeyCode::Char('c'), KeyModifiers::CONTROL),\n\n tick_rate: Duration::from_millis(1000),\n\n }\n\n }\n\n}\n\n\n\nimpl Events {\n\n pub fn new() -> Events {\n\n Events::with_config(Config::default())\n\n }\n\n\n\n pub fn with_config(config: Config) -> Events {\n", "file_path": "src/util/event.rs", "rank": 70, "score": 
64620.02873753165 }, { "content": "use super::super::app::App;\n\nuse super::common_events;\n\nuse crossterm::event::{KeyCode, KeyEvent, KeyModifiers};\n\n\n", "file_path": "src/handlers/home.rs", "rank": 71, "score": 63640.539815357966 }, { "content": "extern crate chrono;\n\nuse super::super::model::artist::Artist;\n\nuse chrono::prelude::DateTime;\n\nuse chrono::Utc;\n\nuse std::time::{Duration, UNIX_EPOCH};\n\nuse tui::style::{Color, Style};\n\n\n", "file_path": "src/ui/util.rs", "rank": 72, "score": 63629.665181993114 }, { "content": "extern crate num_bigint;\n\npub mod event;\n\nuse num_bigint::BigUint;\n\nuse rand::distributions::Alphanumeric;\n\nuse rand::{thread_rng, Rng};\n\nuse serde::Serialize;\n\n// use serde_json::Value;\n\n// use serde_json::map::Map;\n\nuse openssl::{\n\n hash::{hash, MessageDigest},\n\n symm::{encrypt, Cipher},\n\n};\n\nuse serde_urlencoded;\n\n\n\nstatic MODULUS:&str = \"00e0b509f6259df8642dbc35662901477df22677ec152b5ff68ace615bb7b725152b3ab17a876aea8a5aa76d2e417629ec4ee341f56135fccf695280104e0312ecbda92557c93870114af6c9d05c4f7f0c3685b7a46bee255932575cce10b424d813cfe4875d3e82047b97ddef52741d546b8e289dc6935b3ece0462db0a22b8e7\";\n\nstatic NONCE: &str = \"0CoJUm6Qyw8W8jud\";\n\nstatic PUBKEY: &str = \"010001\";\n\n\n\npub struct Encrypt;\n\n\n", "file_path": "src/util/mod.rs", "rank": 73, "score": 63486.751081469 }, { "content": "#[allow(unused)]\n\nimpl Encrypt {\n\n pub fn encrypt_id(id: String) -> String {\n\n let magic = b\"3go8&$8*3*3h0k(2)2\";\n\n let magic_len = magic.len();\n\n let id = id;\n\n let mut song_id = id.clone().into_bytes();\n\n id.as_bytes().iter().enumerate().for_each(|(i, sid)| {\n\n song_id[i] = *sid ^ magic[i % magic_len];\n\n });\n\n let result = hash(MessageDigest::md5(), &song_id).unwrap();\n\n base64::encode_config(&hex::encode(result), base64::URL_SAFE)\n\n .replace(\"/\", \"_\")\n\n .replace(\"+\", \"-\")\n\n }\n\n\n\n pub fn encrypt_login(text: impl Serialize + std::fmt::Debug) -> String {\n\n let 
data = serde_json::to_string(&text).unwrap();\n\n let secret = Self.create_key(16);\n\n let secret = \"e0e80547fa3ecd5a\".to_owned();\n", "file_path": "src/util/mod.rs", "rank": 74, "score": 63486.182486726335 }, { "content": " let params = Self.aes(Self.aes(data, NONCE), &secret);\n\n #[allow(non_snake_case)]\n\n let encSecKey = Self.rsa(secret);\n\n // let mut res = Map::new();\n\n // res.insert(\"params\".to_owned(), params.into());\n\n // res.insert(\"encSecKey\".to_owned(), encSecKey.into());\n\n // Value::Object(res)\n\n let meal = &[(\"params\", params), (\"encSecKey\", encSecKey)];\n\n serde_urlencoded::to_string(&meal).unwrap_or(\"\".to_owned())\n\n }\n\n\n\n fn aes(&self, text: String, key: &str) -> String {\n\n let pad = 16 - text.len() % 16;\n\n let p = pad as u8 as char;\n\n let mut text = text;\n\n for _ in 0..pad {\n\n text.push(p);\n\n }\n\n let text = text.as_bytes();\n\n let cipher = Cipher::aes_128_cbc();\n", "file_path": "src/util/mod.rs", "rank": 75, "score": 63481.43774911312 }, { "content": " let ciphertext = encrypt(cipher, key.as_bytes(), Some(b\"0102030405060708\"), text).unwrap();\n\n base64::encode(&ciphertext)\n\n }\n\n\n\n fn rsa(&self, text: String) -> String {\n\n let text = text.chars().rev().collect::<String>();\n\n let text = BigUint::parse_bytes(hex::encode(text).as_bytes(), 16).unwrap();\n\n let pubkey = BigUint::parse_bytes(PUBKEY.as_bytes(), 16).unwrap();\n\n let modulus = BigUint::parse_bytes(MODULUS.as_bytes(), 16).unwrap();\n\n let pow = text.modpow(&pubkey, &modulus);\n\n pow.to_str_radix(16)\n\n }\n\n\n\n fn create_key(&self, len: usize) -> String {\n\n return hex::encode(\n\n thread_rng()\n\n .sample_iter(&Alphanumeric)\n\n .take(len)\n\n .collect::<String>(),\n\n )[..16]\n\n .to_string();\n\n }\n\n}\n", "file_path": "src/util/mod.rs", "rank": 76, "score": 63480.08481233725 }, { "content": "pub mod album;\n\npub mod artist;\n\npub mod dj;\n\npub mod lyric;\n\npub mod playlist;\n\npub mod search;\n\npub mod song;\n\npub 
mod user;\n", "file_path": "src/model/mod.rs", "rank": 77, "score": 63426.54696442229 }, { "content": "extern crate rodio;\n\nextern crate tempfile;\n\nextern crate tokio;\n\n\n\nmod fetch;\n\nmod player;\n\n// mod sink;\n\n// mod range_set;\n\nmod track;\n\n// mod fetch_data;\n\n\n\nuse player::Player;\n\n// use sink::find;\n\nuse std::sync::mpsc::Sender;\n\n\n\n#[allow(unused)]\n\npub enum PlayerCommand {\n\n Play,\n\n Pause,\n\n Stop,\n", "file_path": "src/player/mod.rs", "rank": 78, "score": 63261.949612270444 }, { "content": " pub player: player::Player,\n\n}\n\n\n\nimpl Nplayer {\n\n pub fn new() -> Nplayer {\n\n // let backend = find(None).unwrap();\n\n let mplayer = Player::new();\n\n debug!(\"init player\");\n\n Nplayer { player: mplayer }\n\n }\n\n\n\n pub fn play_url(&mut self, url: &str) {\n\n self.player.load(url.to_owned(), true);\n\n }\n\n\n\n pub fn is_playing(&mut self) -> bool {\n\n self.player.status()\n\n }\n\n\n\n pub fn pause(&mut self) {\n", "file_path": "src/player/mod.rs", "rank": 79, "score": 63259.26924633462 }, { "content": " match self.player.current.clone() {\n\n Some(current) => Some(current.duration.as_millis() as u64),\n\n None => None,\n\n }\n\n }\n\n\n\n pub fn seek_forwards(&mut self) {\n\n // let next_duration = self.get_position().unwrap() + 3000;\n\n // self.player.seek(ClockTime::from_mseconds(next_duration))\n\n }\n\n\n\n pub fn seek_backwards(&mut self) {\n\n // let song_progress_ms = self.get_position().unwrap();\n\n // let next_duration = if song_progress_ms < 3000 {\n\n // 0\n\n // } else {\n\n // song_progress_ms - 3000\n\n // };\n\n // self.player.seek(ClockTime::from_mseconds(next_duration))\n\n }\n", "file_path": "src/player/mod.rs", "rank": 80, "score": 63251.27881969172 }, { "content": " self.player.pause()\n\n }\n\n\n\n pub fn play(&mut self) {\n\n self.player.play()\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn stop(&self) {\n\n self.player.stop()\n\n }\n\n\n\n pub fn get_position(&self) -> Option<u64> {\n\n match 
self.player.current.clone() {\n\n Some(current) => Some(current.elapsed().as_millis() as u64),\n\n None => None,\n\n }\n\n }\n\n\n\n pub fn get_duration(&self) -> Option<u64> {\n", "file_path": "src/player/mod.rs", "rank": 81, "score": 63249.368387445524 }, { "content": " self.player.set_volume(volume);\n\n }\n\n\n\n pub fn decrease_volume(&mut self) {\n\n let current = self.player.get_volume();\n\n let volume = if current > 0.1 {\n\n current - 0.1_f32\n\n } else {\n\n 0.0_f32\n\n };\n\n self.player.set_volume(volume);\n\n }\n\n}\n", "file_path": "src/player/mod.rs", "rank": 82, "score": 63247.92304474593 }, { "content": "\n\n #[allow(unused)]\n\n pub fn seek(&mut self, offset: i32) {\n\n let next_duration = self.get_position().unwrap() as i32 + (offset * 1000);\n\n // self.player\n\n // .seek(ClockTime::from_mseconds(next_duration as u64))\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn position(&mut self, position: u64) {\n\n // self.player.seek(ClockTime::from_mseconds(position))\n\n }\n\n\n\n pub fn increase_volume(&mut self) {\n\n let current = self.player.get_volume();\n\n let volume = if current < 9.9 {\n\n current + 0.1_f32\n\n } else {\n\n 10.0_f32\n\n };\n", "file_path": "src/player/mod.rs", "rank": 83, "score": 63246.07047346485 }, { "content": " PlayPause,\n\n Seek(i32),\n\n Next,\n\n Previous,\n\n Load(String),\n\n Position(i32, u64),\n\n Metadata(MetaInfo, Sender<String>),\n\n}\n\n\n\n#[allow(unused)]\n\npub enum MetaInfo {\n\n Volume,\n\n Shuffle,\n\n Position,\n\n LoopStatus,\n\n Status,\n\n Info,\n\n}\n\n\n\npub struct Nplayer {\n", "file_path": "src/player/mod.rs", "rank": 84, "score": 63242.840610308696 }, { "content": " None => vec![],\n\n };\n\n let artists = match &app.search_results.artists {\n\n Some(r) => r.iter().map(|item| item.to_owned().name).collect(),\n\n None => vec![],\n\n };\n\n let albums = match &app.search_results.albums {\n\n Some(r) => r\n\n .iter()\n\n .map(|item| {\n\n format!(\n\n \"{} - {}\",\n\n 
item.name.to_owned().unwrap(),\n\n create_artist_string(&[item.artist.to_owned().unwrap()])\n\n )\n\n })\n\n .collect(),\n\n None => vec![],\n\n };\n\n let djradios = match &app.search_results.djradios {\n", "file_path": "src/ui/mod.rs", "rank": 85, "score": 62227.735120787635 }, { "content": "pub mod circle;\n\nmod util;\n\n\n\nuse super::app::{ActiveBlock, App, RepeatState, RouteId, RECOMMEND_OPTIONS};\n\nuse tui::backend::Backend;\n\nuse tui::layout::{Alignment, Constraint, Direction, Layout, Rect};\n\nuse tui::style::{Color, Modifier, Style};\n\nuse tui::text::{Span, Spans, Text};\n\nuse tui::widgets::{canvas::Canvas, Block, Borders, Gauge, Paragraph, Row, Table, Tabs};\n\nuse tui::widgets::{List, ListItem, ListState, Wrap};\n\nuse tui::Frame;\n\nuse util::{\n\n create_artist_string, create_datetime_string, create_tag_string, display_track_progress,\n\n get_color, get_percentage_width,\n\n};\n\n\n\n// table item for render\n\n#[derive(Clone, Debug)]\n\npub struct TableItem {\n\n id: String,\n\n format: Vec<String>,\n\n}\n\n\n\npub struct TableHeader<'a> {\n\n text: &'a str,\n\n width: u16,\n\n}\n\n\n", "file_path": "src/ui/mod.rs", "rank": 86, "score": 62227.61710201916 }, { "content": " text: \"Artist\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n ];\n\n\n\n let mut num = match app.album_list.to_owned() {\n\n Some(albumlist) => albumlist.selected_page * (app.block_height - 4),\n\n None => 0,\n\n };\n\n\n\n let album_ui = match &app.album_list {\n\n Some(album_list) => Some(ListUI {\n\n items: album_list\n\n .albums\n\n .iter()\n\n .map(|item| {\n\n num += 1;\n\n TableItem {\n\n id: item.id.clone().unwrap_or_else(|| 0).to_string(),\n\n format: vec![\n", "file_path": "src/ui/mod.rs", "rank": 87, "score": 62225.80280066168 }, { "content": " format!(\n\n \"{} - {}\",\n\n item.name.to_owned().unwrap(),\n\n create_artist_string(&item.artists.to_owned().unwrap())\n\n )\n\n })\n\n .collect(),\n\n None => vec![],\n\n };\n\n let playlists = 
match &app.search_results.playlists {\n\n Some(r) => r\n\n .iter()\n\n .map(|item| {\n\n format!(\n\n \"{} - {}\",\n\n item.name.to_owned().unwrap(),\n\n item.creator.to_owned().unwrap().nickname.unwrap()\n\n )\n\n })\n\n .collect(),\n", "file_path": "src/ui/mod.rs", "rank": 88, "score": 62224.59954331571 }, { "content": " text: \"Artist\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n ];\n\n\n\n let mut num = 0;\n\n let album_ui = match &app.artist_albums {\n\n Some(album_list) => Some(ListUI {\n\n items: album_list\n\n .albums\n\n .iter()\n\n .map(|item| {\n\n num += 1;\n\n TableItem {\n\n id: item.id.clone().unwrap_or_else(|| 0).to_string(),\n\n format: vec![\n\n num.to_string(),\n\n item.to_owned().name.unwrap().to_string(),\n\n item.artist.to_owned().unwrap().name.to_string(),\n\n ],\n", "file_path": "src/ui/mod.rs", "rank": 89, "score": 62224.59710989433 }, { "content": "\n\n let mut num = match app.artist_list.to_owned() {\n\n Some(artistlist) => artistlist.selected_page * (app.block_height - 4),\n\n None => 0,\n\n };\n\n\n\n let artist_ui = match &app.artist_list {\n\n Some(artist_list) => Some(ListUI {\n\n items: artist_list\n\n .artists\n\n .iter()\n\n .map(|item| {\n\n num += 1;\n\n TableItem {\n\n id: item.id.to_string(),\n\n format: vec![num.to_string(), item.to_owned().name],\n\n }\n\n })\n\n .collect::<Vec<TableItem>>(),\n\n title: format!(\"Discover Artists\",),\n", "file_path": "src/ui/mod.rs", "rank": 90, "score": 62224.54736380056 }, { "content": " text: \"listener Count\",\n\n width: get_percentage_width(layout_chunk.width, 0.2),\n\n },\n\n TableHeader {\n\n text: \"Date\",\n\n width: get_percentage_width(layout_chunk.width, 0.2),\n\n },\n\n ];\n\n\n\n let program_ui = match &app.program_list {\n\n Some(program_list) => Some(ListUI {\n\n items: program_list\n\n .dj_programs\n\n .iter()\n\n .map(|item| {\n\n let num_string = match &app.current_playing {\n\n Some(track) => {\n\n if item.mainSong.id.to_string() == 
track.id.unwrap().to_string() {\n\n format!(\"|> {}\", item.serialNum.to_string())\n\n } else {\n", "file_path": "src/ui/mod.rs", "rank": 91, "score": 62224.304359062786 }, { "content": "\n\n let mut num = 0;\n\n let album_ui = match &app.selected_album {\n\n Some(selected_album) => Some(ListUI {\n\n items: selected_album\n\n .tracks\n\n .iter()\n\n .map(|item| {\n\n num += 1;\n\n TableItem {\n\n id: item.id.clone().unwrap_or_else(|| 0).to_string(),\n\n format: vec![num.to_string(), item.to_owned().name.unwrap().to_string()],\n\n }\n\n })\n\n .collect::<Vec<TableItem>>(),\n\n title: format!(\n\n \"{} by {}\",\n\n selected_album.to_owned().album.name.unwrap(),\n\n create_artist_string(&[selected_album.to_owned().album.artist.unwrap()])\n\n ),\n", "file_path": "src/ui/mod.rs", "rank": 92, "score": 62224.27967687255 }, { "content": "\n\n let mut num = match app.djradio_list.to_owned() {\n\n Some(djradio_list) => djradio_list.selected_page * (app.block_height - 4),\n\n None => 0,\n\n };\n\n\n\n let djradio_ui = match &app.djradio_list {\n\n Some(djradio_list) => Some(ListUI {\n\n items: djradio_list\n\n .djradios\n\n .iter()\n\n .map(|item| {\n\n num += 1;\n\n TableItem {\n\n id: item.id.to_string(),\n\n format: vec![num.to_string(), item.to_owned().name],\n\n }\n\n })\n\n .collect::<Vec<TableItem>>(),\n\n title: format!(\"My Subscribe DjRadio\",),\n", "file_path": "src/ui/mod.rs", "rank": 93, "score": 62224.201507234975 }, { "content": " num.to_string(),\n\n item.to_owned().name.unwrap().to_string(),\n\n item.artist.to_owned().unwrap().name.to_string(),\n\n ],\n\n }\n\n })\n\n .collect::<Vec<TableItem>>(),\n\n title: format!(\"Discover Albums\",),\n\n selected_index: album_list.selected_index,\n\n }),\n\n None => None,\n\n };\n\n\n\n if let Some(album_ui) = album_ui {\n\n draw_table(\n\n f,\n\n &app,\n\n layout_chunk,\n\n (&album_ui.title, &header),\n\n &album_ui.items,\n\n album_ui.selected_index,\n\n highlight_state,\n\n );\n\n };\n\n}\n\n\n", "file_path": 
"src/ui/mod.rs", "rank": 94, "score": 62223.938170723944 }, { "content": " text: \"Count\",\n\n width: get_percentage_width(layout_chunk.width, 0.05),\n\n },\n\n TableHeader {\n\n text: \"Creator\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n TableHeader {\n\n text: \"Tags\",\n\n width: get_percentage_width(layout_chunk.width, 0.3),\n\n },\n\n ];\n\n\n\n let mut num = match app.playlist_list.to_owned() {\n\n Some(playlist) => playlist.selected_page * (app.block_height - 4),\n\n None => 0,\n\n };\n\n let playlist_ui = match &app.playlist_list {\n\n Some(playlist) => Some(ListUI {\n\n items: playlist\n", "file_path": "src/ui/mod.rs", "rank": 95, "score": 62221.36073425488 }, { "content": " RepeatState::Track => \"Track\",\n\n RepeatState::All => \"All\",\n\n RepeatState::Shuffle => \"Shuffle\",\n\n },\n\n };\n\n\n\n let title = format!(\"{} | Repeat: {}\", state_title, repeat_text);\n\n\n\n let current_route = app.get_current_route();\n\n let highlight_state = (\n\n current_route.active_block == ActiveBlock::PlayBar,\n\n current_route.hovered_block == ActiveBlock::PlayBar,\n\n );\n\n\n\n let (track_name, artist_name) = match &app.current_playing {\n\n Some(track) => (\n\n track.name.to_owned().unwrap(),\n\n match &track.artists {\n\n Some(artists) => create_artist_string(&artists),\n\n None => \"Unknown\".to_string(),\n", "file_path": "src/ui/mod.rs", "rank": 96, "score": 62219.90844305882 } ]
Rust
src/main.rs
samhamnam/conways_game_of_crabs
1c8afc822ea1c432c9c3ee281380d524ed71199a
use pixels::{Error, Pixels, SurfaceTexture}; use rand::prelude::*; use winit::{ dpi::LogicalSize, event::{Event, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::WindowBuilder, }; fn main() -> Result<(), Error> { println!("Random start: {}", random::<i32>()); let width = 400; let height = 400; let event_loop = EventLoop::new(); let window = { let size = LogicalSize::new(width as f64, height as f64); WindowBuilder::new() .with_title("Crabs") .with_inner_size(size) .with_min_inner_size(size) .with_resizable(false) .build(&event_loop) .unwrap() }; let mut pixels = { let window_size = window.inner_size(); let surface_texture = SurfaceTexture::new(window_size.width, window_size.height, &window); Pixels::new(width, height, surface_texture)? }; let mut world = World::new([54, 139, 187, 255], [190, 25, 49, 255], width, height); event_loop.run(move |event, _, control_flow| { if let Event::RedrawRequested(_) = event { world.draw(pixels.get_frame()); if pixels .render() .map_err(|e| panic!("pixels.render() failed: {}", e)) .is_err() { *control_flow = ControlFlow::Exit; } } if let Event::WindowEvent { event: e, .. 
} = event { if e == WindowEvent::CloseRequested { *control_flow = ControlFlow::Exit } else if let WindowEvent::Resized(new_size) = e { println!("{:?}", new_size); pixels.resize_surface(new_size.width, new_size.height) } } window.request_redraw(); }); } pub struct World { clear_color: [u8; 4], width: u32, height: u32, crabs: Vec<bool>, crab_buffer: Vec<bool>, crab_color: [u8; 4], } impl World { pub fn new(clear_color: [u8; 4], crab_color: [u8; 4], width: u32, height: u32) -> Self { let w = width as usize; let h = height as usize; let mut crabs = vec![false; w * h]; crabs.iter_mut().for_each(|c| *c = random()); let crab_buffer = vec![false; w * h]; Self { clear_color, crab_color, width, height, crabs, crab_buffer, } } pub fn get_crab(&self, x: usize, y: usize) -> bool { self.crabs[x + (self.height as usize * y)] } pub fn set_crab(&mut self, val: bool, x: usize, y: usize) { self.crabs[x + (self.height as usize * y)] = val; } pub fn get_crabs_siblings(&self, x: usize, y: usize) -> impl Iterator<Item = bool> { if x == 0 || y == 0 || y >= self.height as usize - 1 || x >= self.width as usize - 1 { vec![].into_iter() } else { let c = &self.crabs; #[allow(clippy::identity_op)] vec![ c[(x - 1) + (self.height as usize * (y - 1))], c[(x + 0) + (self.height as usize * (y - 1))], c[(x + 1) + (self.height as usize * (y - 1))], c[(x - 1) + (self.height as usize * (y + 0))], c[(x + 1) + (self.height as usize * (y + 0))], c[(x - 1) + (self.height as usize * (y + 1))], c[(x + 0) + (self.height as usize * (y + 1))], c[(x + 1) + (self.height as usize * (y + 1))], ] .into_iter() } } pub fn should_crab_live(&self, x: usize, y: usize) -> bool { let alive = self.get_crabs_siblings(x, y).filter(|c| *c).count(); if self.get_crab(x, y) { alive == 2 || alive == 3 } else { alive == 3 } } pub fn draw(&mut self, frame: &mut [u8]) { for i in 0..self.crabs.len() { let x = (i % self.width as usize) as usize; let y = (i / self.width as usize) as usize; let should_live = self.should_crab_live(x, y); 
self.crab_buffer[i] = should_live; let pixel = frame.chunks_exact_mut(4).nth(i).unwrap(); if should_live { pixel.copy_from_slice(&self.crab_color); } else { pixel.copy_from_slice(&self.clear_color); } } self.crabs.copy_from_slice(&self.crab_buffer); } }
use pixels::{Error, Pixels, SurfaceTexture}; use rand::prelude::*; use winit::{ dpi::LogicalSize, event::{Event, WindowEvent}, event_loop::{ControlFlow, EventLoop}, window::WindowBuilder, }; fn main() -> Result<(), Error> { println!("Random start: {}", random::<i32>()); let width = 400; let height = 400; let event_loop = EventLoop::new(); let window = { let size = LogicalSize::new(width as f64, height as f64); WindowBuilder::new() .with_title("Crab
pub struct World { clear_color: [u8; 4], width: u32, height: u32, crabs: Vec<bool>, crab_buffer: Vec<bool>, crab_color: [u8; 4], } impl World { pub fn new(clear_color: [u8; 4], crab_color: [u8; 4], width: u32, height: u32) -> Self { let w = width as usize; let h = height as usize; let mut crabs = vec![false; w * h]; crabs.iter_mut().for_each(|c| *c = random()); let crab_buffer = vec![false; w * h]; Self { clear_color, crab_color, width, height, crabs, crab_buffer, } } pub fn get_crab(&self, x: usize, y: usize) -> bool { self.crabs[x + (self.height as usize * y)] } pub fn set_crab(&mut self, val: bool, x: usize, y: usize) { self.crabs[x + (self.height as usize * y)] = val; } pub fn get_crabs_siblings(&self, x: usize, y: usize) -> impl Iterator<Item = bool> { if x == 0 || y == 0 || y >= self.height as usize - 1 || x >= self.width as usize - 1 { vec![].into_iter() } else { let c = &self.crabs; #[allow(clippy::identity_op)] vec![ c[(x - 1) + (self.height as usize * (y - 1))], c[(x + 0) + (self.height as usize * (y - 1))], c[(x + 1) + (self.height as usize * (y - 1))], c[(x - 1) + (self.height as usize * (y + 0))], c[(x + 1) + (self.height as usize * (y + 0))], c[(x - 1) + (self.height as usize * (y + 1))], c[(x + 0) + (self.height as usize * (y + 1))], c[(x + 1) + (self.height as usize * (y + 1))], ] .into_iter() } } pub fn should_crab_live(&self, x: usize, y: usize) -> bool { let alive = self.get_crabs_siblings(x, y).filter(|c| *c).count(); if self.get_crab(x, y) { alive == 2 || alive == 3 } else { alive == 3 } } pub fn draw(&mut self, frame: &mut [u8]) { for i in 0..self.crabs.len() { let x = (i % self.width as usize) as usize; let y = (i / self.width as usize) as usize; let should_live = self.should_crab_live(x, y); self.crab_buffer[i] = should_live; let pixel = frame.chunks_exact_mut(4).nth(i).unwrap(); if should_live { pixel.copy_from_slice(&self.crab_color); } else { pixel.copy_from_slice(&self.clear_color); } } self.crabs.copy_from_slice(&self.crab_buffer); } }
s") .with_inner_size(size) .with_min_inner_size(size) .with_resizable(false) .build(&event_loop) .unwrap() }; let mut pixels = { let window_size = window.inner_size(); let surface_texture = SurfaceTexture::new(window_size.width, window_size.height, &window); Pixels::new(width, height, surface_texture)? }; let mut world = World::new([54, 139, 187, 255], [190, 25, 49, 255], width, height); event_loop.run(move |event, _, control_flow| { if let Event::RedrawRequested(_) = event { world.draw(pixels.get_frame()); if pixels .render() .map_err(|e| panic!("pixels.render() failed: {}", e)) .is_err() { *control_flow = ControlFlow::Exit; } } if let Event::WindowEvent { event: e, .. } = event { if e == WindowEvent::CloseRequested { *control_flow = ControlFlow::Exit } else if let WindowEvent::Resized(new_size) = e { println!("{:?}", new_size); pixels.resize_surface(new_size.width, new_size.height) } } window.request_redraw(); }); }
function_block-function_prefixed
[]
Rust
xngin-compute/src/cmp.rs
jiangzhe/xngin
57f3e2070a3bf52dbfda28750038302e330eca12
use crate::error::{Error, Result}; use crate::BinaryEval; use xngin_datatype::PreciseType; use xngin_expr::PredFuncKind; use xngin_storage::attr::Attr; use xngin_storage::bitmap::Bitmap; use xngin_storage::codec::{Codec, Single}; use xngin_storage::repr::ByteRepr; use xngin_storage::sel::Sel; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum CmpKind { Equal, Greater, GreaterEqual, Less, LessEqual, NotEqual, } impl CmpKind { #[inline] pub fn from_pred(kind: PredFuncKind) -> Option<Self> { let res = match kind { PredFuncKind::Equal => CmpKind::Equal, PredFuncKind::Greater => CmpKind::Greater, PredFuncKind::GreaterEqual => CmpKind::GreaterEqual, PredFuncKind::Less => CmpKind::Less, PredFuncKind::LessEqual => CmpKind::LessEqual, PredFuncKind::NotEqual => CmpKind::NotEqual, _ => return None, }; Some(res) } #[inline] pub fn eval(&self, lhs: &Attr, rhs: &Attr, sel: Option<&Sel>) -> Result<Attr> { match (self, lhs.ty, rhs.ty) { (CmpKind::Equal, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(EqualI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Equal, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(EqualI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Greater, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(GreaterI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Greater, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(GreaterI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::GreaterEqual, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(GreaterEqualI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::GreaterEqual, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(GreaterEqualI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Less, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(LessI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Less, 
PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(LessI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::LessEqual, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(LessEqualI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::LessEqual, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(LessEqualI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::NotEqual, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(NotEqualI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::NotEqual, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(NotEqualI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } _ => Err(Error::UnsupportedEval), } } } pub trait CmpEval { type L: ByteRepr + Copy; type R: ByteRepr + Copy; fn apply_bool(&self, lhs: Self::L, rhs: Self::R) -> bool; } struct Impl<T>(T); impl<T: CmpEval> Impl<T> { #[inline] fn apply_left_chunk(&self, lhs: &[T::L], rhs: T::R) -> u64 { let mut res = 0u64; let mut mask = 1; for l_val in lhs { let flag = self.0.apply_bool(*l_val, rhs); res |= if flag { mask } else { 0 }; mask <<= 1; } res } #[inline] fn apply_right_chunk(&self, lhs: T::L, rhs: &[T::R]) -> u64 { let mut res = 0u64; let mut mask = 1; for r_val in rhs { let flag = self.0.apply_bool(lhs, *r_val); res |= if flag { mask } else { 0 }; mask <<= 1; } res } #[inline] fn apply_chunk(&self, lhs: &[T::L], rhs: &[T::R]) -> u64 { let mut res = 0u64; let mut mask = 1; for (l_val, r_val) in lhs.iter().zip(rhs) { let flag = self.0.apply_bool(*l_val, *r_val); res |= if flag { mask } else { 0 }; mask <<= 1; } res } #[inline] fn single_single(&self, l_val: T::L, r_val: T::R, validity: Sel) -> Result<Attr> { let flag = self.0.apply_bool(l_val, r_val); Ok(Attr::new_single( PreciseType::bool(), Single::new_bool(flag, validity.n_records() as u16), validity, )) } #[inline] fn array_single( &self, l_vals: &[T::L], r_val: T::R, validity: Sel, sel: Option<&Sel>, ) -> 
Result<Attr> { if let Some(sel) = sel { match sel { Sel::None { .. } => { return Ok(Attr::new_null(PreciseType::bool(), l_vals.len() as u16)); } Sel::Index { count, indexes, .. } => { return handle_sel_index( &indexes[..*count as usize], &validity, l_vals.len(), |idx| self.0.apply_bool(l_vals[idx], r_val), ) } _ => (), } } let mut res = Bitmap::zeroes(l_vals.len()); let (res_u64s, _) = res.u64s_mut(); for (lhs, res) in l_vals.chunks(64).zip(res_u64s) { *res = self.apply_left_chunk(lhs, r_val); } Ok(Attr::new_bitmap(res, validity)) } #[inline] fn single_array( &self, l_val: T::L, r_vals: &[T::R], validity: Sel, sel: Option<&Sel>, ) -> Result<Attr> { if let Some(sel) = sel { match sel { Sel::None { .. } => { return Ok(Attr::new_null(PreciseType::bool(), r_vals.len() as u16)); } Sel::Index { count, indexes, .. } => { return handle_sel_index( &indexes[..*count as usize], &validity, r_vals.len(), |idx| self.0.apply_bool(l_val, r_vals[idx]), ) } _ => (), } } let mut res = Bitmap::zeroes(r_vals.len()); let (res_u64s, _) = res.u64s_mut(); for (rhs, res) in r_vals.chunks(64).zip(res_u64s) { *res = self.apply_right_chunk(l_val, rhs); } Ok(Attr::new_bitmap(res, validity)) } #[inline] fn array_array( &self, l_vals: &[T::L], r_vals: &[T::R], validity: Sel, sel: Option<&Sel>, ) -> Result<Attr> { assert!(l_vals.len() == r_vals.len()); if let Some(sel) = sel { match sel { Sel::None { .. } => { return Ok(Attr::new_null(PreciseType::bool(), l_vals.len() as u16)); } Sel::Index { count, indexes, .. 
} => { return handle_sel_index( &indexes[..*count as usize], &validity, l_vals.len(), |idx| self.0.apply_bool(l_vals[idx], r_vals[idx]), ) } _ => (), } } let mut res = Bitmap::zeroes(r_vals.len()); let (res_u64s, _) = res.u64s_mut(); for ((lhs, rhs), res) in l_vals.chunks(64).zip(r_vals.chunks(64)).zip(res_u64s) { *res = self.apply_chunk(lhs, rhs); } Ok(Attr::new_bitmap(res, validity)) } } #[inline] fn handle_sel_index<F: Fn(usize) -> bool>( sel: &[u16], validity: &Sel, len: usize, f: F, ) -> Result<Attr> { let mut res = Bitmap::zeroes(len); let mut valids = [0u16; 6]; let mut valid_count = 0; for idx in sel { let i = *idx as usize; if validity.selected(i)? { let flag = f(i); res.set(i, flag)?; valids[valid_count] = *idx; valid_count += 1; } } let res = if valid_count == 0 { Attr::new_null(PreciseType::bool(), len as u16) } else { let validity = Sel::Index { count: valid_count as u8, len: len as u16, indexes: valids, }; Attr::new_bitmap(res, validity) }; Ok(res) } impl<T: CmpEval> BinaryEval for Impl<T> { #[inline] fn binary_eval( &self, res_ty: PreciseType, lhs: &Attr, rhs: &Attr, sel: Option<&Sel>, ) -> Result<Attr> { debug_assert_eq!(PreciseType::bool(), res_ty); let n_records = lhs.n_records(); if n_records != rhs.n_records() { return Err(Error::RowNumberMismatch); } let validity = lhs.validity.intersect(&rhs.validity)?; if validity.is_none() { return Ok(Attr::new_null(res_ty, n_records as u16)); } match (&lhs.codec, &rhs.codec) { (Codec::Single(l), Codec::Single(r)) => { self.single_single(l.view(), r.view(), validity) } (Codec::Array(l), Codec::Single(r)) => { self.array_single(l.cast_slice(), r.view(), validity, sel) } (Codec::Single(l), Codec::Array(r)) => { self.single_array(l.view(), r.cast_slice(), validity, sel) } (Codec::Array(l), Codec::Array(r)) => { self.array_array(l.cast_slice(), r.cast_slice(), validity, sel) } (Codec::Empty, _) | (_, Codec::Empty) => Ok(Attr::empty(res_ty)), (Codec::Bitmap(_), _) | (_, Codec::Bitmap(_)) => 
Err(Error::UnsupportedEval), } } } macro_rules! impl_cmp_eval_for_iso_num { ($id:ident, $ty:ty, $op:tt) => { pub struct $id; impl CmpEval for $id { type L = $ty; type R = $ty; #[inline] fn apply_bool(&self, lhs: Self::L, rhs: Self::R) -> bool { lhs $op rhs } } } } impl_cmp_eval_for_iso_num!(EqualI32, i32, ==); impl_cmp_eval_for_iso_num!(GreaterI32, i32, >); impl_cmp_eval_for_iso_num!(GreaterEqualI32, i32, >=); impl_cmp_eval_for_iso_num!(LessI32, i32, <); impl_cmp_eval_for_iso_num!(LessEqualI32, i32, <=); impl_cmp_eval_for_iso_num!(NotEqualI32, i32, !=); impl_cmp_eval_for_iso_num!(EqualI64, i64, ==); impl_cmp_eval_for_iso_num!(GreaterI64, i64, >); impl_cmp_eval_for_iso_num!(GreaterEqualI64, i64, >=); impl_cmp_eval_for_iso_num!(LessI64, i64, <); impl_cmp_eval_for_iso_num!(LessEqualI64, i64, <=); impl_cmp_eval_for_iso_num!(NotEqualI64, i64, !=); #[cfg(test)] mod tests { use super::*; #[test] fn test_cmp_eval_eq() { let size = 10i32; let c1 = Attr::from((0..size).into_iter()); let c2 = Attr::from((0..size).into_iter()); let c3 = Attr::new_single( PreciseType::i32(), Single::new(0i32, size as u16), Sel::All(size as u16), ); let c4 = Attr::new_single( PreciseType::i32(), Single::new(0i32, size as u16), Sel::All(size as u16), ); let c5 = Attr::new_null(PreciseType::i32(), size as u16); let eq = CmpKind::Equal; let res = eq.eval(&c1, &c2, None).unwrap(); let res = res.codec.as_bitmap().unwrap(); assert!(res.bools().all(|b| b)); let res = eq.eval(&c3, &c4, None).unwrap(); assert!(res.validity.is_all()); let res = res.codec.as_single().unwrap(); let value = res.view_bool(); assert!(value); let res = eq.eval(&c1, &c3, None).unwrap(); let res = res.codec.as_bitmap().unwrap(); assert!(res.get(0).unwrap()); assert!(res.bools().skip(1).all(|b| !b)); let res = eq.eval(&c4, &c2, None).unwrap(); let res = res.codec.as_bitmap().unwrap(); assert!(res.get(0).unwrap()); assert!(res.bools().skip(1).all(|b| !b)); let res = eq.eval(&c1, &c5, None).unwrap(); assert!(res.validity.is_none()); 
let res = eq.eval(&c5, &c1, None).unwrap(); assert!(res.validity.is_none()); let res = eq.eval(&c3, &c5, None).unwrap(); assert!(res.validity.is_none()); } #[test] fn test_cmp_eval_gt() { let c1 = Attr::from((0..64i32).into_iter()); let c2 = Attr::from((0..64i32).into_iter().rev()); let c3 = Attr::new_single(PreciseType::i32(), Single::new(5i32, 64), Sel::All(64)); let c4 = Attr::new_single(PreciseType::i32(), Single::new(15i32, 64), Sel::All(64)); let gt = CmpKind::Greater; let res = gt .eval(&c1, &c2, Some(&Sel::new_indexes(64, vec![5, 50]))) .unwrap(); let (valid, _) = res.bool_at(0).unwrap(); assert!(!valid); let (valid, value) = res.bool_at(5).unwrap(); assert!(valid && !value); let (valid, value) = res.bool_at(50).unwrap(); assert!(valid && value); let res = gt.eval(&c1, &c2, Some(&Sel::None(64))).unwrap(); assert_eq!(PreciseType::bool(), res.ty); let res = gt .eval(&c1, &c3, Some(&Sel::new_indexes(64, vec![1, 50]))) .unwrap(); let (valid, value) = res.bool_at(1).unwrap(); assert!(valid && !value); let (valid, value) = res.bool_at(50).unwrap(); assert!(valid && value); let res = gt.eval(&c1, &c3, Some(&Sel::None(64))).unwrap(); assert_eq!(PreciseType::bool(), res.ty); let res = gt .eval(&c4, &c1, Some(&Sel::new_indexes(64, vec![1, 50]))) .unwrap(); let (valid, value) = res.bool_at(1).unwrap(); assert!(valid && value); let (valid, value) = res.bool_at(50).unwrap(); assert!(valid && !value); let res = gt.eval(&c4, &c1, Some(&Sel::None(64))).unwrap(); assert_eq!(PreciseType::bool(), res.ty); } }
use crate::error::{Error, Result}; use crate::BinaryEval; use xngin_datatype::PreciseType; use xngin_expr::PredFuncKind; use xngin_storage::attr::Attr; use xngin_storage::bitmap::Bitmap; use xngin_storage::codec::{Codec, Single}; use xngin_storage::repr::ByteRepr; use xngin_storage::sel::Sel; #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum CmpKind { Equal, Greater, GreaterEqual, Less, LessEqual, NotEqual, } impl CmpKind { #[inline] pub fn from_pred(kind: PredFuncKind) -> Option<Self> { let res = match kind { PredFuncKind::Equal => CmpKind::Equal, PredFuncKind::Greater => CmpKind::Greater, PredFuncKind::GreaterEqual => CmpKind::GreaterEqual, PredFuncKind::Less => CmpKind::Less, PredFuncKind::LessEqual => CmpKind::LessEqual, PredFuncKind::NotEqual => CmpKind::NotEqual, _ => return None, }; Some(res) } #[inline] pub fn eval(&self, lhs: &Attr, rhs: &Attr, sel: Option<&Sel>) -> Result<Attr> { match (self, lhs.ty, rhs.ty) { (CmpKind::Equal, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(EqualI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Equal, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(EqualI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Greater, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(GreaterI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Greater, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(GreaterI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::GreaterEqual, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(GreaterEqualI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::GreaterEqual, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(GreaterEqualI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Less, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(LessI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::Less, 
PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(LessI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::LessEqual, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(LessEqualI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::LessEqual, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(LessEqualI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::NotEqual, PreciseType::Int(4, false), PreciseType::Int(4, false)) => { Impl(NotEqualI32).binary_eval(PreciseType::bool(), lhs, rhs, sel) } (CmpKind::NotEqual, PreciseType::Int(8, false), PreciseType::Int(8, false)) => { Impl(NotEqualI64).binary_eval(PreciseType::bool(), lhs, rhs, sel) } _ => Err(Error::UnsupportedEval), } } } pub trait CmpEval { type L: ByteRepr + Copy; type R: ByteRepr + Copy; fn apply_bool(&self, lhs: Self::L, rhs: Self::R) -> bool; } struct Impl<T>(T); impl<T: CmpEval> Impl<T> { #[inline] fn apply_left_chunk(&self, lhs: &[T::L], rhs: T::R) -> u64 { let mut res = 0u64; let mut mask = 1; for l_val in lhs { let flag = self.0.apply_bool(*l_val, rhs); res |= if flag { mask } else { 0 }; mask <<= 1; } res } #[inline] fn apply_right_chunk(&self, lhs: T::L, rhs: &[T::R]) -> u64 { let mut res = 0u64; let mut mask = 1; for r_val in rhs { let flag = self.0.apply_bool(lhs, *r_val); res |= if flag { mask } else { 0 }; mask <<= 1; } res } #[inline] fn apply_chunk(&self, lhs: &[T::L], rhs: &[T::R]) -> u64 { let mut res = 0u64; let mut mask = 1; for (l_val, r_val) in lhs.iter().zip(rhs) { let flag = self.0.apply_bool(*l_val, *r_val); res |= if flag { mask } else { 0 }; mask <<= 1; } res } #[inline] fn single_single(&self, l_val: T::L, r_val: T::R, validity: Sel) -> Result<Attr> { let flag = self.0.apply_bool(l_val, r_val); Ok(Attr::new_single( PreciseType::bool(), Single::new_bool(flag, validity.n_records() as u16), validity, )) } #[inline] fn array_single( &self, l_vals: &[T::L], r_val: T::R, validity: Sel, sel: Option<&Sel>, ) -> 
Result<Attr> { if let Some(sel) = sel { match sel { Sel::None { .. } => { return Ok(Attr::new_null(PreciseType::bool(), l_vals.len() as u16)); } Sel::Index { count, indexes, .. } => { return handle_sel_index( &indexes[..*count as usize], &validity, l_vals.len(), |idx| self.0.apply_bool(l_vals[idx], r_val), ) } _ => (), } } let mut res = Bitmap::zeroes(l_vals.len()); let (res_u64s, _) = res.u64s_mut(); for (lhs, res) in l_vals.chunks(64).zip(res_u64s) { *res = self.apply_left_chunk(lhs, r_val); } Ok(Attr::new_bitmap(res, validity)) } #[inline] fn single_array( &self, l_val: T::L, r_vals: &[T::R], validity: Sel, sel: Option<&Sel>, ) -> Result<Attr> { if let Some(sel) = sel { match sel { Sel::None { .. } => { return Ok(Attr::new_null(PreciseType::bool(), r_vals.len() as u16)); } Sel::Index { count, indexes, .. } => { return handle_sel_index( &indexes[..*count as usize], &validity, r_vals.len(), |idx| self.0.apply_bool(l_val, r_vals[idx]), ) } _ => (), } } let mut res = Bitmap::zeroes(r_vals.len()); let (res_u64s, _) = res.u64s_mut(); for (rhs, res) in r_vals.chunks(64).zip(res_u64s) { *res = self.apply_right_chunk(l_val, rhs); } Ok(Attr::new_bitmap(res, validity)) } #[inline] fn array_array( &self, l_vals: &[T::L], r_vals: &[T::R], validity: Sel, sel: Option<&Sel>, ) -> Result<Attr> { assert!(l_vals.len() == r_vals.len()); if let Some(sel) = sel { match sel { Sel::None { .. } => { return Ok(Attr::new_null(PreciseType::bool(), l_vals.len() as u16)); } Sel::Index { count, indexes, .. 
} => { return handle_sel_index( &indexes[..*count as usize], &validity, l_vals.len(), |idx| self.0.apply_bool(l_vals[idx], r_vals[idx]), ) } _ => (), } } let mut res = Bitmap::zeroes(r_vals.len()); let (res_u64s, _) = res.u64s_mut(); for ((lhs, rhs), res) in l_vals.chunks(64).zip(r_vals.chunks(64)).zip(res_u64s) { *res = self.apply_chunk(lhs, rhs); } Ok(Attr::new_bitmap(res, validity)) } } #[inline] fn handle_sel_index<F: Fn(usize) -> bool>( sel: &[u16], validity: &Sel, len: usize, f: F, ) -> Result<Attr> { let mut res = Bitmap::zeroes(len); let mut valids = [0u16; 6]; let mut valid_count = 0; for idx in sel { let i = *idx as usize; if validity.selected(i)? { let flag = f(i); res.set(i, flag)?; valids[valid_count] = *idx; valid_count += 1; } } let res = if valid_count == 0 { Attr::new_null(PreciseType::bool(), len as u16) } else { let validity = Sel::Index { count: valid_count as u8, len: len as u16, indexes: valids, }; Attr::new_bitmap(res, validity) }; Ok(res) } impl<T: CmpEval> BinaryEval for Impl<T> { #[inline] fn binary_eval( &self, res_ty: PreciseType, lhs: &Attr, rhs: &Attr, sel: Option<&Sel>, ) -> Result<Attr> { debug_assert_eq!(PreciseType::bool(), res_ty); let n_records = lhs.n_records(); if n_records != rhs.n_records() { return Err(Error::RowNumberMismatch); } let validity = lhs.validity.intersect(&rhs.validity)?; if validity.is_none() { return Ok(Attr::new_null(res_ty, n_records as u16)); } match (&lhs.codec, &rhs.codec) { (Codec::Single(l), Codec::Single(r)) => { self.single_single(l.view(), r.view(), validity) } (Codec::Array(l), Codec::Single(r)) => { self.array_single(l.cast_slice(), r.view(), validity, sel) } (Codec::Single(l), Codec::Array(r)) => { self.single_array(l.view(), r.cast_slice(), validity, sel) } (Codec::Array(l), Codec::Array(r)) => { self.array_array(l.cast_slice(), r.cast_slice(), validity, sel) } (Codec::Empty, _) | (_, Codec::Empty) => Ok(Attr::empty(res_ty)), (Codec::Bitmap(_), _) | (_, Codec::Bitmap(_)) => 
Err(Error::UnsupportedEval), } } } macro_rules! impl_cmp_eval_for_iso_num { ($id:ident, $ty:ty, $op:tt) => { pub struct $id; impl CmpEval for $id { type L = $ty; type R = $ty; #[inline] fn apply_bool(&self, lhs: Self::L, rhs: Self::R) -> bool { lhs $op rhs } } } } impl_cmp_eval_for_iso_num!(EqualI32, i32, ==); impl_cmp_eval_for_iso_num!(GreaterI32, i32, >); impl_cmp_eval_for_iso_num!(GreaterEqualI32, i32, >=); impl_cmp_eval_for_iso_num!(LessI32, i32, <); impl_cmp_eval_for_iso_num!(LessEqualI32, i32, <=); impl_cmp_eval_for_iso_num!(NotEqualI32, i32, !=); impl_cmp_eval_for_iso_num!(EqualI64, i64, ==); impl_cmp_eval_for_iso_num!(GreaterI64, i64, >); impl_cmp_eval_for_iso_num!(GreaterEqualI64, i64, >=); impl_cmp_eval_for_iso_num!(LessI64, i64, <); impl_cmp_eval_for_iso_num!(LessEqualI64, i64, <=); impl_cmp_eval_for_iso_num!(NotEqualI64, i64, !=); #[cfg(test)] mod tests { use super::*; #[test] fn test_cmp_eval_eq() { let size = 10i32; let c1 = Attr::from((0..size).into_iter()); let c2 = Attr::from((0..size).into_iter()); let c3 = Attr::new_single( PreciseType::i32(), Single::new(0i32, size as u16), Sel::All(size as u16), ); let c4 = Attr::new_single( PreciseType::i32(), Single::new(0i32, size as u16), Sel::All(size as u16), ); let c5 = Attr::new_null(PreciseType::i32(), size as u16); let eq = CmpKind::Equal; let res = eq.eval(&c1, &c2, None).unwrap(); let res = res.codec.as_bitmap().unwrap(); assert!(res.bools().all(|b| b)); let res = eq.eval(&c3, &c4, None).unwrap(); assert!(res.validity.is_all()); let res = res.codec.as_single().unwrap(); let value = res.view_bool(); assert!(value); let res = eq.eval(&c1, &c3, None).unwrap(); let res = res.codec.as_bitmap().unwrap(); assert!(res.get(0).unwrap()); assert!(res.bools().skip(1).all(|b| !b)); let res = eq.eval(&c4, &c2, None).unwrap(); let res = res.codec.as_bitmap().unwrap(); assert!(res.get(0).unwrap()); assert!(res.bools().skip(1).all(|b| !b)); let res = eq.eval(&c1, &c5, None).unwrap(); assert!(res.validity.is_none()); 
let res = eq.eval(&c5, &c1, None).unwrap(); assert!(res.validity.is_none()); let res = eq.eval(&c3, &c5, None).unwrap(); assert!(res.validity.is_none()); } #[test] fn test_cmp_eval_gt() { let c1 = Attr::from((0..64i32).into_iter()); let c2 = Attr::from((0..64i32).into_iter().rev()); let c3 = Attr::new_single(PreciseType::i32(), Single::new(5i32, 64), Sel::All(64)); let c4 = Attr::new_single(PreciseType::i32(), Single::new(15i32, 64), Sel::All(64)); let gt = CmpKind::Greater; let res = gt .eval(&c1, &c2, Some(&Sel::new_indexes(64, vec![5, 50]))) .unwrap(); let (valid, _) = res.bool_at(0).unwrap(); assert!(!valid); let (valid, value) = res.bool_at(5).unwrap(); assert!(valid && !value); let (valid, value) = res.bool_at(50).unwrap(); assert!(valid && value); let res = gt.eval(&c1, &c2, Some(&Sel::None(64))).unwrap(); assert_eq!(PreciseType::bool(), res.ty); let res = gt .eval(&c1, &c3, Some(&Sel::new_indexes(64, vec![1, 50]))) .unwrap(); let (valid, value) = res.bool_at(1).unwrap(); assert!(valid && !value);
}
let (valid, value) = res.bool_at(50).unwrap(); assert!(valid && value); let res = gt.eval(&c1, &c3, Some(&Sel::None(64))).unwrap(); assert_eq!(PreciseType::bool(), res.ty); let res = gt .eval(&c4, &c1, Some(&Sel::new_indexes(64, vec![1, 50]))) .unwrap(); let (valid, value) = res.bool_at(1).unwrap(); assert!(valid && value); let (valid, value) = res.bool_at(50).unwrap(); assert!(valid && !value); let res = gt.eval(&c4, &c1, Some(&Sel::None(64))).unwrap(); assert_eq!(PreciseType::bool(), res.ty); }
function_block-function_prefix_line
[ { "content": "pub trait DataSourceID: Clone + Copy + PartialEq + Eq + Hash + PartialOrd + Ord + Sized {\n\n /// resolve data source from expression\n\n fn from_expr(e: &Expr) -> Option<(Self, u32)>;\n\n}\n\n\n\nimpl DataSourceID for QueryID {\n\n #[inline]\n\n fn from_expr(e: &Expr) -> Option<(Self, u32)> {\n\n match &e.kind {\n\n ExprKind::Col(Col::QueryCol(qid, idx)) => Some((*qid, *idx)),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl DataSourceID for TableID {\n\n #[inline]\n\n fn from_expr(e: &Expr) -> Option<(Self, u32)> {\n\n match &e.kind {\n\n ExprKind::Col(Col::TableCol(tid, idx)) => Some((*tid, *idx)),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "xngin-expr/src/source.rs", "rank": 0, "score": 440566.20538206556 }, { "content": "#[inline]\n\npub fn join_reorder<R, F>(plan: &mut LgcPlan, mut f: F) -> Result<()>\n\nwhere\n\n R: Reorder,\n\n F: FnMut() -> R,\n\n{\n\n for qid in &plan.attaches {\n\n reorder_join(&mut plan.qry_set, *qid, &mut f)?\n\n }\n\n reorder_join(&mut plan.qry_set, plan.root, &mut f)\n\n}\n\n\n", "file_path": "xngin-plan/src/join/reorder/mod.rs", "rank": 1, "score": 431690.2439242714 }, { "content": "#[inline]\n\nfn sel_indexes(attr: &Attr, sel: &[u16]) -> Result<Attr> {\n\n // bools should be handled specially\n\n let res = if attr.ty == PreciseType::bool() {\n\n let mut bm = Bitmap::zeroes(sel.len());\n\n let mut validity = Bitmap::with_capacity(sel.len());\n\n let orig = attr.codec.as_bitmap().unwrap();\n\n for (idx, i) in sel.iter().enumerate() {\n\n let i = *i as usize;\n\n let valid = attr.validity.selected(i)?;\n\n if valid {\n\n bm.set(idx, orig.get(i)?)?;\n\n }\n\n validity.add(valid);\n\n }\n\n Attr::new_bitmap(bm, Sel::from(validity))\n\n } else {\n\n let val_len = attr.ty.val_len().unwrap();\n\n let mut arr = Array::with_capacity(sel.len());\n\n let arr_raw = arr.raw_mut().unwrap();\n\n let mut validity = Bitmap::with_capacity(sel.len());\n", "file_path": "xngin-storage/src/sel.rs", "rank": 2, "score": 426547.70462188637 
}, { "content": "#[inline]\n\npub fn bitmap_u8s_set(bm: &mut [u8], idx: usize, val: bool) {\n\n let bidx = idx / 8;\n\n if val {\n\n bm[bidx] |= 1 << (idx & 7);\n\n } else {\n\n bm[bidx] &= !(1 << (idx & 7));\n\n }\n\n}\n\n\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 3, "score": 421494.62009429 }, { "content": "#[inline]\n\nfn load_validity(raw: &Arc<[u8]>, offset: usize, n_records: u16) -> Result<Sel> {\n\n let res = match SerValidType::try_from(raw[offset])? {\n\n SerValidType::All => Sel::All(n_records),\n\n SerValidType::None => Sel::None(n_records),\n\n SerValidType::Index => {\n\n let count = raw[offset + 1];\n\n let mut indexes = [0u16; 6];\n\n bytemuck::cast_slice_mut::<_, u8>(&mut indexes)\n\n .copy_from_slice(&raw[offset + 2..offset + 2 + 12]);\n\n Sel::Index {\n\n count,\n\n len: n_records,\n\n indexes,\n\n }\n\n }\n\n SerValidType::Bitmap => {\n\n let bm = Bitmap::new_borrowed(Arc::clone(raw), n_records as usize, offset + 16);\n\n Sel::Bitmap(Arc::new(bm))\n\n }\n\n };\n\n Ok(res)\n\n}\n\n\n", "file_path": "xngin-storage/src/attr.rs", "rank": 4, "score": 416391.98173673917 }, { "content": "/// Helper function to generate a visitor to\n\n/// traverse the operator tree in preorder.\n\npub fn preorder<F: FnMut(&Op)>(f: F) -> impl OpVisitor {\n\n struct Preorder<F>(F);\n\n impl<F: FnMut(&Op)> OpVisitor for Preorder<F> {\n\n type Cont = ();\n\n type Break = Error;\n\n fn enter(&mut self, op: &Op) -> ControlFlow<Error> {\n\n (self.0)(op);\n\n ControlFlow::Continue(())\n\n }\n\n }\n\n Preorder(f)\n\n}\n\n\n", "file_path": "xngin-plan/src/op.rs", "rank": 5, "score": 410934.446885933 }, { "content": "fn reorder_join<R, F>(qry_set: &mut QuerySet, qry_id: QueryID, f: &mut F) -> Result<()>\n\nwhere\n\n R: Reorder,\n\n F: FnMut() -> R,\n\n{\n\n struct S<'a, F>(&'a mut QuerySet, &'a mut F);\n\n impl<'a, 'r, R, F> OpMutVisitor for S<'a, F>\n\n where\n\n R: Reorder,\n\n F: FnMut() -> R,\n\n {\n\n type Cont = ();\n\n type Break = Error;\n\n #[inline]\n\n fn 
enter(&mut self, op: &mut Op) -> ControlFlow<Error> {\n\n match op {\n\n Op::Query(qry_id) => reorder_join(self.0, *qry_id, self.1).branch(),\n\n Op::JoinGraph(_) => {\n\n let graph = mem::take(op);\n\n if let Op::JoinGraph(mut g) = graph {\n", "file_path": "xngin-plan/src/join/reorder/mod.rs", "rank": 6, "score": 400179.13407851977 }, { "content": "#[inline]\n\npub fn fix<F: Fn(QueryID, u32) -> Option<PreciseType>>(e: &mut Expr, f: F) -> Result<()> {\n\n match &mut e.kind {\n\n ExprKind::Const(c) => e.ty = c.pty(),\n\n ExprKind::Col(c) => match c {\n\n Col::QueryCol(qid, idx) | Col::CorrelatedCol(qid, idx) => {\n\n if let Some(pty) = f(*qid, *idx) {\n\n e.ty = pty;\n\n } else {\n\n return Err(Error::UnknownColumnType);\n\n }\n\n }\n\n Col::TableCol(..) => (), // table column already has type, do nothing\n\n },\n\n ExprKind::Func { kind, args } => {\n\n let pty = fix_func(*kind, args.as_mut())?;\n\n e.ty = pty;\n\n }\n\n ExprKind::Pred(pred) => {\n\n // Predicates always return bool\n\n fix_pred(pred)?;\n\n e.ty = PreciseType::bool();\n\n }\n\n _ => todo!(),\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "xngin-expr/src/infer.rs", "rank": 7, "score": 394453.0110646208 }, { "content": "#[inline]\n\npub fn fix_rec<F: Fn(QueryID, u32) -> Option<PreciseType>>(e: &mut Expr, f: F) -> Result<()> {\n\n e.walk_mut(&mut FixRec(f)).unbranch()\n\n}\n\n\n", "file_path": "xngin-expr/src/infer.rs", "rank": 8, "score": 389754.74127851956 }, { "content": "#[inline]\n\nfn intersect_index_bitmap(idx: &[u16], bm: &Bitmap) -> Result<Sel> {\n\n let mut indexes = [0u16; 6];\n\n let mut count = 0;\n\n for i in idx {\n\n if bm.get(*i as usize)? 
{\n\n indexes[count] = *i;\n\n count += 1;\n\n }\n\n }\n\n let res = if count == 0 {\n\n Sel::None(bm.len() as u16)\n\n } else {\n\n Sel::Index {\n\n count: count as u8,\n\n len: bm.len() as u16,\n\n indexes,\n\n }\n\n };\n\n Ok(res)\n\n}\n", "file_path": "xngin-storage/src/sel.rs", "rank": 10, "score": 381283.46892430075 }, { "content": "#[inline]\n\npub fn bitmap_u8s_get(bm: &[u8], idx: usize) -> bool {\n\n bm[idx / 8] & (1 << (idx & 7)) != 0\n\n}\n\n\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 11, "score": 381161.7710387135 }, { "content": "#[inline]\n\npub fn bitmap_u64s_get(bm: &[u64], idx: usize) -> bool {\n\n bm[idx / 64] & (1 << (idx & 63)) != 0\n\n}\n\n\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 12, "score": 381088.77896370215 }, { "content": "#[inline]\n\npub fn null_as_false(attr: &Attr) -> Result<Sel> {\n\n debug_assert_eq!(PreciseType::bool(), attr.ty);\n\n // Only bitmap and single could be converted to selection.\n\n let res = match &attr.codec {\n\n Codec::Single(s) => match &attr.validity {\n\n Sel::All(_) => {\n\n let flag = s.view::<u8>();\n\n if flag == 0 {\n\n Sel::None(s.len)\n\n } else {\n\n Sel::All(s.len)\n\n }\n\n }\n\n // here we treat null as false.\n\n Sel::None(_) => Sel::None(s.len),\n\n Sel::Index { .. 
} | Sel::Bitmap(_) => {\n\n let flag = s.view::<u8>();\n\n if flag == 0 {\n\n Sel::None(s.len)\n\n } else {\n", "file_path": "xngin-storage/src/sel.rs", "rank": 13, "score": 378839.4107590006 }, { "content": "#[inline]\n\npub fn bitmap_bools(bm: &[u64], len: usize) -> BoolIter<'_> {\n\n BoolIter { bm, len, idx: 0 }\n\n}\n\n\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 14, "score": 374773.46725922317 }, { "content": "#[inline]\n\nfn load_fixed_len_sma(raw: &Arc<[u8]>, ty: PreciseType, start_bytes: usize) -> Result<SMA> {\n\n let val_len = ty.val_len().unwrap(); // won't fail\n\n // read min value\n\n let mut min = SmallVec::with_capacity(val_len);\n\n let start = start_bytes;\n\n let end = start + val_len;\n\n min.extend_from_slice(&raw[start..end]);\n\n // read max value\n\n let mut max = SmallVec::with_capacity(val_len);\n\n let (start, end) = (end, end + val_len);\n\n max.extend_from_slice(&raw[start..end]);\n\n // read kind\n\n let kind = PosKind::try_from(raw[end])?;\n\n // align and read sma lookup table\n\n let start = align_u128(end + 1);\n\n let pos = PosTbl::new_borrowed(raw.clone(), kind.n_slots(), start);\n\n Ok(SMA::new(min, max, kind, pos))\n\n}\n\n\n", "file_path": "xngin-storage/src/attr.rs", "rank": 15, "score": 368951.5717050412 }, { "content": "#[inline]\n\npub fn bitmap_union(this: &mut [u64], this_len: usize, that: &[u64], that_len: usize) {\n\n debug_assert!(this_len == that_len);\n\n this.iter_mut().zip(that.iter()).for_each(|(a, b)| *a |= b);\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct RangeIter<'a> {\n\n u64s: &'a [u64], // slice of u64\n\n last_word_len: usize, // length of last word\n\n word: u64, // current u64 word to scan\n\n word_bits: usize, // maximum bits in current word\n\n prev: bool, // previous value (true/flase)\n\n n: usize, // previous repeat number\n\n}\n\n\n\nimpl<'a> RangeIter<'a> {\n\n #[inline]\n\n fn break_falses_in_word(&mut self) {\n\n debug_assert!(self.prev);\n\n let bits = 
self.word_bits.min(self.word.trailing_zeros() as usize);\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 16, "score": 365589.3663424458 }, { "content": "#[inline]\n\npub fn bitmap_intersect(this: &mut [u64], this_len: usize, that: &[u64], that_len: usize) {\n\n debug_assert!(this_len == that_len);\n\n this.iter_mut().zip(that.iter()).for_each(|(a, b)| *a &= b);\n\n}\n\n\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 17, "score": 365589.3663424458 }, { "content": "#[inline]\n\nfn sel_one(attr: &Attr, sel: usize) -> Result<Attr> {\n\n let (valid, raw_val) = attr.val_at(sel)?;\n\n let res = if valid {\n\n let mut data: SmallVec<_> = SmallVec::with_capacity(raw_val.len());\n\n data.extend_from_slice(raw_val);\n\n Attr::new_single(attr.ty, Single::new_raw(data, 1), Sel::All(1))\n\n } else {\n\n Attr::new_null(attr.ty, 1)\n\n };\n\n Ok(res)\n\n}\n\n\n", "file_path": "xngin-storage/src/sel.rs", "rank": 18, "score": 358334.13436496456 }, { "content": "#[inline]\n\nfn write_indent<F: Write>(f: &mut F, newline: bool, indent: usize) -> fmt::Result {\n\n if newline {\n\n f.write_char('\\n')?;\n\n for _ in 0..indent {\n\n f.write_char(' ')?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::parser::dialect::MySQL;\n\n use crate::parser::expr::expr_sp0;\n\n use nom::error::Error;\n\n\n\n #[test]\n\n fn test_pretty_expr() -> anyhow::Result<()> {\n\n for c in vec![\n", "file_path": "xngin-frontend/src/pretty.rs", "rank": 19, "score": 355227.9016117603 }, { "content": "#[inline]\n\nfn write_sp<F: Write>(f: &mut F, newline: bool, indent: usize) -> fmt::Result {\n\n if newline {\n\n f.write_char('\\n')?;\n\n for _ in 0..indent {\n\n f.write_char(' ')?;\n\n }\n\n Ok(())\n\n } else {\n\n f.write_char(' ')\n\n }\n\n}\n\n\n", "file_path": "xngin-frontend/src/pretty.rs", "rank": 20, "score": 355227.9016117604 }, { "content": "#[inline]\n\npub fn bitmap_true_count(bm: &[u64], len: usize) -> usize {\n\n let len_u64 = len / 
64;\n\n let len_remained = len & 63;\n\n // sum of u64s\n\n let sum0: usize = bm[..len_u64].iter().map(|v| v.count_ones() as usize).sum();\n\n if len_remained == 0 {\n\n sum0\n\n } else {\n\n sum0 + (bm[len_u64] & ((1 << len_remained) - 1)).count_ones() as usize\n\n }\n\n}\n\n\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 21, "score": 353356.533843744 }, { "content": "#[inline]\n\npub fn bitmap_false_count(bm: &[u64], len: usize) -> usize {\n\n let len_u64 = len / 64;\n\n let len_remained = len & 63;\n\n // sum of u64s\n\n let sum0 = bm[..len_u64].iter().map(|v| v.count_zeros() as usize).sum();\n\n if len_remained == 0 {\n\n sum0\n\n } else {\n\n sum0 + (bm[len_u64] | !((1 << len_remained) - 1)).count_zeros() as usize\n\n }\n\n}\n\n\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 22, "score": 353356.533843744 }, { "content": "#[inline]\n\nfn write_delim<F: Write>(f: &mut F, delim: char, newline: bool, indent: usize) -> fmt::Result {\n\n f.write_char(delim)?;\n\n if newline {\n\n f.write_char('\\n')?;\n\n for _ in 0..indent {\n\n f.write_char(' ')?;\n\n }\n\n Ok(())\n\n } else {\n\n f.write_char(' ')\n\n }\n\n}\n\n\n", "file_path": "xngin-frontend/src/pretty.rs", "rank": 23, "score": 338667.01246126747 }, { "content": "fn with_input<I, O, E, F>(mut parser: F) -> impl FnMut(I) -> IResult<I, (I, O), E>\n\nwhere\n\n I: Clone + nom::Offset + nom::Slice<std::ops::RangeTo<usize>>,\n\n E: ParseError<I>,\n\n F: nom::Parser<I, O, E>,\n\n{\n\n move |input: I| {\n\n let i = input.clone();\n\n match parser.parse(i) {\n\n Ok((i, o)) => {\n\n let index = input.offset(&i);\n\n Ok((i, (input.slice(..index), o)))\n\n }\n\n Err(e) => Err(e),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "xngin-frontend/src/parser/mod.rs", "rank": 24, "score": 332593.2671228739 }, { "content": "#[inline]\n\npub fn type_fix(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<()> {\n\n let mut types = HashMap::new();\n\n fix_type(qry_set, qry_id, &mut 
types)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/type_fix.rs", "rank": 25, "score": 332423.6661703339 }, { "content": "fn next<I, O1, O2, E, F, G>(mut parser: F, mut f: G) -> impl FnMut(I) -> IResult<I, O2, E>\n\nwhere\n\n F: nom::Parser<I, O1, E>,\n\n G: FnMut(I, O1) -> IResult<I, O2, E>,\n\n{\n\n move |input: I| {\n\n let (input, o1) = parser.parse(input)?;\n\n f(input, o1)\n\n }\n\n}\n\n\n", "file_path": "xngin-frontend/src/parser/mod.rs", "rank": 26, "score": 330529.95766539685 }, { "content": "/// apply second parser with first parser's success output,\n\n/// cut the error into failure.\n\nfn next_cut<I, O1, O2, E, F, G>(mut parser: F, mut f: G) -> impl FnMut(I) -> IResult<I, O2, E>\n\nwhere\n\n I: Clone,\n\n E: ParseError<I>,\n\n F: nom::Parser<I, O1, E>,\n\n G: FnMut(I, I, O1) -> IResult<I, O2, E>,\n\n{\n\n move |input: I| {\n\n let (ni, o1) = parser.parse(input.clone())?;\n\n match f(input, ni, o1) {\n\n Err(nom::Err::Error(e)) => Err(nom::Err::Failure(e)),\n\n rest => rest,\n\n }\n\n }\n\n}\n\n\n", "file_path": "xngin-frontend/src/parser/mod.rs", "rank": 27, "score": 327103.82025271194 }, { "content": "#[inline]\n\nfn bitmap_extend_const(dst: &mut [u8], dst_len: usize, src_val: bool, src_len: usize) {\n\n debug_assert!(dst.len() * 8 >= dst_len + src_len);\n\n if src_len == 0 {\n\n // nothing to do\n\n return;\n\n }\n\n if dst_len & 7 == 0 {\n\n // copy bytes\n\n let byte = if src_val { 0xff } else { 0x00 };\n\n let src_len_u8 = (src_len + 7) / 8;\n\n let dst_len_u8 = dst_len / 8;\n\n dst[dst_len_u8..dst_len_u8 + src_len_u8]\n\n .iter_mut()\n\n .for_each(|b| *b = byte);\n\n return;\n\n }\n\n let rbits = dst_len & 7;\n\n let tgt_len = dst_len + src_len;\n\n let tgt_len_u8 = (tgt_len + 7) / 8;\n\n let dst_len_u8 = dst_len / 8;\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 28, "score": 325746.1820771603 }, { "content": "#[inline]\n\npub fn rule_optimize_each(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<()> {\n\n let mut eff = 
init_rule_optimize(qry_set, qry_id)?;\n\n for _ in 0..10 {\n\n match eff {\n\n RuleEffect::OPEXPR => {\n\n eff = RuleEffect::NONE;\n\n eff |= expr_simplify(qry_set, qry_id)?;\n\n eff |= op_eliminate(qry_set, qry_id)?;\n\n eff |= pred_pushdown(qry_set, qry_id)?;\n\n }\n\n RuleEffect::OP => {\n\n eff = RuleEffect::NONE;\n\n eff |= expr_simplify(qry_set, qry_id)?;\n\n }\n\n RuleEffect::EXPR => {\n\n eff = RuleEffect::NONE;\n\n eff |= pred_pushdown(qry_set, qry_id)?;\n\n eff |= op_eliminate(qry_set, qry_id)?;\n\n }\n\n _ => break,\n\n }\n\n }\n\n joingraph_initialize(qry_set, qry_id)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/mod.rs", "rank": 29, "score": 323271.4314800474 }, { "content": "#[inline]\n\nfn update_simplify_single<F: FnMut(&mut Expr) -> Result<()>>(\n\n e: &mut Expr,\n\n null_coalesce: NullCoalesce,\n\n mut f: F,\n\n) -> Result<RuleEffect> {\n\n let mut eff = RuleEffect::NONE;\n\n // we don't count the replacement as expression change\n\n f(e)?;\n\n match &mut e.kind {\n\n ExprKind::Func { kind, args, .. } => {\n\n if let Some(new) = simplify_func(*kind, args)? {\n\n *e = new;\n\n eff |= RuleEffect::EXPR;\n\n }\n\n }\n\n ExprKind::Pred(p) => {\n\n // todo: add NullCoalesce\n\n if let Some(new) = simplify_pred(p, null_coalesce)? 
{\n\n *e = new;\n\n eff |= RuleEffect::EXPR;\n\n }\n\n }\n\n _ => (), // All other kinds are skipped in constant folding\n\n }\n\n Ok(eff)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/expr_simplify.rs", "rank": 30, "score": 323132.3004925385 }, { "content": "#[inline]\n\nfn write_align_u128<W: io::Write>(bs: &[u8], writer: &mut W, buf: &mut Vec<u8>) -> Result<usize> {\n\n writer.write_all(bs)?;\n\n let total_bytes = align_u128(bs.len());\n\n if total_bytes > bs.len() {\n\n buf.clear();\n\n buf.extend(std::iter::repeat(0u8).take(total_bytes - bs.len()));\n\n writer.write_all(buf)?;\n\n }\n\n Ok(total_bytes)\n\n}\n\n\n\nimpl<T: ByteRepr + StaticTyped + Default> FromIterator<Option<T>> for Attr {\n\n #[inline]\n\n fn from_iter<I: IntoIterator<Item = Option<T>>>(iter: I) -> Self {\n\n let iter = iter.into_iter();\n\n let iter_size = match iter.size_hint() {\n\n (_, Some(hb)) => hb.max(64),\n\n _ => 64,\n\n };\n\n let mut validity = Bitmap::zeroes(iter_size);\n", "file_path": "xngin-storage/src/attr.rs", "rank": 31, "score": 320562.95584284957 }, { "content": "#[inline]\n\npub fn op_eliminate(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n eliminate_op(qry_set, qry_id, false)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/op_eliminate.rs", "rank": 32, "score": 320155.5521493344 }, { "content": "#[inline]\n\nfn handle_sel_index<T: ByteRepr, F: Fn(usize) -> T>(\n\n sel: &[u16],\n\n res_ty: PreciseType,\n\n validity: &Sel,\n\n len: usize,\n\n f: F,\n\n) -> Result<Attr> {\n\n let mut arr = Array::new_owned::<T>(len);\n\n let res_vals = arr.cast_slice_mut::<T>(len).unwrap();\n\n let mut valids = [0u16; 6];\n\n let mut valid_count = 0;\n\n for idx in sel {\n\n let idx = *idx as usize;\n\n if validity.selected(idx)? 
{\n\n res_vals[idx] = f(idx);\n\n valids[valid_count] = idx as u16;\n\n valid_count += 1;\n\n }\n\n }\n\n unsafe { arr.set_len(len) };\n", "file_path": "xngin-compute/src/arith.rs", "rank": 33, "score": 319392.5055323261 }, { "content": "#[inline]\n\npub fn bitmap_range_iter(bm: &[u64], len: usize) -> RangeIter<'_> {\n\n if len == 0 {\n\n // empty iterator\n\n return RangeIter {\n\n u64s: &[],\n\n last_word_len: 0,\n\n word: 0,\n\n word_bits: 0,\n\n prev: false,\n\n n: 0,\n\n };\n\n }\n\n let prev = bm[0] & 1 != 0; // pre-read first value\n\n let last_word_len = if len & 63 == 0 { 64 } else { len & 63 };\n\n RangeIter {\n\n u64s: bm,\n\n last_word_len,\n\n word: 0,\n\n word_bits: 0,\n\n prev,\n\n n: 0,\n\n }\n\n}\n\n\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 34, "score": 318754.8882329974 }, { "content": "#[inline]\n\nfn bitmap_u64s_shift(bs: &mut [u64], len: usize, shift_bits: usize) {\n\n if shift_bits >= len || shift_bits == 0 {\n\n return;\n\n }\n\n let orig_len_u64 = (len + 63) / 64;\n\n if shift_bits & 7 == 0 {\n\n // memcpy\n\n let offset_u8 = shift_bits / 8;\n\n let u8s = bytemuck::cast_slice_mut::<_, u8>(&mut bs[..orig_len_u64]);\n\n u8s.copy_within(offset_u8.., 0);\n\n return;\n\n }\n\n let offset_u64 = shift_bits / 64;\n\n let bits = shift_bits & 63;\n\n if offset_u64 == 0 {\n\n let bs = &mut bs[..orig_len_u64];\n\n // Use macro to unroll below expression for better performance.\n\n //\n\n // bs[..orig_len_u64].for_each_offset_pair(1, |(a, b)| {\n\n // *a >>= bits;\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 35, "score": 314064.49642128596 }, { "content": "#[inline]\n\nfn eliminate_op(qry_set: &mut QuerySet, qry_id: QueryID, is_subq: bool) -> Result<RuleEffect> {\n\n qry_set.transform_op(qry_id, |qry_set, _, op| {\n\n let mut eo = EliminateOp::new(qry_set, is_subq);\n\n op.walk_mut(&mut eo).unbranch()\n\n })?\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/op_eliminate.rs", "rank": 36, "score": 313267.11229455937 }, { "content": 
"#[inline]\n\nfn validity_with_sel_bitmap(validity: &Sel, sel: &Bitmap, n_filtered: usize) -> Result<Sel> {\n\n let res = match &validity {\n\n Sel::Bitmap(vm) => {\n\n let mut tmp = Bitmap::with_capacity(n_filtered);\n\n let mut idx = 0;\n\n for (flag, n) in sel.range_iter() {\n\n if flag {\n\n tmp.extend_range(vm, idx..idx + n)?;\n\n }\n\n idx += n;\n\n }\n\n debug_assert_eq!(tmp.len(), n_filtered);\n\n Sel::Bitmap(Arc::new(tmp))\n\n }\n\n Sel::All(_) => Sel::All(n_filtered as u16),\n\n Sel::None(_) => Sel::None(n_filtered as u16),\n\n Sel::Index { count, indexes, .. } => {\n\n // we will have no more than `count` valid values,\n\n // result validity must be Sel::Index\n\n let mut valids = [0u16; 6];\n", "file_path": "xngin-storage/src/sel.rs", "rank": 37, "score": 312523.9244863347 }, { "content": "#[inline]\n\npub fn init_rule_optimize(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n let mut eff = RuleEffect::NONE;\n\n // Run column pruning as first step, to remove unused columns in operator tree.\n\n // this will largely reduce effort of other rules.\n\n eff |= col_prune(qry_set, qry_id)?; // onetime\n\n // Run expression simplify as second step, fold constants, normalize expressions.\n\n eff |= expr_simplify(qry_set, qry_id)?;\n\n // Run operator eliminate after expression simplify, to remove unnecessary operators.\n\n eff |= op_eliminate(qry_set, qry_id)?;\n\n // Run outerjoin reduce to update join type top down.\n\n eff |= outerjoin_reduce(qry_set, qry_id)?; // onetime\n\n // Run predicate pushdown\n\n eff |= pred_pushdown(qry_set, qry_id)?;\n\n // Run predicate pullup with predicate propagate for future predicate pushdown.\n\n pred_pullup(qry_set, qry_id)?; // onetime\n\n // Run predicate pushdown again\n\n eff |= pred_pushdown(qry_set, qry_id)?;\n\n // Run column pruning again\n\n eff |= col_prune(qry_set, qry_id)?;\n\n // unfold derived tables to gather more tables to join graph.\n\n eff |= derived_unfold(qry_set, qry_id)?; // 
onetime\n\n Ok(eff)\n\n}\n", "file_path": "xngin-plan/src/rule/mod.rs", "rank": 38, "score": 306999.14046756516 }, { "content": "#[inline]\n\npub fn rule_optimize(plan: &mut LgcPlan) -> Result<()> {\n\n for qry_id in &plan.attaches {\n\n rule_optimize_each(&mut plan.qry_set, *qry_id)?\n\n }\n\n rule_optimize_each(&mut plan.qry_set, plan.root)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/mod.rs", "rank": 39, "score": 304782.8934375881 }, { "content": "#[inline]\n\nfn write_sma<W: io::Write>(sma: &SMA, writer: &mut W, buf: &mut Vec<u8>) -> Result<usize> {\n\n let mut n = 0;\n\n buf.clear();\n\n // array codec does not support var length values.\n\n // min value, max value and kind\n\n buf.extend_from_slice(&sma.min);\n\n buf.extend_from_slice(&sma.max);\n\n buf.push(sma.kind as u8);\n\n let padding = align_u128(buf.len()) - buf.len();\n\n if padding > 0 {\n\n buf.extend(std::iter::repeat(0u8).take(padding));\n\n }\n\n // kind\n\n writer.write_all(buf)?;\n\n n += buf.len();\n\n // position lookup\n\n n += write_align_u128(sma.raw_pos_tbl(), writer, buf)?;\n\n Ok(n)\n\n}\n\n\n", "file_path": "xngin-storage/src/attr.rs", "rank": 40, "score": 301209.9557579578 }, { "content": "#[inline]\n\nfn fix_func(kind: FuncKind, args: &mut [Expr]) -> Result<PreciseType> {\n\n use PreciseType as PT;\n\n match kind {\n\n FuncKind::Add | FuncKind::Sub => match (args[0].ty, args[1].ty) {\n\n (PT::Unknown, _) | (_, PT::Unknown) => Err(Error::UnknownArgumentType),\n\n (PT::Compound, _) | (_, PT::Compound) => todo!(\"compound type infer\"),\n\n // float vs float\n\n (PT::Float(lhs_bytes), PT::Float(rhs_bytes)) => {\n\n match lhs_bytes.cmp(&rhs_bytes) {\n\n Ordering::Greater => {\n\n // implicit cast rhs\n\n let res_ty = PT::Float(lhs_bytes);\n\n cast_arg(&mut args[1], res_ty);\n\n Ok(res_ty)\n\n }\n\n Ordering::Less => {\n\n // implicit cast lhs\n\n let res_ty = PT::Float(rhs_bytes);\n\n cast_arg(&mut args[0], res_ty);\n\n Ok(res_ty)\n", "file_path": "xngin-expr/src/infer.rs", "rank": 
41, "score": 300205.4555453138 }, { "content": "#[inline]\n\nfn bitmap_extend(dst: &mut [u64], dst_len: usize, src: &[u64], src_len: usize) {\n\n debug_assert!(src.len() * 64 >= src_len);\n\n debug_assert!(dst.len() * 64 >= dst_len + src_len);\n\n if src_len == 0 {\n\n // nothing to do\n\n return;\n\n }\n\n if dst_len & 7 == 0 {\n\n let src_len_u8 = (src_len + 7) / 8;\n\n let offset_u8 = dst_len / 8;\n\n let src_u8s = bytemuck::cast_slice::<_, u8>(src);\n\n let dst_u8s = bytemuck::cast_slice_mut::<_, u8>(dst);\n\n dst_u8s[offset_u8..offset_u8 + src_len_u8].copy_from_slice(&src_u8s[..src_len_u8]);\n\n return;\n\n }\n\n let rbits = dst_len & 63;\n\n let bits = 64 - rbits;\n\n let tgt_len = dst_len + src_len;\n\n let tgt_len_u64 = (tgt_len + 63) / 64;\n\n let src_len_u64 = (src_len + 63) / 64;\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 42, "score": 296956.1953087598 }, { "content": "#[inline]\n\npub fn joingraph_initialize(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<()> {\n\n init_joingraph(qry_set, qry_id)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/joingraph_initialize.rs", "rank": 43, "score": 284779.11136815057 }, { "content": "#[inline]\n\npub fn pred_pullup(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<()> {\n\n let mut p_preds = HashMap::new();\n\n let _ = pullup_pred(qry_set, qry_id, HashSet::new(), &mut p_preds)?; // pass empty parent columns, so pulled preds must be empty\n\n Ok(())\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/pred_pullup.rs", "rank": 44, "score": 284779.11136815057 }, { "content": "/// i64 + u64 always returns u64, fail if not in range\n\nfn i64_add_u64(v0: i64, v1: u64) -> Result<Const> {\n\n let v0 = Decimal::from(v0);\n\n let v1 = Decimal::from(v1);\n\n let mut res = Decimal::zero();\n\n Decimal::add_to(&v0, &v1, &mut res).map_err(|_| Error::ValueOutOfRange)?;\n\n res.as_u64()\n\n .map(Const::U64)\n\n .map_err(|_| Error::ValueOutOfRange)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use 
crate::fold::tests::{new_decimal, new_f64};\n\n use crate::Const::*;\n\n\n\n #[test]\n\n fn test_fold_add() {\n\n assert_eq_fold_add(I64(1), I64(1), I64(2));\n\n assert_eq_fold_add(I64(1), U64(1), U64(2));\n", "file_path": "xngin-expr/src/fold/add.rs", "rank": 45, "score": 284200.78381621407 }, { "content": "fn i64_sub_u64(v0: i64, v1: u64) -> Result<Const> {\n\n let v0 = Decimal::from(v0);\n\n let v1 = Decimal::from(v1);\n\n let mut res = Decimal::zero();\n\n Decimal::sub_to(&v0, &v1, &mut res).map_err(|_| Error::ValueOutOfRange)?;\n\n res.as_u64()\n\n .map(Const::U64)\n\n .map_err(|_| Error::ValueOutOfRange)\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/sub.rs", "rank": 46, "score": 284183.8162117684 }, { "content": "fn u64_sub_i64(v0: u64, v1: i64) -> Result<Const> {\n\n let v0 = Decimal::from(v0);\n\n let v1 = Decimal::from(v1);\n\n let mut res = Decimal::zero();\n\n Decimal::sub_to(&v0, &v1, &mut res).map_err(|_| Error::ValueOutOfRange)?;\n\n res.as_u64()\n\n .map(Const::U64)\n\n .map_err(|_| Error::ValueOutOfRange)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::fold::tests::{new_decimal, new_f64};\n\n use crate::Const::*;\n\n\n\n #[test]\n\n fn test_fold_sub() {\n\n assert_eq_fold_sub(I64(3), I64(1), I64(2));\n\n assert_eq_fold_sub(I64(3), U64(1), U64(2));\n", "file_path": "xngin-expr/src/fold/sub.rs", "rank": 47, "score": 284183.8162117684 }, { "content": "#[inline]\n\nfn ser_validity_offset(validity: &Sel, offset: usize) -> (usize, usize) {\n\n let vlen = match validity {\n\n Sel::All(_) | Sel::None(_) => align_u128(1),\n\n Sel::Index { .. 
} => align_u128(1 + 1 + 12),\n\n Sel::Bitmap(bm) => align_u128(1) + align_u128(bm.total_bytes()),\n\n };\n\n (offset, offset + vlen)\n\n}\n\n\n", "file_path": "xngin-storage/src/attr.rs", "rank": 48, "score": 278007.83975864534 }, { "content": "#[inline]\n\nfn bitmap_extend_range(dst: &mut [u64], dst_len: usize, src: &[u64], range: Range<usize>) {\n\n debug_assert!(src.len() * 64 >= range.end);\n\n let tgt_len = dst_len + range.end - range.start;\n\n debug_assert!(dst.len() * 64 >= tgt_len);\n\n if range.start & 63 == 0 {\n\n // start aligned to byte bound, reuse copy_bits\n\n bitmap_extend(\n\n dst,\n\n dst_len,\n\n &src[range.start / 64..(range.end + 63) / 64],\n\n range.end - range.start,\n\n );\n\n return;\n\n }\n\n if dst_len & 7 == range.start & 7 {\n\n // last few dst bits and first src bits just compose one byte\n\n let dst_u8s = bytemuck::cast_slice_mut::<_, u8>(dst);\n\n let src_u8s = bytemuck::cast_slice::<_, u8>(src);\n\n\n\n let rbits = dst_len & 7;\n", "file_path": "xngin-storage/src/bitmap.rs", "rank": 49, "score": 277726.26975904085 }, { "content": "#[inline]\n\npub fn expr_simplify(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n simplify_expr(qry_set, qry_id)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/expr_simplify.rs", "rank": 50, "score": 273841.8951696717 }, { "content": "#[inline]\n\npub fn pred_pushdown(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n pushdown_pred(qry_set, qry_id)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/pred_pushdown.rs", "rank": 51, "score": 273841.8951696717 }, { "content": "#[inline]\n\npub fn outerjoin_reduce(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n reduce_outerjoin(qry_set, qry_id, None)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/outerjoin_reduce.rs", "rank": 52, "score": 273841.8951696717 }, { "content": "#[inline]\n\npub fn col_prune(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n let mut use_set = 
FnvHashMap::default();\n\n prune_col(qry_set, qry_id, &mut use_set)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/col_prune.rs", "rank": 53, "score": 273841.8951696717 }, { "content": "#[inline]\n\npub fn derived_unfold(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n let mut mapping = HashMap::new();\n\n unfold_derived(qry_set, qry_id, &mut mapping, Mode::Full)\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/derived_unfold.rs", "rank": 54, "score": 273841.8951696717 }, { "content": "fn write_prefix<F: Write>(f: &mut F, spans: &[Span]) -> fmt::Result {\n\n for &span in spans {\n\n match span {\n\n Span::Space(n) => {\n\n for _ in 0..n {\n\n f.write_char(' ')?\n\n }\n\n }\n\n Span::Branch(1, false) => {\n\n f.write_char(BRANCH_1)?;\n\n for _ in 1..INDENT {\n\n f.write_char(LINE)?\n\n }\n\n }\n\n Span::Branch(_, false) => {\n\n f.write_char(BRANCH_N)?;\n\n for _ in 1..INDENT {\n\n f.write_char(LINE)?\n\n }\n\n }\n", "file_path": "xngin-plan/src/explain.rs", "rank": 55, "score": 272792.72353241255 }, { "content": "#[inline]\n\nfn is_ident_char(c: u8) -> bool {\n\n c.is_ascii_alphanumeric() || c == b'_' || c == b'$'\n\n}\n\n\n", "file_path": "xngin-frontend/src/parser/mod.rs", "rank": 56, "score": 264187.73182865744 }, { "content": "pub trait OpMutVisitor {\n\n type Cont: Effect;\n\n type Break;\n\n /// Returns true if continue\n\n #[inline]\n\n fn enter(&mut self, _op: &mut Op) -> ControlFlow<Self::Break, Self::Cont> {\n\n ControlFlow::Continue(Self::Cont::default())\n\n }\n\n\n\n /// Returns true if continue\n\n #[inline]\n\n fn leave(&mut self, _op: &mut Op) -> ControlFlow<Self::Break, Self::Cont> {\n\n ControlFlow::Continue(Self::Cont::default())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "xngin-plan/src/op.rs", "rank": 57, "score": 260570.08304333995 }, { "content": "#[inline]\n\nfn update_use_set(use_set: &mut FnvHashMap<QueryID, BTreeMap<u32, u32>>) {\n\n for mapping in use_set.values_mut() {\n\n for 
(i, old) in mapping.values_mut().enumerate() {\n\n *old = i as u32;\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::builder::tests::{assert_j_plan, get_lvl_queries, print_plan};\n\n\n\n #[test]\n\n fn test_col_prune_const() {\n\n assert_j_plan(\"select 1 from t3\", |sql, mut plan| {\n\n let subq = get_lvl_queries(&plan, 1);\n\n assert_eq!(4, subq[0].out_cols().len());\n\n col_prune(&mut plan.qry_set, plan.root).unwrap();\n\n print_plan(sql, &plan);\n", "file_path": "xngin-plan/src/rule/col_prune.rs", "rank": 58, "score": 257453.7564134869 }, { "content": "pub trait AlignPartialOrd<Rhs: ?Sized = Self> {\n\n fn align_partial_cmp(&self, other: &Rhs) -> Option<Ordering>;\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum AlignType {\n\n Identical,\n\n F64,\n\n Bytes,\n\n Datetime,\n\n}\n\n\n\nimpl AlignType {\n\n /// Align two runtime types to single type for comparison.\n\n ///\n\n /// All numeric types are aligned to f64, which can cover most cases\n\n /// but has some issues on precison, especially for i64, u64, decimal.\n\n /// This is the default behavior of MySQL and we choose to follow it.\n\n #[inline]\n\n pub fn cmp_align(this: RuntimeType, that: RuntimeType) -> Option<Self> {\n", "file_path": "xngin-datatype/src/align.rs", "rank": 59, "score": 255901.8897119991 }, { "content": "#[inline]\n\npub fn fix_bools(preds: &mut [Expr]) {\n\n for e in preds {\n\n if !e.ty.is_bool() {\n\n cast_arg(e, PreciseType::bool());\n\n }\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/infer.rs", "rank": 60, "score": 253278.520783766 }, { "content": "fn write_objs<'i, F, E: 'i, I>(f: &mut F, exprs: I, delimiter: &str) -> fmt::Result\n\nwhere\n\n F: Write,\n\n E: Explain,\n\n I: IntoIterator<Item = E>,\n\n{\n\n let mut exprs = exprs.into_iter();\n\n if let Some(head) = exprs.next() {\n\n head.explain(f)?\n\n }\n\n for e in exprs {\n\n f.write_str(delimiter)?;\n\n e.explain(f)?\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": 
"xngin-plan/src/explain.rs", "rank": 61, "score": 253205.06037252763 }, { "content": "fn write_refs<'i, F, E: 'i, I>(f: &mut F, exprs: I, delimiter: &str) -> fmt::Result\n\nwhere\n\n F: Write,\n\n E: Explain,\n\n I: IntoIterator<Item = &'i E>,\n\n{\n\n let mut exprs = exprs.into_iter();\n\n if let Some(head) = exprs.next() {\n\n head.explain(f)?\n\n }\n\n for e in exprs {\n\n f.write_str(delimiter)?;\n\n e.explain(f)?\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "xngin-plan/src/explain.rs", "rank": 62, "score": 253205.06037252763 }, { "content": "/// General trait to wrap expressions to perform constant folding,\n\n/// as well as checking whether an expression rejects null given\n\n/// specific condition.\n\npub trait Fold: Sized {\n\n /// fold consumes self and returns any error if folding can\n\n /// be performed but fails.\n\n fn fold(self) -> Result<Expr> {\n\n self.replace_fold(|_| {})\n\n }\n\n\n\n fn replace_fold<F: Fn(&mut Expr)>(self, f: F) -> Result<Expr>;\n\n\n\n fn reject_null<F: Fn(&mut Expr)>(self, f: F) -> Result<bool> {\n\n self.replace_fold(f).map(|res| match &res.kind {\n\n ExprKind::Const(Const::Null) => true,\n\n ExprKind::Const(c) => c.is_zero().unwrap_or_default(),\n\n _ => false,\n\n })\n\n }\n\n}\n\n\n\nimpl Fold for Expr {\n\n fn replace_fold<F: Fn(&mut Expr)>(mut self, f: F) -> Result<Expr> {\n\n let mut fe = FoldExpr(&f);\n\n self.walk_mut(&mut fe).unbranch()?;\n\n Ok(self)\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/mod.rs", "rank": 63, "score": 244214.90773318574 }, { "content": "#[inline]\n\npub fn bounded(buf_size: usize, block_len: usize) -> (InputBuffer, OutputBuffer) {\n\n let (tx, rx) = flume::bounded(buf_size);\n\n (\n\n InputBuffer { rx },\n\n OutputBuffer {\n\n expected_len: block_len,\n\n tx,\n\n },\n\n )\n\n}\n\n\n", "file_path": "xngin-runtime/src/buf.rs", "rank": 64, "score": 244084.85907764998 }, { "content": "#[inline]\n\nfn check_ord_left_eq(ord: Ordering, r_kind: PredFuncKind) -> bool {\n\n match (ord, r_kind) 
{\n\n // a=2 and a>1, a=2 and a>=1, a=2 and a!=1\n\n (\n\n Ordering::Greater,\n\n PredFuncKind::Greater | PredFuncKind::GreaterEqual | PredFuncKind::NotEqual,\n\n ) => true,\n\n (Ordering::Greater, _) => false,\n\n // a=1 and a<2, a=1 and a <=2, a=1 and a!=2\n\n (Ordering::Less, PredFuncKind::Less | PredFuncKind::LessEqual | PredFuncKind::NotEqual) => {\n\n true\n\n }\n\n (Ordering::Less, _) => false,\n\n // a=1 and a>=1, a=1 and a<=1, a=1 and a=1\n\n (\n\n Ordering::Equal,\n\n PredFuncKind::GreaterEqual | PredFuncKind::LessEqual | PredFuncKind::Equal,\n\n ) => true,\n\n (Ordering::Equal, _) => false,\n\n }\n", "file_path": "xngin-plan/src/rule/expr_simplify.rs", "rank": 65, "score": 239914.64626370056 }, { "content": "#[inline]\n\npub fn fold_not(arg: &ExprKind) -> Result<Option<Const>> {\n\n match arg {\n\n ExprKind::Const(Const::Null) => Ok(Some(Const::Null)),\n\n ExprKind::Const(c) => fold_not_const(c),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/not.rs", "rank": 66, "score": 236615.80963111337 }, { "content": "#[inline]\n\nfn rewrite_exprs(op: &mut Op, mapping: &HashMap<Col, Expr>) -> RuleEffect {\n\n struct Rewrite<'a>(&'a HashMap<Col, Expr>);\n\n impl ExprMutVisitor for Rewrite<'_> {\n\n type Cont = RuleEffect;\n\n type Break = ();\n\n #[inline]\n\n fn leave(&mut self, e: &mut Expr) -> ControlFlow<(), RuleEffect> {\n\n if let ExprKind::Col(c) = &e.kind {\n\n if let Some(new) = self.0.get(c) {\n\n *e = new.clone();\n\n return ControlFlow::Continue(RuleEffect::EXPR);\n\n }\n\n }\n\n ControlFlow::Continue(RuleEffect::NONE)\n\n }\n\n }\n\n let mut eff = RuleEffect::NONE;\n\n if mapping.is_empty() {\n\n return eff;\n\n }\n", "file_path": "xngin-plan/src/rule/derived_unfold.rs", "rank": 67, "score": 235409.76140429653 }, { "content": "#[test]\n\nfn test_parse_bool_and_null() -> anyhow::Result<()> {\n\n for c in vec![\n\n (\"true\", Expr::bool_lit(true)),\n\n (\"True\", Expr::bool_lit(true)),\n\n (\"TRUE\", Expr::bool_lit(true)),\n\n 
(\"tRUE\", Expr::bool_lit(true)),\n\n (\"false\", Expr::bool_lit(false)),\n\n (\"null\", Expr::null_lit()),\n\n (\"Null\", Expr::null_lit()),\n\n (\"NULL\", Expr::null_lit()),\n\n (\"nULL\", Expr::null_lit()),\n\n ] {\n\n check_expr(c)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "xngin-frontend/src/parser/expr/tests.rs", "rank": 68, "score": 234236.46097518137 }, { "content": "#[inline]\n\nfn analyze_conj_preds(exprs: &[Expr], rn_map: &mut HashMap<QueryID, Vec<Expr>>) -> Result<()> {\n\n if exprs.is_empty() {\n\n return Ok(());\n\n }\n\n let mut tmp = HashSet::new();\n\n for e in exprs {\n\n tmp.clear();\n\n e.collect_qry_ids(&mut tmp);\n\n for qid in &tmp {\n\n if reject_null_single(e, *qid)? {\n\n rn_map.entry(*qid).or_default().push(e.clone());\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n// translate reject null expressions,\n\n// returns the expression set that still rejects null\n", "file_path": "xngin-plan/src/rule/outerjoin_reduce.rs", "rank": 69, "score": 233493.17280116538 }, { "content": "#[inline]\n\nfn init_joingraph(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<()> {\n\n qry_set.transform_op(qry_id, |qry_set, location, op| {\n\n if location == Location::Intermediate {\n\n // only build join graph in intermediate queries\n\n let mut init = InitGraph { qry_set };\n\n op.walk_mut(&mut init).unbranch()\n\n } else {\n\n Ok(())\n\n }\n\n })?\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/joingraph_initialize.rs", "rank": 70, "score": 231036.50561387406 }, { "content": "#[inline]\n\npub fn fold_isfalse(arg: &ExprKind) -> Result<Option<Const>> {\n\n match arg {\n\n ExprKind::Const(c) => fold_isfalse_const(c),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/cmp.rs", "rank": 71, "score": 229763.9483988844 }, { "content": "#[inline]\n\npub fn fold_isnotnull(arg: &ExprKind) -> Result<Option<Const>> {\n\n match arg {\n\n ExprKind::Const(c) => fold_isnotnull_const(c),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": 
"xngin-expr/src/fold/cmp.rs", "rank": 72, "score": 229763.9483988844 }, { "content": "#[inline]\n\npub fn fold_isnottrue(arg: &ExprKind) -> Result<Option<Const>> {\n\n match arg {\n\n ExprKind::Const(c) => fold_isnottrue_const(c),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/cmp.rs", "rank": 73, "score": 229763.9483988844 }, { "content": "#[inline]\n\npub fn fold_neg(arg: &ExprKind) -> Result<Option<Const>> {\n\n match &arg {\n\n ExprKind::Const(Const::Null) => Ok(Some(Const::Null)),\n\n ExprKind::Const(c) => fold_neg_const(c),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/neg.rs", "rank": 74, "score": 229763.9483988844 }, { "content": "#[inline]\n\npub fn fold_isnull(arg: &ExprKind) -> Result<Option<Const>> {\n\n match arg {\n\n ExprKind::Const(c) => fold_isnull_const(c),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/cmp.rs", "rank": 75, "score": 229763.9483988844 }, { "content": "#[inline]\n\npub fn fold_istrue(arg: &ExprKind) -> Result<Option<Const>> {\n\n match arg {\n\n ExprKind::Const(c) => fold_istrue_const(c),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/cmp.rs", "rank": 76, "score": 229763.9483988844 }, { "content": "#[inline]\n\npub fn fold_isnotfalse(arg: &ExprKind) -> Result<Option<Const>> {\n\n match arg {\n\n ExprKind::Const(c) => fold_isnotfalse_const(c),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/cmp.rs", "rank": 77, "score": 229763.9483988844 }, { "content": "/// Convert to ControlFlow\n\npub trait Branch<B, C> {\n\n fn branch(self) -> ControlFlow<B, C>;\n\n}\n\n\n\nimpl<B, C> Branch<B, C> for Result<C, B> {\n\n #[inline]\n\n fn branch(self) -> ControlFlow<B, C> {\n\n match self {\n\n Ok(c) => ControlFlow::Continue(c),\n\n Err(b) => ControlFlow::Break(b),\n\n }\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/controlflow.rs", "rank": 78, "score": 229302.48887211177 }, { "content": "struct Collect<'a>(&'a mut 
FnvHashMap<QueryID, BTreeMap<u32, u32>>);\n\nimpl OpVisitor for Collect<'_> {\n\n type Cont = ();\n\n type Break = ();\n\n #[inline]\n\n fn enter(&mut self, op: &Op) -> ControlFlow<()> {\n\n for e in op.exprs() {\n\n let _ = e.walk(self);\n\n }\n\n ControlFlow::Continue(())\n\n }\n\n}\n\nimpl<'a> ExprVisitor<'a> for Collect<'_> {\n\n type Cont = ();\n\n type Break = ();\n\n #[inline]\n\n fn enter(&mut self, e: &Expr) -> ControlFlow<()> {\n\n if let ExprKind::Col(Col::QueryCol(qry_id, idx)) = &e.kind {\n\n self.0.entry(*qry_id).or_default().insert(*idx, 0);\n\n }\n\n ControlFlow::Continue(())\n\n }\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/col_prune.rs", "rank": 79, "score": 226613.31098099198 }, { "content": "#[inline]\n\npub fn fold_safeeq(lhs: &ExprKind, rhs: &ExprKind) -> Result<Option<Const>> {\n\n match (lhs, rhs) {\n\n (ExprKind::Const(c1), ExprKind::Const(c2)) => fold_safeeq_const(c1, c2),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/cmp.rs", "rank": 80, "score": 226328.93560086243 }, { "content": "#[inline]\n\npub fn fold_sub(lhs: &ExprKind, rhs: &ExprKind) -> Result<Option<Const>> {\n\n match (lhs, rhs) {\n\n (ExprKind::Const(Const::Null), _) | (_, ExprKind::Const(Const::Null)) => {\n\n Ok(Some(Const::Null))\n\n }\n\n (ExprKind::Const(lhs), ExprKind::Const(rhs)) => fold_sub_const(lhs, rhs),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/sub.rs", "rank": 81, "score": 226328.93560086243 }, { "content": "#[inline]\n\npub fn fold_add(lhs: &ExprKind, rhs: &ExprKind) -> Result<Option<Const>> {\n\n match (lhs, rhs) {\n\n (ExprKind::Const(Const::Null), _) | (_, ExprKind::Const(Const::Null)) => {\n\n Ok(Some(Const::Null))\n\n }\n\n (ExprKind::Const(lhs), ExprKind::Const(rhs)) => fold_add_const(lhs, rhs),\n\n _ => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "xngin-expr/src/fold/add.rs", "rank": 82, "score": 226328.93560086243 }, { "content": "pub trait ByteRepr: Default + Copy {\n\n /// Convert value to byte 
vector.\n\n fn to_bytes(&self) -> SmallVec<[u8; 16]>;\n\n\n\n /// Convert bytes to value.\n\n fn from_bytes(bs: &[u8]) -> Self;\n\n\n\n /// Write value into byte format.\n\n fn write_bytes(&self, buf: &mut [u8]);\n\n\n\n /// Write out value slice in byte format.\n\n fn write_all<W: io::Write>(writer: &mut W, src: &[Self]) -> io::Result<usize>;\n\n}\n\n\n\nmacro_rules! impl_num_for_byte_repr {\n\n ($ty:ty) => {\n\n impl ByteRepr for $ty {\n\n #[inline]\n\n fn to_bytes(&self) -> SmallVec<[u8; 16]> {\n\n self.to_ne_bytes().into_iter().collect()\n", "file_path": "xngin-storage/src/repr.rs", "rank": 83, "score": 226128.45265809173 }, { "content": "fn generate_shape(qs: &QuerySet, root: &QueryID, shape: &mut Vec<OpKind>) {\n\n if let Some(subq) = qs.get(root) {\n\n let mut sg = ShapeGen { qs, shape };\n\n subq.root.walk(&mut sg);\n\n }\n\n}\n\n\n", "file_path": "xngin-plan/src/lgc.rs", "rank": 84, "score": 225628.36409739289 }, { "content": "#[inline]\n\nfn pushdown_pred(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n qry_set.transform_op(qry_id, |qry_set, _, op| {\n\n let mut ppd = PredPushdown { qry_set };\n\n op.walk_mut(&mut ppd).unbranch()\n\n })?\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/pred_pushdown.rs", "rank": 85, "score": 222037.8835580481 }, { "content": "#[inline]\n\nfn simplify_expr(qry_set: &mut QuerySet, qry_id: QueryID) -> Result<RuleEffect> {\n\n qry_set.transform_op(qry_id, |qry_set, _, op| {\n\n let mut es = ExprSimplify { qry_set };\n\n op.walk_mut(&mut es).unbranch()\n\n })?\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/expr_simplify.rs", "rank": 86, "score": 222037.8835580481 }, { "content": "#[inline]\n\nfn extract(op: &mut Op) -> (Op, Vec<(Expr, SmolStr)>) {\n\n match mem::take(op) {\n\n Op::Proj { cols, input } => (*input, cols),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "xngin-plan/src/rule/derived_unfold.rs", "rank": 87, "score": 221787.4225614721 }, { "content": "#[async_trait]\n\npub trait Work: 
Sized + Send + Sync + 'static {\n\n type Output: Send + Sync;\n\n\n\n /// run the work to get output\n\n async fn run(self) -> Self::Output;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::buf;\n\n use crate::cancel::Cancellable;\n\n use crate::tests::single_thread_executor;\n\n use futures_lite::future;\n\n use futures_lite::StreamExt;\n\n use xngin_compute::eval::QueryEvalPlan;\n\n use xngin_datatype::PreciseType;\n\n use xngin_expr::infer::fix_rec;\n\n use xngin_expr::{Const, Expr, FuncKind, QueryID};\n\n use xngin_storage::attr::Attr;\n", "file_path": "xngin-runtime/src/exec/mod.rs", "rank": 88, "score": 221207.66730726842 }, { "content": "#[inline]\n\npub fn parse_stmt<'a, I: ParseInput<'a>>(input: I) -> Result<Statement<'a>> {\n\n terminated::<_, _, _, NomError<I>, _, _>(statement, spcmt0)(input)\n\n .map(|(_, o)| o)\n\n .map_err(convert_simple_error)\n\n}\n\n\n\n/// verbose statement parsing, if error occurs, use `convert_error` to find more details\n", "file_path": "xngin-frontend/src/parser/mod.rs", "rank": 89, "score": 219846.49404258182 }, { "content": "pub trait OpVisitor {\n\n type Cont: Effect;\n\n type Break;\n\n /// Returns true if continue\n\n #[inline]\n\n fn enter(&mut self, _op: &Op) -> ControlFlow<Self::Break, Self::Cont> {\n\n ControlFlow::Continue(Self::Cont::default())\n\n }\n\n\n\n /// Returns true if continue\n\n #[inline]\n\n fn leave(&mut self, _op: &Op) -> ControlFlow<Self::Break, Self::Cont> {\n\n ControlFlow::Continue(Self::Cont::default())\n\n }\n\n}\n\n\n", "file_path": "xngin-plan/src/op.rs", "rank": 90, "score": 219680.85518394446 }, { "content": "#[inline]\n\npub fn compact_bitmap(bm: &Bitmap) -> Option<Sel> {\n\n let mut new_count = 0;\n\n let mut new_indexes = [0u16; 6];\n\n let mut idx = 0;\n\n 'INDEX: for (flag, n) in bm.range_iter() {\n\n let n = n as u16;\n\n if flag {\n\n for i in idx..idx + n {\n\n if new_count == 6 {\n\n new_count += 1;\n\n break 'INDEX;\n\n }\n\n new_indexes[new_count] = i;\n\n 
new_count += 1;\n\n }\n\n }\n\n idx += n;\n\n }\n\n match new_count {\n\n 0 => Some(Sel::None(bm.len() as u16)),\n", "file_path": "xngin-storage/src/sel.rs", "rank": 91, "score": 216720.56885356636 }, { "content": "#[inline]\n\npub fn parse_query<'a, I: ParseInput<'a>>(input: I) -> Result<QueryExpr<'a>> {\n\n terminated::<_, _, _, NomError<I>, _, _>(query::query_expr, spcmt0)(input)\n\n .map(|(_, o)| o)\n\n .map_err(convert_simple_error)\n\n}\n\n\n\n/// verbose query parsing, if error occurs, use `convert_error` to find more details\n", "file_path": "xngin-frontend/src/parser/mod.rs", "rank": 92, "score": 216502.50275317984 }, { "content": "#[inline]\n\npub fn parse_stmt_verbose<'a, I: ParseInput<'a>>(input: I) -> Result<Statement<'a>> {\n\n terminated(statement, spcmt0)(input)\n\n .map(|(_, o)| o)\n\n .map_err(|e| convert_verbose_error(input, e))\n\n}\n\n\n", "file_path": "xngin-frontend/src/parser/mod.rs", "rank": 93, "score": 216502.50275317984 }, { "content": "#[inline]\n\nfn validate_order(aggr_groups: &[expr::Expr], scalar_aggr: bool, order: &[SortItem]) -> Result<()> {\n\n if scalar_aggr {\n\n // disallow non-aggr columns in ORDER BY\n\n if order.iter().any(|si| si.expr.contains_non_aggr_cols()) {\n\n return Err(Error::FieldsSelectedNotInGroupBy);\n\n }\n\n return Ok(());\n\n }\n\n if !aggr_groups.is_empty() {\n\n // all non-aggr columns must exist in aggr groups\n\n let mut non_aggr_cols = vec![];\n\n for si in order {\n\n si.expr.collect_non_aggr_cols_into(&mut non_aggr_cols);\n\n }\n\n for c in non_aggr_cols {\n\n if aggr_groups.iter().all(|e| !e.is_col(&c)) {\n\n return Err(Error::FieldsSelectedNotInGroupBy);\n\n }\n\n }\n\n return Ok(());\n", "file_path": "xngin-plan/src/builder.rs", "rank": 94, "score": 214417.54568667483 }, { "content": "/// Simplify function.\n\n///\n\n/// 1. remove pair of negating, e.g.\n\n/// --e => e\n\n/// 2. compute negating constant, e.g.\n\n/// -c => new_c\n\n/// 3. 
compute addition of constants, e.g.\n\n/// 1+1 => 2\n\n/// 4. remote adding zero, e.g.\n\n/// e+0 => e\n\n/// 5. swap order of variable in addtion, e.g.\n\n/// 1+e => e+1\n\n/// 6. associative, e.g.\n\n/// (e+1)+2 => e+3\n\n/// Note: (1+e)+2 => e+3 -- won't happen after rule 5, only for add/mul\n\n/// 7. commutative and associative, e.g.\n\n/// 1+(e+2) => e+3\n\n/// Note: 1+(2+e) => e+3 -- won't happen after rule 5, only for add/mul\n\n/// 8. commutative and associative, e.g.\n\n/// (e1+1)+(e2+2) => (e1+e2)+3\n\nfn simplify_func(fkind: FuncKind, fargs: &mut [Expr]) -> Result<Option<Expr>> {\n\n let res = match fkind {\n\n FuncKind::Neg => match &mut fargs[0].kind {\n\n // rule 1: --e => e\n\n // todo: should cast to f64 if original expression is not numeric\n\n ExprKind::Func { kind, args, .. } if *kind == FuncKind::Neg => {\n\n Some(mem::take(&mut args[0]))\n\n }\n\n // rule 2: -c => new_c\n\n ExprKind::Const(c) => fold_neg_const(c)?.map(Expr::new_const),\n\n _ => None,\n\n },\n\n FuncKind::Add => match fargs {\n\n // rule 3: 1+1 => 2\n\n [Expr {\n\n kind: ExprKind::Const(c1),\n\n ..\n\n }, Expr {\n\n kind: ExprKind::Const(c2),\n\n ..\n", "file_path": "xngin-plan/src/rule/expr_simplify.rs", "rank": 95, "score": 213846.3113145262 }, { "content": "#[inline]\n\npub fn parse_query_verbose<'a, I: ParseInput<'a>>(input: I) -> Result<QueryExpr<'a>> {\n\n terminated(query_expr, spcmt0)(input)\n\n .map(|(_, o)| o)\n\n .map_err(|e| convert_verbose_error(input, e))\n\n}\n\n\n\n/// fast statement parsing\n", "file_path": "xngin-frontend/src/parser/mod.rs", "rank": 96, "score": 213299.09812710292 }, { "content": "struct FoldExpr<'a, F>(&'a F);\n\n\n\nimpl<'a, F> FoldExpr<'a, F> {\n\n fn update(&mut self, res: Result<Option<Const>>, e: &mut Expr) -> ControlFlow<Error> {\n\n match res {\n\n Err(err) => ControlFlow::Break(err),\n\n Ok(Some(c)) => {\n\n *e = Expr::new(ExprKind::Const(c));\n\n ControlFlow::Continue(())\n\n }\n\n Ok(None) => ControlFlow::Continue(()),\n\n }\n\n 
}\n\n}\n\n\n\nimpl<'a, F: Fn(&mut Expr)> ExprMutVisitor for FoldExpr<'a, F> {\n\n type Cont = ();\n\n type Break = Error;\n\n fn leave(&mut self, e: &mut Expr) -> ControlFlow<Error> {\n\n (self.0)(e);\n", "file_path": "xngin-expr/src/fold/mod.rs", "rank": 97, "score": 212058.3473241995 }, { "content": "pub trait Typed {\n\n /// Returns precise type\n\n fn pty(&self) -> PreciseType;\n\n}\n\n\n\n#[repr(u8)]\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum TimeUnit {\n\n Microsecond = 1,\n\n Second = 2,\n\n Minute = 3,\n\n Hour = 4,\n\n Day = 5,\n\n Week = 6,\n\n Month = 7,\n\n Quarter = 8,\n\n Year = 9,\n\n}\n\n\n\nimpl TimeUnit {\n", "file_path": "xngin-datatype/src/lib.rs", "rank": 98, "score": 211804.44205067272 }, { "content": "pub trait ToResult {\n\n type Output;\n\n\n\n fn must_ok(self) -> Result<Self::Output>;\n\n}\n\n\n\nimpl<T> ToResult for Option<T> {\n\n type Output = T;\n\n\n\n fn must_ok(self) -> Result<Self::Output> {\n\n self.ok_or(Error::MustOK)\n\n }\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum Error {\n\n #[error(\"Unsupported SQL syntax '{0}'\")]\n\n UnsupportedSqlSyntax(String),\n\n #[error(\"Duplicated table alias '{0}'\")]\n\n DuplicatedTableAlias(SmolStr),\n", "file_path": "xngin-plan/src/error.rs", "rank": 99, "score": 211607.2282062648 } ]
Rust
lib/bobbin-sys/src/system.rs
thomasantony/bobbin-sdk
37375ca40351352a029aceb8b0cf17650a3624f6
use core::ops::{Deref, DerefMut}; use bobbin_mcu::mcu::Mcu; use heap::Heap; use tick::Tick; use pend::Pend; use irq_dispatch::IrqDispatcher; use console::Console; struct SystemToken; static mut SYSTEM_TOKEN: Option<SystemToken> = Some(SystemToken); pub trait SystemProvider { type Mcu: Mcu; type Clk; fn init() -> Self; fn init_mcu() -> Self::Mcu; fn init_clk() -> Self::Clk; fn init_heap() -> Heap; fn init_dispatcher() -> IrqDispatcher<Self::Mcu>; fn init_pend() -> Pend; fn init_tick(&Self::Clk) -> Tick; fn init_console(&Self::Clk, &mut Heap) {} fn init_led(&Self::Clk, &mut Heap) {} fn init_btn(&Self::Clk, &mut Heap) {} } pub struct System<S: SystemProvider> { provider: S, mcu: S::Mcu, clk: S::Clk, heap: Heap, tick: Tick, pend: Pend, dispatcher: IrqDispatcher<S::Mcu>, _private: () } impl<S: SystemProvider> System<S> { pub fn take() -> Self { let provider = S::init(); unsafe { asm!("cpsid i "); } unsafe { while let None = SYSTEM_TOKEN.take() {} } let mcu = S::init_mcu(); let clk = S::init_clk(); let mut heap = S::init_heap(); let dispatcher = S::init_dispatcher(); let tick = S::init_tick(&clk); let pend = S::init_pend(); S::init_console(&clk, &mut heap); S::init_led(&clk, &mut heap); S::init_btn(&clk, &mut heap); System { provider, mcu, clk, heap, tick, pend, dispatcher, _private: (), } } pub fn release(system: Self) { let System { provider, mcu, clk, heap, tick, pend, dispatcher, _private } = system; let _ = provider; let _ = mcu; let _ = clk; Tick::release(tick); Pend::release(pend); Heap::release(heap); IrqDispatcher::release(dispatcher); unsafe { SYSTEM_TOKEN = Some(SystemToken) } } pub fn mcu(&self) -> &S::Mcu { &self.mcu } pub fn mcu_mut(&mut self) -> &mut S::Mcu { &mut self.mcu } pub fn clk(&self) -> &S::Clk { &self.clk } pub fn clk_mut(&mut self) -> &mut S::Clk { &mut self.clk } pub fn heap(&self) -> &Heap { &self.heap } pub fn heap_mut(&mut self) -> &mut Heap { &mut self.heap } pub fn tick(&self) -> &Tick { &self.tick } pub fn tick_mut(&mut self) -> &mut Tick 
{ &mut self.tick } pub fn pend(&self) -> &Pend { &self.pend } pub fn pend_mut(&mut self) -> &mut Pend { &mut self.pend } pub fn dispatcher(&self) -> &IrqDispatcher<S::Mcu> { &self.dispatcher } pub fn dispatcher_mut(&mut self) -> &mut IrqDispatcher<S::Mcu> { &mut self.dispatcher } pub fn console(&self) -> &'static Console<'static> { if let Some(console) = Console::borrow() { console } else { loop {} } } pub fn run<T, F: FnOnce(&Self) -> T>(&mut self, f: F) -> T { unsafe { asm!("cpsie i"); } let ret = f(&*self); unsafe { asm!("cpsid i"); } ret } } impl<S: SystemProvider> Deref for System<S> { type Target = S; fn deref(&self) -> &Self::Target { &self.provider } } impl<S: SystemProvider> DerefMut for System<S> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.provider } }
use core::ops::{Deref, DerefMut}; use bobbin_mcu::mcu::Mcu; use heap::Heap; use tick::Tick; use pend::Pend; use irq_dispatch::IrqDispatcher; use console::Console; struct SystemToken; static mut SYSTEM_TOKEN: Option<SystemToken> = Some(SystemToken); pub trait SystemProvider { type Mcu: Mcu; type Clk; fn init() -> Self; fn init_mcu() -> Self::Mcu; fn init_clk() -> Self::Clk; fn init_heap() -> Heap; fn init_dispatcher() -> IrqDispatcher<Self::Mcu>; fn init_pend() -> Pend; fn init_tick(&Self::Clk) -> Tick; fn init_console(&Self::Clk, &mut Heap) {} fn init_led(&Self::Clk, &mut Heap) {} fn init_btn(&Self::Clk, &mut Heap) {} } pub struct System<S: SystemProvider> { provider: S, mcu: S::Mcu, clk: S::Clk, heap: Heap, tick: Tick, pend: Pend, dispatcher: IrqDispatcher<S::Mcu>, _private: () } impl<S: SystemProvider> System<S> { pub fn take() -> Self { let provider = S::init(); unsafe { asm!("cpsid i "); } unsafe { while let None = SYSTEM_TOKEN.take() {} } let mcu = S::init_mcu(); let clk = S::init_clk(); let mut heap = S::init_heap(); let dispatcher = S::init_dispatcher(); let tick = S::init_tick(&clk); let pend = S::init_pend(); S::init_console(&clk, &mut heap); S::init_led(&clk, &mut heap); S::init_btn(&clk, &mut heap); System { provider, mcu, cl
pub fn release(system: Self) { let System { provider, mcu, clk, heap, tick, pend, dispatcher, _private } = system; let _ = provider; let _ = mcu; let _ = clk; Tick::release(tick); Pend::release(pend); Heap::release(heap); IrqDispatcher::release(dispatcher); unsafe { SYSTEM_TOKEN = Some(SystemToken) } } pub fn mcu(&self) -> &S::Mcu { &self.mcu } pub fn mcu_mut(&mut self) -> &mut S::Mcu { &mut self.mcu } pub fn clk(&self) -> &S::Clk { &self.clk } pub fn clk_mut(&mut self) -> &mut S::Clk { &mut self.clk } pub fn heap(&self) -> &Heap { &self.heap } pub fn heap_mut(&mut self) -> &mut Heap { &mut self.heap } pub fn tick(&self) -> &Tick { &self.tick } pub fn tick_mut(&mut self) -> &mut Tick { &mut self.tick } pub fn pend(&self) -> &Pend { &self.pend } pub fn pend_mut(&mut self) -> &mut Pend { &mut self.pend } pub fn dispatcher(&self) -> &IrqDispatcher<S::Mcu> { &self.dispatcher } pub fn dispatcher_mut(&mut self) -> &mut IrqDispatcher<S::Mcu> { &mut self.dispatcher } pub fn console(&self) -> &'static Console<'static> { if let Some(console) = Console::borrow() { console } else { loop {} } } pub fn run<T, F: FnOnce(&Self) -> T>(&mut self, f: F) -> T { unsafe { asm!("cpsie i"); } let ret = f(&*self); unsafe { asm!("cpsid i"); } ret } } impl<S: SystemProvider> Deref for System<S> { type Target = S; fn deref(&self) -> &Self::Target { &self.provider } } impl<S: SystemProvider> DerefMut for System<S> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.provider } }
k, heap, tick, pend, dispatcher, _private: (), } }
function_block-function_prefixed
[ { "content": "pub fn run_with_sys<S: SystemProvider>(mut sys: System<S>) -> ! {\n\n let ticker = Ticker::<S::Mcu>::new();\n\n let pender = Pender::new();\n\n\n\n let _guard_tick = match sys.tick_mut().register(&ticker) {\n\n Ok(guard) => guard,\n\n Err(_) => {\n\n println!(\"Error registering tick handler.\");\n\n loop {}\n\n } \n\n };\n\n\n\n let _guard_pend = match sys.pend_mut().register(&pender) {\n\n Ok(guard) => guard,\n\n Err(_) => {\n\n println!(\"Error registering pend handler.\");\n\n loop {}\n\n } \n\n };\n\n\n", "file_path": "app/examples/src/pend_handler.rs", "rank": 0, "score": 400928.35727193026 }, { "content": "pub fn run_with_sys<S: SystemProvider>(mut sys: System<S>) -> ! {\n\n let ticker = Ticker::new();\n\n let _guard = match sys.tick_mut().register(&ticker) {\n\n Ok(guard) => guard,\n\n Err(_) => {\n\n println!(\"Error registering tick handler.\");\n\n loop {}\n\n } \n\n };\n\n sys.run(|sys| {\n\n loop {\n\n sys.console().write(b\"Tick...\");\n\n sys.console().write_u32(ticker.counter(), 10);\n\n sys.console().writeln(b\"\");\n\n sys.tick().delay(500);\n\n }\n\n }) \n\n}\n\n\n\npub struct Ticker {\n", "file_path": "app/examples/src/tick_handler.rs", "rank": 1, "score": 400922.49841521744 }, { "content": "pub fn run_with_sys<S: SystemProvider>(mut sys: System<S>) -> ! 
{\n\n if true {\n\n println!(\"Flag Example\");\n\n // Flag Example\n\n let flag_bool: &mut bool = sys.heap_mut().new(false);\n\n let (flag_setter, flag_getter) = flag_pair(flag_bool);\n\n\n\n let flag_task = FlagTask { flag_setter };\n\n let _flag_guard = match sys.tick_mut().register(&flag_task) {\n\n Ok(guard) => guard,\n\n Err(_) => {\n\n println!(\"Unable to register flag task\");\n\n loop {}\n\n } \n\n };\n\n\n\n sys.run(|_| {\n\n println!(\"Waiting for five ticks\");\n\n for i in 0..5 {\n\n while flag_getter.is_clr() {}\n", "file_path": "app/examples/src/ipc.rs", "rank": 2, "score": 351100.33532800386 }, { "content": "pub trait Pend {\n\n fn pend();\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/mcu.rs", "rank": 3, "score": 332284.48277461017 }, { "content": "pub fn run(mcu: &mut GetHeap) -> Result<(), Error> {\n\n for i in 0..1024 {\n\n unsafe { DATA[i] = i as u8; }\n\n }\n\n\n\n // println!(\"Memory Test\");\n\n // println!(\"{:?}\", sys.memory());\n\n\n\n let heap = mcu.heap();\n\n // println!(\"Initial Heap: {:?}\", heap);\n\n\n\n unsafe { heap.extend(4096) }\n\n\n\n // println!(\"{:?}\", heap);\n\n\n\n #[derive(Debug)]\n\n pub struct Abc { \n\n a: u32,\n\n b: u32,\n\n c: u32,\n", "file_path": "board/nucleo-f746zg/examples/memory.rs", "rank": 4, "score": 331402.9950753333 }, { "content": "pub fn run(mcu: &mut GetHeap) -> Result<(), Error> {\n\n for i in 0..1024 {\n\n unsafe { DATA[i] = i as u8; }\n\n }\n\n\n\n // println!(\"Memory Test\");\n\n // println!(\"{:?}\", sys.memory());\n\n\n\n let heap = mcu.heap();\n\n // println!(\"Initial Heap: {:?}\", heap);\n\n\n\n unsafe { heap.extend(4096) }\n\n\n\n // println!(\"{:?}\", heap);\n\n\n\n #[derive(Debug)]\n\n pub struct Abc { \n\n a: u32,\n\n b: u32,\n\n c: u32,\n", "file_path": "board/nucleo-f429zi/examples/memory.rs", "rank": 5, "score": 331402.9950753333 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/feather-m0/src/sys.rs", "rank": 6, "score": 
328960.43319371936 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/arduino-zero/src/sys.rs", "rank": 7, "score": 328960.43319371936 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/nucleo-f429zi/src/sys.rs", "rank": 8, "score": 328960.43319371936 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/discovery-stm32f3/src/sys.rs", "rank": 9, "score": 328960.43319371936 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/nucleo-l432kc/src/sys.rs", "rank": 10, "score": 328960.43319371936 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/frdm-k64f/src/sys.rs", "rank": 11, "score": 328960.43319371936 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/discovery-stm32f429i/src/sys.rs", "rank": 12, "score": 328960.43319371936 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board/nucleo-f746zg/src/sys.rs", "rank": 13, "score": 328960.43319371936 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board-src/template-thumbv6/src/sys.rs", "rank": 14, "score": 324426.20523258625 }, { "content": "pub fn init() -> System<Board> {\n\n System::take()\n\n}\n", "file_path": "board-src/template-thumbv7/src/sys.rs", "rank": 15, "score": 324426.20523258625 }, { "content": "pub fn run<S: SystemProvider + GetFlash>(mut sys: System<S>, flash_addr: *mut u8, flash_len: usize) -> ! 
\n\n{\n\n sys.run(|sys| {\n\n let flash = sys.flash();\n\n let console = sys.console();\n\n console.writeln(b\"Erasing Flash\");\n\n\n\n flash.erase_begin();\n\n flash.erase(flash_addr).unwrap_or_abort(\"Error erasing flash\");\n\n flash.erase_end();\n\n unsafe { console.dump_ptr(flash_addr as *const u8, flash_len); }\n\n\n\n console.writeln(b\"Writing Flash\");\n\n let mut buf = [0u8; 0x100];\n\n for i in 0..buf.len() {\n\n buf[i] = i as u8;\n\n }\n\n flash.write_begin();\n\n flash.write(flash_addr as *mut u8, &buf).unwrap_or_abort(\"Error writing flash\");\n\n flash.write_end();\n\n unsafe { console.dump_ptr(flash_addr as *const u8, flash_len); }\n\n console.writeln(b\"Done\"); \n\n\n\n loop {}\n\n }) \n\n}", "file_path": "app/examples/src/flash.rs", "rank": 16, "score": 323374.4083848638 }, { "content": "pub trait Mcu : IrqEnable + GetActiveIrq + Pend + Sleep {\n\n fn id(&self) -> &'static str;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/mcu.rs", "rank": 18, "score": 295644.32702713896 }, { "content": "pub trait SignalType {}\n\n\n", "file_path": "lib/bobbin-mcu/src/signal.rs", "rank": 19, "score": 287992.02538379806 }, { "content": "pub fn size_type(size: u64) -> &'static str {\n\n match size {\n\n 8 => \"u8\",\n\n 16 => \"u16\",\n\n 32 => \"u32\",\n\n 64 => \"u64\",\n\n _ => panic!(\"Unsupported size: {}\", size),\n\n }\n\n}\n\n\n\n\n", "file_path": "dsl/bobbin-chip/src/codegen/mod.rs", "rank": 20, "score": 277417.7478867106 }, { "content": "/// Marker trait for an interrupt type.\n\npub trait IrqType : Default {}\n\n\n", "file_path": "lib/bobbin-mcu/src/irq.rs", "rank": 21, "score": 276048.69539021445 }, { "content": "pub trait Init {\n\n fn init_test(&self) -> &Self;\n\n}\n\n\n\nimpl Init for AdcPeriph {\n\n fn init_test(&self) -> &Self {\n\n println!(\"CCR: {:?}\", C_ADC12.ccr());\n\n println!(\"A\");\n\n self.with_cr(|r| r.set_aden(0));\n\n while self.isr().adrdy() != 0 {}\n\n\n\n println!(\"B\");\n\n // Enable Analog Voltage Regulator\n\n 
self.with_cr(|r| r.set_advregen(0b00));\n\n self.with_cr(|r| r.set_advregen(0b01));\n\n \n\n println!(\"C\");\n\n // Calibrate\n\n self.with_cr(|r| r.set_adcaldif(0));\n\n self.with_cr(|r| r.set_adcal(1));\n", "file_path": "board/discovery-stm32f3/examples/adc.rs", "rank": 22, "score": 273989.67711405817 }, { "content": "pub fn init() {\n\n PE9.port().gate_enable();\n\n PE9.mode_output();\n\n\n\n PE8.port().gate_enable();\n\n PE8.mode_output();\n\n\n\n PE10.port().gate_enable();\n\n PE10.mode_output();\n\n\n\n PE15.port().gate_enable();\n\n PE15.mode_output();\n\n\n\n PE11.port().gate_enable();\n\n PE11.mode_output();\n\n\n\n PE14.port().gate_enable();\n\n PE14.mode_output();\n\n\n\n PE12.port().gate_enable();\n", "file_path": "board/discovery-stm32f3/src/led.rs", "rank": 23, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n BTN0_PT.port().gate_enable();\n\n BTN0_PT.connect_to(BTN0);\n\n\n\n BTN1_PT.port().gate_enable();\n\n BTN1_PT.connect_to(BTN1);\n\n\n\n\n\n BTN0.set_dir_input();\n\n BTN1.set_dir_input();\n\n}\n\n\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnLow<GpioCh> { BtnLow::new(PC6_CH) }\n\n pub fn btn1(&self) -> BtnLow<GpioCh> { BtnLow::new(PA4_CH) }\n\n}", "file_path": "board/frdm-k64f/src/btn.rs", "rank": 24, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n PA17.port().gate_enable();\n\n PA17.set_mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 1 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<PortPin> {\n\n LedHigh::new(PA17_PIN)\n\n }\n\n}", "file_path": "board/feather-m0/src/led.rs", "rank": 25, "score": 273456.6298518628 }, { "content": "pub fn init() { \n\n PTB22.port().gate_enable();\n\n PTB22.connect_to(PB22); \n\n PB22.set_dir_output().set_output(true);\n\n\n\n PTB21.port().gate_enable();\n\n PTB21.connect_to(PB21);\n\n 
PB21.set_dir_output().set_output(true);\n\n\n\n PTE26.port().gate_enable();\n\n PTE26.connect_to(PE26);\n\n PE26.set_dir_output().set_output(true);\n\n\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n 1 => &LED1,\n", "file_path": "board/frdm-k64f/src/led.rs", "rank": 26, "score": 273456.6298518628 }, { "content": "pub fn init() { \n\n}", "file_path": "board/feather-m0/src/btn.rs", "rank": 27, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_down();\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnHigh<GpioPin> { BtnHigh::new(PC13_PIN) }\n\n}", "file_path": "board/nucleo-f746zg/src/btn.rs", "rank": 28, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n PG13.port().gate_enable();\n\n PG13.mode_output();\n\n\n\n PG14.port().gate_enable();\n\n PG14.mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n 1 => &LED1,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 2 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<GpioPin> { LedHigh::new(PG13_PIN) }\n\n pub fn led1(&self) -> LedHigh<GpioPin> { LedHigh::new(PG14_PIN) }\n\n}", "file_path": "board/discovery-stm32f429i/src/led.rs", "rank": 29, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_down();\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnHigh<GpioPin> { BtnHigh::new(PA0_PIN) }\n\n}", "file_path": "board/discovery-stm32f429i/src/btn.rs", "rank": 30, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_up();\n\n}\n\npub use mcu::pin::*;\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnLow<GpioPin> { BtnLow::new(PA12_PIN) }\n\n}", "file_path": "board/nucleo-l432kc/src/btn.rs", "rank": 31, "score": 
273456.6298518628 }, { "content": "pub fn init() {\n\n PA17.port().gate_enable();\n\n PA17.set_mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 1 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<PortPin> {\n\n LedHigh::new(PA17_PIN)\n\n }\n\n}", "file_path": "board/arduino-zero/src/led.rs", "rank": 32, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n PB0.port().gate_enable();\n\n PB0.mode_output();\n\n\n\n PB7.port().gate_enable();\n\n PB7.mode_output();\n\n\n\n PB14.port().gate_enable();\n\n PB14.mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n 1 => &LED1,\n\n 2 => &LED2,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 3 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<GpioPin> { LedHigh::new(PB0_PIN) }\n\n pub fn led1(&self) -> LedHigh<GpioPin> { LedHigh::new(PB7_PIN) }\n\n pub fn led2(&self) -> LedHigh<GpioPin> { LedHigh::new(PB14_PIN) }\n\n}", "file_path": "board/nucleo-f746zg/src/led.rs", "rank": 33, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_down();\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnHigh<GpioPin> { BtnHigh::new(PA0_PIN) }\n\n}", "file_path": "board/discovery-stm32f3/src/btn.rs", "rank": 34, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n PA11.port().gate_enable();\n\n PA11.set_mode_input().set_pull_enabled(true).set_output(true);\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnLow<PortPin> {\n\n BtnLow::new(PA11_PIN)\n\n }\n\n}", "file_path": "board/arduino-zero/src/btn.rs", "rank": 35, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n PB3.port().gate_enable();\n\n PB3.mode_output();\n\n}\n\n\n\nimpl GetLed for 
::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 1 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<GpioPin> { LedHigh::new(PB3_PIN) }\n\n}", "file_path": "board/nucleo-l432kc/src/led.rs", "rank": 36, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n PB0.port().gate_enable();\n\n PB0.mode_output();\n\n\n\n PB7.port().gate_enable();\n\n PB7.mode_output();\n\n\n\n PB14.port().gate_enable();\n\n PB14.mode_output();\n\n}\n\n\n\nimpl GetLed for ::Board {\n\n fn get_led(&self, index: usize) -> &Led {\n\n match index {\n\n 0 => &LED0,\n\n 1 => &LED1,\n\n 2 => &LED2,\n\n _ => unimplemented!()\n\n }\n\n }\n\n fn get_led_count(&self) -> usize { 3 }\n\n}\n\n\n\nimpl ::Board {\n\n pub fn led0(&self) -> LedHigh<GpioPin> { LedHigh::new(PB0_PIN) }\n\n pub fn led1(&self) -> LedHigh<GpioPin> { LedHigh::new(PB7_PIN) }\n\n pub fn led2(&self) -> LedHigh<GpioPin> { LedHigh::new(PB14_PIN) }\n\n}", "file_path": "board/nucleo-f429zi/src/led.rs", "rank": 37, "score": 273456.6298518628 }, { "content": "pub fn init() {\n\n BTN0.port().gate_enable();\n\n BTN0.mode_input().pull_down();\n\n}\n\n\n\nimpl ::Board {\n\n pub fn btn0(&self) -> BtnHigh<GpioPin> { BtnHigh::new(PC13_PIN) }\n\n}", "file_path": "board/nucleo-f429zi/src/btn.rs", "rank": 38, "score": 273456.6298518628 }, { "content": "pub trait ClockProvider : Default {\n\n type Extal: Clock;\n\n type Extal32: Clock;\n\n fn extal(&self) -> Hz { Self::Extal::hz() }\n\n fn extal32(&self) -> Hz { Self::Extal32::hz() }\n\n fn irc48m(&self) -> Hz { Hz::from_num(48000000) }\n\n fn irc4m(&self) -> Hz { Hz::from_num(4000000) }\n\n fn irc32k(&self) -> Hz { Hz::from_num(32000) }\n\n fn lpo(&self) -> Hz { Hz::from_num(1000) }\n\n fn system(&self) -> Hz { unimplemented!() }\n\n fn bus(&self) -> Hz { unimplemented!() }\n\n fn flexbus(&self) -> Hz { unimplemented!() }\n\n fn flash(&self) -> Hz { 
unimplemented!() }\n\n fn mcgirclk(&self) -> Hz { unimplemented!() }\n\n fn erclk32k(&self) -> Hz { unimplemented!() }\n\n fn oscerclk(&self) -> Hz { unimplemented!() }\n\n fn systick(&self) -> Hz { unimplemented!() }\n\n}\n\n\n\nimpl<CP> ClockFor<::enet::Enet> for Clocks<CP> where CP: ClockProvider {\n", "file_path": "mcu/bobbin-kinetis/k64/src/clock.rs", "rank": 39, "score": 271779.8546628352 }, { "content": "pub trait ClockProvider : Default {\n\n type Osc: Clock;\n\n type Osc32: Clock;\n\n fn osc(&self) -> Hz { Self::Osc::hz() }\n\n fn osc32(&self) -> Hz { Self::Osc32::hz() }\n\n fn hsi(&self) -> Hz { Hz::from_num(8000000) }\n\n fn hse(&self) -> Hz { self.osc() }\n\n fn lsi(&self) -> Hz { Hz::from_num(40000) }\n\n fn lse(&self) -> Hz { self.osc32() }\n\n fn pllclk(&self) -> Hz { unimplemented!() }\n\n fn sysclk(&self) -> Hz { unimplemented!() }\n\n fn hclk(&self) -> Hz { unimplemented!() }\n\n fn systick(&self) -> Hz { unimplemented!() }\n\n fn fhclk(&self) -> Hz { unimplemented!() }\n\n fn pclk1(&self) -> Hz { unimplemented!() }\n\n fn pclk2(&self) -> Hz { unimplemented!() }\n\n fn tim_pclk1(&self) -> Hz { unimplemented!() }\n\n fn tim_pclk2(&self) -> Hz { unimplemented!() }\n\n fn i2c1(&self) -> Hz { unimplemented!() }\n\n fn i2c2(&self) -> Hz { unimplemented!() }\n", "file_path": "mcu/bobbin-stm32/stm32f303x/src/clock.rs", "rank": 40, "score": 271779.8546628352 }, { "content": "pub trait ClockProvider : Default {\n\n type Osc: Clock;\n\n type Osc32: Clock;\n\n fn osc(&self) -> Hz { Self::Osc::hz() }\n\n fn osc32(&self) -> Hz { Self::Osc32::hz() }\n\n fn hsi(&self) -> Hz { Hz::from_num(16000000) }\n\n fn hse(&self) -> Hz { self.osc() }\n\n fn lsi(&self) -> Hz { Hz::from_num(32000) }\n\n fn lse(&self) -> Hz { self.osc32() }\n\n fn pllclk(&self) -> Hz { unimplemented!() }\n\n fn pll48clk(&self) -> Hz { unimplemented!() }\n\n fn sysclk(&self) -> Hz { unimplemented!() }\n\n fn i2s(&self) -> Hz { unimplemented!() }\n\n fn otg_hs_scl(&self) -> Hz { unimplemented!() 
}\n\n fn hclk(&self) -> Hz { unimplemented!() }\n\n fn systick(&self) -> Hz { unimplemented!() }\n\n fn fclk(&self) -> Hz { unimplemented!() }\n\n fn pclk1(&self) -> Hz { unimplemented!() }\n\n fn pclk2(&self) -> Hz { unimplemented!() }\n\n fn tim_pclk1(&self) -> Hz { unimplemented!() }\n", "file_path": "mcu/bobbin-stm32/stm32f42x/src/clock.rs", "rank": 41, "score": 271779.8546628352 }, { "content": "pub trait ClockProvider : Default {\n\n type Osc: Clock;\n\n type Osc32: Clock;\n\n fn osc(&self) -> Hz { Self::Osc::hz() }\n\n fn osc32(&self) -> Hz { Self::Osc32::hz() }\n\n fn hsi16(&self) -> Hz { Hz::from_num(16000000) }\n\n fn hse(&self) -> Hz { self.osc() }\n\n fn lsi(&self) -> Hz { Hz::from_num(32000) }\n\n fn lse(&self) -> Hz { self.osc32() }\n\n fn pllclk(&self) -> Hz { unimplemented!() }\n\n fn pll48clk(&self) -> Hz { unimplemented!() }\n\n fn sysclk(&self) -> Hz { unimplemented!() }\n\n fn hclk(&self) -> Hz { unimplemented!() }\n\n fn systick(&self) -> Hz { unimplemented!() }\n\n fn pclk1(&self) -> Hz { unimplemented!() }\n\n fn pclk2(&self) -> Hz { unimplemented!() }\n\n fn tim_pclk1(&self) -> Hz { unimplemented!() }\n\n fn tim_pclk2(&self) -> Hz { unimplemented!() }\n\n fn usart1(&self) -> Hz { unimplemented!() }\n\n fn usart2(&self) -> Hz { unimplemented!() }\n", "file_path": "mcu/bobbin-stm32/stm32l432x/src/clock.rs", "rank": 42, "score": 271779.8546628352 }, { "content": "pub trait ClockProvider : Default {\n\n type Osc: Clock;\n\n type Osc32: Clock;\n\n fn osc(&self) -> Hz { Self::Osc::hz() }\n\n fn osc32(&self) -> Hz { Self::Osc32::hz() }\n\n fn hsi(&self) -> Hz { Hz::from_num(16000000) }\n\n fn hse(&self) -> Hz { self.osc() }\n\n fn lsi(&self) -> Hz { Hz::from_num(32000) }\n\n fn lse(&self) -> Hz { self.osc32() }\n\n fn pllclk(&self) -> Hz { unimplemented!() }\n\n fn pll48clk(&self) -> Hz { unimplemented!() }\n\n fn sysclk(&self) -> Hz { unimplemented!() }\n\n fn i2s(&self) -> Hz { unimplemented!() }\n\n fn otg_hs_scl(&self) -> Hz { unimplemented!() 
}\n\n fn hclk(&self) -> Hz { unimplemented!() }\n\n fn systick(&self) -> Hz { unimplemented!() }\n\n fn fclk(&self) -> Hz { unimplemented!() }\n\n fn pclk1(&self) -> Hz { unimplemented!() }\n\n fn pclk2(&self) -> Hz { unimplemented!() }\n\n fn tim_pclk1(&self) -> Hz { unimplemented!() }\n", "file_path": "mcu/bobbin-stm32/stm32f74x/src/clock.rs", "rank": 43, "score": 271779.8546628352 }, { "content": "pub trait ClockProvider : Default {\n\n type Xosc: Clock;\n\n type Xosc32k: Clock;\n\n fn xosc(&self) -> Hz { Self::Xosc::hz() }\n\n fn xosc32k(&self) -> Hz { Self::Xosc32k::hz() }\n\n fn osc32k(&self) -> Hz { Hz::from_num(32768) }\n\n fn osc8m(&self) -> Hz { Hz::from_num(8000000) }\n\n fn osculp32k(&self) -> Hz { Hz::from_num(32000) }\n\n fn dfll48m(&self) -> Hz { Hz::from_num(48000000) }\n\n fn fdpll96m(&self) -> Hz { Hz::from_num(96000000) }\n\n fn gclkgen0(&self) -> Hz { unimplemented!() }\n\n fn gclkgen1(&self) -> Hz { unimplemented!() }\n\n fn gclkgen2(&self) -> Hz { unimplemented!() }\n\n fn gclkgen3(&self) -> Hz { unimplemented!() }\n\n fn gclkgen4(&self) -> Hz { unimplemented!() }\n\n fn gclkgen5(&self) -> Hz { unimplemented!() }\n\n fn gclkgen6(&self) -> Hz { unimplemented!() }\n\n fn gclkgen7(&self) -> Hz { unimplemented!() }\n\n fn gclkgen8(&self) -> Hz { unimplemented!() }\n\n fn gclk_dffl48m_ref(&self) -> Hz { unimplemented!() }\n", "file_path": "mcu/bobbin-sam/samd21/src/clock.rs", "rank": 44, "score": 271779.8546628352 }, { "content": "pub fn init() { \n\n}", "file_path": "board-src/template-thumbv6/src/btn.rs", "rank": 45, "score": 269784.45697628445 }, { "content": "pub fn init() { \n\n}", "file_path": "board-src/template-thumbv7/src/btn.rs", "rank": 46, "score": 269784.4569762844 }, { "content": "pub fn init() {\n\n}\n", "file_path": "board-src/template-thumbv6/src/led.rs", "rank": 47, "score": 269784.45697628445 }, { "content": "pub fn init() {\n\n}\n", "file_path": "board-src/template-thumbv7/src/led.rs", "rank": 48, "score": 269784.4569762844 }, { 
"content": "pub trait Signal<ST: SignalType> {}\n\n\n", "file_path": "lib/bobbin-mcu/src/signal.rs", "rank": 49, "score": 266025.4867927248 }, { "content": "pub trait Selector<ST: SignalType> {\n\n fn selector(&self) -> u8;\n\n}\n", "file_path": "lib/bobbin-mcu/src/signal.rs", "rank": 50, "score": 266025.4867927248 }, { "content": "/// Associates a specific interrupt number with a specific interrupt type, implemented\n\n/// for a Peripheral or Channel.\n\npub trait Irq<IT: IrqType> : Default {\n\n type Output: IrqNumber;\n\n /// Returns the interrupt number associated with the IRQ Type and peripheral.\n\n fn irq_number_for(&self, IT) -> u8 { Self::Output::irq_number() }\n\n}\n\n\n\n#[derive(Default, Debug)]\n\npub struct IrqMain {}\n\nimpl IrqType for IrqMain {}\n\n\n\npub const IRQ_MAIN: IrqMain = IrqMain {};", "file_path": "lib/bobbin-mcu/src/irq.rs", "rank": 51, "score": 261497.10382498306 }, { "content": "#[inline]\n\npub fn write_u16(addr: *mut u16, value: u16) {\n\n write(addr, value)\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 52, "score": 258753.95752367337 }, { "content": "#[inline]\n\npub fn write_u32(addr: *mut u32, value: u32) {\n\n write(addr, value)\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 53, "score": 258753.95752367337 }, { "content": "#[inline]\n\npub fn write_u8(addr: *mut u8, value: u8) {\n\n write(addr, value)\n\n} ", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 54, "score": 258753.95752367337 }, { "content": "pub trait HandlePend {\n\n fn handle_pend(&self);\n\n}\n", "file_path": "lib/bobbin-sys/src/pend.rs", "rank": 55, "score": 255944.1389099812 }, { "content": "pub trait HandleTick {\n\n fn handle_tick(&self, counter: u32);\n\n}\n", "file_path": "lib/bobbin-sys/src/tick.rs", "rank": 56, "score": 255935.920151555 }, { "content": "#[inline]\n\npub fn write<T>(addr: *mut T, value: T) {\n\n unsafe { write_volatile(addr as *mut T, value) }\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 57, 
"score": 254938.82523044682 }, { "content": "/// Indicates that a signal of `SignalType` is associated with a channel.\n\npub trait ChannelSource<ST: SignalType, SRC> {\n\n /// Returns the selector used to connect a channel source.\n\n fn selector(&self) -> u8;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/channel.rs", "rank": 58, "score": 253447.49604235665 }, { "content": "pub trait PinSource<STY: SignalType, SRC> {\n\n fn alt_fn(&self) -> U4;\n\n #[inline]\n\n fn alt_fn_for(&self, _src: SRC) -> U4 { self.alt_fn() }\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/pin.rs", "rank": 59, "score": 253440.20926551253 }, { "content": "pub fn enable_systick_external<C: ClockFor<Systick>>(clk: &C) { \n\n let ms_hz = (clk.clock_for(SYSTICK) / 1000).as_u32() - 1; \n\n let st = SYSTICK;\n\n st.set_clock_source(ClockSource::External);\n\n st.set_reload_value(ms_hz);\n\n st.set_current_value(ms_hz);\n\n st.set_enabled(true);\n\n st.set_tick_interrupt(true); \n\n}\n\n\n\n/// The clock source to be used by self.\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum ClockSource {\n\n /// Use the external clock source.\n\n External = 0,\n\n /// Use the internal clock source.\n\n Internal = 1,\n\n}\n\n\n\nimpl SystickPeriph {\n", "file_path": "mcu/bobbin-cortexm/src/ext/systick.rs", "rank": 60, "score": 251727.593770804 }, { "content": "pub fn enable_systick_internal<C: ClockFor<Systick>>(clk: &C) { \n\n let ms_hz = (clk.clock_for(SYSTICK) / 1000).as_u32() - 1; \n\n let st = SYSTICK;\n\n st.set_clock_source(ClockSource::Internal);\n\n st.set_reload_value(ms_hz);\n\n st.set_current_value(ms_hz);\n\n st.set_enabled(true);\n\n st.set_tick_interrupt(true); \n\n}\n\n\n", "file_path": "mcu/bobbin-cortexm/src/ext/systick.rs", "rank": 61, "score": 251727.593770804 }, { "content": "pub trait ConnectTo<STY: SignalType, SRC, PIN> {\n\n fn connect_to(&self, src: SRC);\n\n}\n\n\n\nimpl<STY, SRC, PIN, T> ConnectTo<STY, SRC, PIN> for T\n\nwhere \n\n STY: SignalType, \n\n PIN: SetSource,\n\n 
Self: PeriphPin<PIN> + PinSource<STY, SRC> \n\n{\n\n fn connect_to(&self, src: SRC) {\n\n let alt_fn = self.alt_fn_for(src);\n\n self.periph_pin().set_source(alt_fn);\n\n }\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/pin.rs", "rank": 62, "score": 249772.90336850885 }, { "content": "pub trait Sleep {\n\n fn sleep();\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/mcu.rs", "rank": 63, "score": 240263.97089266562 }, { "content": "pub fn gen_peripheral_impl<W: Write>(_cfg: &Config, out: &mut W, p: &Peripheral) -> Result<()> {\n\n let p_type = format!(\"{}Periph\", to_camel(&p.group_name.as_ref().unwrap()));\n\n let ch_type = format!(\"{}Ch\", to_camel(&p.group_name.as_ref().unwrap()));\n\n\n\n\n\n // TODO: Description must be added at top of file\n\n // if let Some(ref desc) = p.description {\n\n // let desc = desc.trim();\n\n // if desc.len() > 0 {\n\n // try!(writeln!(out, \"// //! {}\", desc));\n\n // try!(writeln!(out, \"\"));\n\n // }\n\n // }\n\n\n\n // try!(writeln!(out, \"#[allow(unused_imports)] use {}::*;\", cfg.common));\n\n // try!(writeln!(out, \"\"));\n\n\n\n if let Some(ref desc) = p.description {\n\n try!(gen_doc(out, 0, desc));\n\n }\n", "file_path": "dsl/bobbin-chip/src/codegen/modules.rs", "rank": 64, "score": 239896.78380650113 }, { "content": "pub fn gen_descriptor<W: Write>(out: &mut W, _p_type: &str, desc: &Descriptor) -> Result<()> {\n\n let d_type = to_camel(&desc.name);\n\n let d_size = desc.size.expect(\"Descriptor size is required\");\n\n\n\n try!(writeln!(out, \"\"));\n\n\n\n if let Some(ref desc) = desc.description {\n\n try!(gen_doc(out, 0, desc));\n\n }\n\n try!(writeln!(out, \"#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)]\"));\n\n try!(writeln!(out, \"pub struct {}(pub [u8; {}]);\", d_type, d_size));\n\n try!(writeln!(out, \"\"));\n\n\n\n try!(writeln!(out, \"impl {} {{\", d_type));\n\n\n\n\n\n for r in desc.registers.iter() {\n\n let r_type = format!(\"{}\", to_camel(&r.name));\n\n // let r_ptr = field_ptr(&r.name);\n\n // let 
r_mut = field_mut(&r.name);\n", "file_path": "dsl/bobbin-chip/src/codegen/modules.rs", "rank": 65, "score": 239893.598560563 }, { "content": "pub trait IrqEnable {\n\n fn irq_enabled(u8) -> bool;\n\n fn irq_enable(u8);\n\n fn irq_disable(u8);\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/mcu.rs", "rank": 66, "score": 237117.91319206977 }, { "content": "pub trait GetActiveIrq {\n\n fn get_active_irq() -> u8;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/mcu.rs", "rank": 67, "score": 234081.75823851392 }, { "content": "#[doc(hidden)]\n\npub fn with_console<F: FnOnce(&mut Console)>(f: F) {\n\n unsafe {\n\n if let Some(ref mut console) = CONSOLE {\n\n f(console)\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/bobbin-sys/src/console.rs", "rank": 68, "score": 231007.0758008322 }, { "content": "pub trait CrcInit<T> {\n\n fn init(&self, value: T) -> &Self;\n\n}", "file_path": "lib/bobbin-hal/src/crc.rs", "rank": 69, "score": 230360.0256497704 }, { "content": "pub fn gen_peripheral_group_impl<W: Write>(_cfg: &Config, out: &mut W, pg: &PeripheralGroup) -> Result<()> {\n\n let pg_name = if let Some(ref prototype) = pg.prototype {\n\n if let Some(ref name) = prototype.group_name {\n\n format!(\"{}\", name)\n\n } else {\n\n format!(\"{}\", pg.name)\n\n }\n\n } else {\n\n format!(\"{}\", pg.name)\n\n };\n\n let pg_type = format!(\"{}Periph\", to_camel(&pg_name));\n\n let ch_type = format!(\"{}Ch\", to_camel(&pg_name));\n\n\n\n\n\n // try!(writeln!(out, \"#[allow(unused_imports)] use {}::*;\", cfg.common));\n\n // try!(writeln!(out, \"\"));\n\n\n\n // Generate Periphal Group Impl\n\n\n\n if pg.modules.len() == 0 {\n", "file_path": "dsl/bobbin-chip/src/codegen/modules.rs", "rank": 70, "score": 230268.4737431279 }, { "content": "pub trait Get<T> {\n\n fn get(&self) -> T;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/mcu.rs", "rank": 71, "score": 229580.31839704578 }, { "content": "pub trait En {\n\n fn en(&self) -> bits::U1;\n\n fn set_en<V: Into<bits::U1>>(&self, value: V);\n\n} \n", 
"file_path": "lib/bobbin-mcu/src/lib.rs", "rank": 72, "score": 229557.71301972016 }, { "content": "pub trait AsPeriph {\n\n type Target;\n\n fn as_periph(&self) -> &'static Self::Target;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/periph.rs", "rank": 73, "score": 229557.71301972016 }, { "content": "/// Return the number of milliseconds since the clock was started.\n\npub trait Millis {\n\n /// Returns the number of milliseconds modulo 2^32 since the clock was started.\n\n fn millis(&self) -> u32;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/clock.rs", "rank": 74, "score": 229557.71301972016 }, { "content": "/// Handle an interrupt.\n\npub trait HandleIrq : Sync {\n\n /// Handle an interrupt. The interrupt number is provided for handlers that support\n\n /// more than interrupt.\n\n fn handle_irq(&self, irq: u8);\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct IrqHandler {\n\n irq_num: u8,\n\n handler: *const HandleIrq,\n\n}\n\n\n\nimpl IrqHandler {\n\n fn new(irq_num: u8, handler: *const HandleIrq, ) -> Self {\n\n Self { irq_num, handler }\n\n }\n\n}\n\n\n\n/// The interrupt dispatcher singleton.\n\npub struct IrqDispatcher<MCU: Mcu> {\n", "file_path": "lib/bobbin-sys/src/irq_dispatch.rs", "rank": 75, "score": 226953.59029611078 }, { "content": "pub trait GetPeriph<T> {\n\n fn get_periph(&self) -> T;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/mcu.rs", "rank": 76, "score": 226544.16344348993 }, { "content": "/// Query and Set the Peripheral Reset Gate.\n\n/// \n\n/// Enabling this gate will reset the peripheral. 
Check the MCU documentation\n\n/// to see if the gate will automatically be cleared after a successful reset\n\n/// or if it needs to be manually cleared.\n\n/// \n\n/// Note: implement a higher-level Reset trait that abstracts the full peripheral\n\n/// reset process.\n\npub trait GateRst {\n\n /// Returns true if the Reset gate is enabled.\n\n fn gate_rst(&self) -> bits::U1;\n\n /// Set the state of the Reset gate to `value`.\n\n fn set_gate_rst<V: Into<bits::U1>>(&self, value: V) -> &Self;\n\n /// Toggle the state of the Reset gate.\n\n fn toggle_gate_reset(&self) -> &Self { self.set_gate_rst(true).set_gate_rst(false) }\n\n} \n\n\n", "file_path": "lib/bobbin-mcu/src/gate.rs", "rank": 77, "score": 226198.94597695154 }, { "content": "/// Marker trait indicating the item is a Clock Tree.\n\npub trait ClockTree {}\n\n\n", "file_path": "lib/bobbin-mcu/src/clock.rs", "rank": 78, "score": 226194.341066299 }, { "content": "/// Query and Set the Peripheral Clock Enable Gate\n\npub trait GateEn {\n\n /// Returns true if the gate is enabled.\n\n fn gate_en(&self) -> bits::U1;\n\n /// Set the gate state to `value`.\n\n fn set_gate_en<V: Into<bits::U1>>(&self, value: V) -> &Self; \n\n /// Enable the gate for the peripheral.\n\n fn gate_enable(&self) -> &Self { self.set_gate_en(true) }\n\n /// Disable the gate for the peripheral.\n\n fn gate_disable(&self) -> &Self { self.set_gate_en(false) }\n\n} \n\n\n", "file_path": "lib/bobbin-mcu/src/gate.rs", "rank": 79, "score": 226187.03769738114 }, { "content": "pub trait RefCount {\n\n fn ref_count_mut() -> &'static mut u8;\n\n fn incr_ref() { *Self::ref_count_mut() += 1 }\n\n fn decr_ref() { *Self::ref_count_mut() -= 1 }\n\n fn ref_count() -> u8 { *Self::ref_count_mut() }\n\n}", "file_path": "lib/bobbin-mcu/src/owned.rs", "rank": 80, "score": 226187.03769738114 }, { "content": "pub trait SetSource {\n\n fn set_source<V: Into<U4>>(&self, src: V);\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/pin.rs", "rank": 81, "score": 
226187.03769738114 }, { "content": "pub fn reset_vm() {\n\n MEM.with(|m| m.borrow_mut().reset());\n\n} \n\n\n", "file_path": "lib/bobbin-mcu/src/rw.rs", "rank": 82, "score": 225654.65507068083 }, { "content": "#[inline]\n\npub fn sleep() {\n\n #[cfg(target_os=\"none\")]\n\n unsafe { asm!(\"\n\n cpsid i\n\n wfi\n\n cpsie i\n\n \")}\n\n}\n", "file_path": "mcu/bobbin-cortexm/src/ext/mod.rs", "rank": 83, "score": 225654.65507068083 }, { "content": "pub trait GetPeriphInstance<T> {\n\n fn get_periph_instance(&self, index: usize) -> Option<T>;\n\n fn get_periph_instance_count(&self) -> usize;\n\n}", "file_path": "lib/bobbin-mcu/src/mcu.rs", "rank": 84, "score": 223612.08972642972 }, { "content": "/// Query and Set the Peripheral Clock Enable on Sleep Gate\n\n/// \n\n/// Enabling this gate will allow the peripheral to continue operating while the\n\n/// MCU is in a Sleep state.\n\npub trait GateSleepEn {\n\n /// Returns true if the Sleep gate is enabled.\n\n fn gate_sleep_en(&self) -> bits::U1;\n\n /// Set the state of the Sleep gate to `value`.\n\n fn set_gate_sleep_en<V: Into<bits::U1>>(&self, value: V) -> &Self; \n\n /// Enables the Sleep gate.\n\n fn gate_sleep_enable(&self) -> &Self { self.set_gate_sleep_en(true) }\n\n /// Disables the Sleep gate.\n\n fn gate_sleep_disable(&self) -> &Self { self.set_gate_sleep_en(false) }\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/gate.rs", "rank": 85, "score": 222951.51325537625 }, { "content": "/// Query and Sets the Peripheral Clock Enable on Stop Gate\n\n/// \n\n/// Enabling this gate wil allow the peripheral to continue operating while\n\n/// the MCU is in a Stop state.\n\npub trait GateStopEn {\n\n /// Returns true if the Stop gate is enabled.\n\n fn gate_stop_en(&self) -> bits::U1;\n\n /// Sets the state of the Stop gate to `value`.\n\n fn set_gate_stop_en<V: Into<bits::U1>>(&self, value: V) -> &Self; \n\n /// Enables the Stop gate.\n\n fn gate_stop_enable(&self) -> &Self { self.set_gate_stop_en(true) }\n\n /// Disables the 
Stop gate.\n\n fn gate_stop_disable(&self) -> &Self { self.set_gate_stop_en(false) }\n\n}\n", "file_path": "lib/bobbin-mcu/src/gate.rs", "rank": 86, "score": 222951.51325537625 }, { "content": "/// Query and Set the Peripheral Clock Enable on Deep Sleep Gate\n\n/// \n\n/// Enabling this gate will allow the peripheral to continue operating while\n\n/// the MCU is in a Deep Sleep state.\n\npub trait GateDeepSleepEn {\n\n /// Returns true if the Deep Sleep gate is enabled.\n\n fn gate_deep_sleep_en(&self) -> bits::U1;\n\n /// Sets the state of the Deep Sleep gate to `value`.\n\n fn set_gate_deep_sleep_en<V: Into<bits::U1>>(&self, value: V) -> &Self; \n\n /// Enables the Deep Sleep gate.\n\n fn gate_deep_sleep_enable(&self) -> &Self { self.set_gate_deep_sleep_en(true) }\n\n /// Disables the Deep Sleep gate.\n\n fn gate_deep_sleep_disable(&self) -> &Self { self.set_gate_deep_sleep_en(false) }\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/gate.rs", "rank": 87, "score": 219830.60036731113 }, { "content": "pub trait FlashBusy {\n\n fn flash_busy(&self) -> bool;\n\n}\n\n\n\nimpl FlashLockUnlock for FlashPeriph {\n\n fn flash_locked(&self) -> bool {\n\n self.cr().test_lock()\n\n }\n\n\n\n fn flash_lock(&self) {\n\n self.with_cr(|r| r.set_lock(1));\n\n }\n\n\n\n fn flash_unlock(&self) {\n\n self.set_keyr(|r| r.set_key(KEY1));\n\n self.set_keyr(|r| r.set_key(KEY2));\n\n }\n\n}\n\n\n\nimpl FlashBusy for FlashPeriph {\n", "file_path": "mcu/bobbin-stm32/stm32f74x/src/ext/flash.rs", "rank": 88, "score": 219824.07719255172 }, { "content": "pub trait FlashBusy {\n\n fn flash_busy(&self) -> bool;\n\n}\n\n\n\nimpl FlashLockUnlock for FlashPeriph {\n\n fn flash_locked(&self) -> bool {\n\n self.cr().test_lock()\n\n }\n\n\n\n fn flash_lock(&self) {\n\n self.with_cr(|r| r.set_lock(1));\n\n }\n\n\n\n fn flash_unlock(&self) {\n\n self.set_keyr(|r| r.set_key(KEY1));\n\n self.set_keyr(|r| r.set_key(KEY2));\n\n }\n\n}\n\n\n\nimpl FlashBusy for FlashPeriph {\n", "file_path": 
"mcu/bobbin-stm32/stm32f42x/src/ext/flash.rs", "rank": 89, "score": 219824.07719255172 }, { "content": "pub trait FlashBusy {\n\n fn flash_busy(&self) -> bool;\n\n}\n\n\n\nimpl FlashBusy for FtfePeriph {\n\n fn flash_busy(&self) -> bool {\n\n !self.fstat().test_ccif()\n\n }\n\n}\n\n\n", "file_path": "mcu/bobbin-kinetis/k64/src/ext/flash.rs", "rank": 90, "score": 219824.07719255172 }, { "content": "pub trait FlashBusy {\n\n fn flash_busy(&self) -> bool;\n\n}\n\n\n\nimpl FlashLockUnlock for FlashPeriph {\n\n fn flash_locked(&self) -> bool {\n\n self.cr().test_lock()\n\n }\n\n\n\n fn flash_lock(&self) {\n\n self.with_cr(|r| r.set_lock(1));\n\n }\n\n\n\n fn flash_unlock(&self) {\n\n self.set_keyr(|r| r.set_keyr(KEY1));\n\n self.set_keyr(|r| r.set_keyr(KEY2));\n\n }\n\n}\n\n\n\nimpl FlashBusy for FlashPeriph {\n", "file_path": "mcu/bobbin-stm32/stm32l432x/src/ext/flash.rs", "rank": 91, "score": 219824.07719255172 }, { "content": "pub trait TccExt {\n\n fn mode_up(&self) -> &Self;\n\n fn mode_down(&self) -> &Self;\n\n fn mode_continuous(&self) -> &Self;\n\n fn mode_oneshot(&self) -> &Self;\n\n}\n\n\n\nimpl TccExt for TccPeriph {\n\n fn mode_up(&self) -> &Self {\n\n self.set_ctrlbclr(|r| r.set_dir(1))\n\n }\n\n\n\n fn mode_down(&self) -> &Self {\n\n self.set_ctrlbset(|r| r.set_dir(1))\n\n }\n\n\n\n fn mode_continuous(&self) -> &Self {\n\n self.set_ctrlbset(|r| r.set_oneshot(1))\n\n }\n\n\n", "file_path": "mcu/bobbin-sam/samd21/src/ext/tcc.rs", "rank": 92, "score": 219824.0771925517 }, { "content": "pub trait FlashBusy {\n\n fn flash_busy(&self) -> bool;\n\n}\n\n\n\nimpl FlashLockUnlock for FlashPeriph {\n\n fn flash_locked(&self) -> bool {\n\n self.cr().test_lock()\n\n }\n\n\n\n fn flash_lock(&self) {\n\n self.with_cr(|r| r.set_lock(1));\n\n }\n\n\n\n fn flash_unlock(&self) {\n\n self.set_keyr(|r| r.set_fkeyr(KEY1));\n\n self.set_keyr(|r| r.set_fkeyr(KEY2));\n\n }\n\n}\n\n\n\nimpl FlashBusy for FlashPeriph {\n", "file_path": "mcu/bobbin-stm32/stm32f303x/src/ext/flash.rs", 
"rank": 93, "score": 219824.07719255172 }, { "content": "pub trait FlashBusy {\n\n fn flash_busy(&self) -> bool;\n\n}\n\n\n\nimpl FlashBusy for NvmctrlPeriph {\n\n fn flash_busy(&self) -> bool {\n\n self.intflag().test_ready()\n\n }\n\n}\n\n\n\nimpl FlashErase for NvmctrlPeriph {\n\n fn erase_start(&self, addr: *mut u8) -> Result<(), FlashError> {\n\n // Note from https://github.com/blacksphere/blackmagic/blob/master/src/target/samd.c\n\n /* Write address of first word in row to erase it */\n\n\t\t/* Must be shifted right for 16-bit address, see Datasheet §20.8.8 Address */\n\n self.set_addr(|_| Addr(addr as u32 >> 1));\n\n self.set_ctrla(|r| r.set_cmdex(0xa5).set_cmd(0x41));\n\n self.set_ctrla(|r| r.set_cmdex(0xa5).set_cmd(0x02));\n\n Ok(())\n\n }\n", "file_path": "mcu/bobbin-sam/samd21/src/ext/flash.rs", "rank": 94, "score": 219824.0771925517 }, { "content": "pub fn run_48mhz() {\n\n // See https://github.com/arduino/ArduinoCore-samd/blob/master/bootloaders/zero/board_init.c\n\n // * At reset:\n\n // * - OSC8M clock source is enabled with a divider by 8 (1MHz).\n\n // * - Generic Clock Generator 0 (GCLKMAIN) is using OSC8M as source.\n\n // * We need to:\n\n // * 1) Enable XOSC32K clock (External on-board 32.768Hz oscillator), will be used as DFLL48M reference.\n\n // * 2) Put XOSC32K as source of Generic Clock Generator 1\n\n // * 3) Put Generic Clock Generator 1 as source for Generic Clock Multiplexer 0 (DFLL48M reference)\n\n // * 4) Enable DFLL48M clock\n\n // * 5) Switch Generic Clock Generator 0 to DFLL48M. 
CPU will run at 48MHz.\n\n // * 6) Modify PRESCaler value of OSCM to have 8MHz\n\n // * 7) Put OSC8M as source for Generic Clock Generator 3\n\n\n\n /* Set 1 Flash Wait State for 48MHz, cf tables 20.9 and 35.27 in SAMD21 Datasheet */\n\n // NVMCTRL->CTRLB.bit.RWS = NVMCTRL_CTRLB_RWS_HALF_Val;\n\n nvmctrl::NVMCTRL.with_ctrlb(|r| r.set_rws(0x1));\n\n\n\n /* Turn on the digital interface clock */\n\n pm::PM.with_apbamask(|r| r.set_gclk(1));\n", "file_path": "mcu/bobbin-sam/samd21/src/ext/clock.rs", "rank": 95, "score": 219292.94922523 }, { "content": "pub fn enable_instruction_cache() {\n\n // Enable Instruction Cache\n\n SCB.set_iciallu(|r| r);\n\n #[cfg(target_os=\"none\")]\n\n unsafe { \n\n asm!(\"dsb\");\n\n asm!(\"isb\");\n\n }\n\n SCB.with_ccr(|r| r.set_ic(1)); \n\n}\n\n\n", "file_path": "mcu/bobbin-cortexm/src/ext/mod.rs", "rank": 96, "score": 219292.94922523 }, { "content": "/// Trait allowing access to the clock for type `T`.\n\npub trait ClockFor<T> {\n\n fn clock_for(&self, T) -> Hz; \n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/clock.rs", "rank": 97, "score": 218664.0330481191 }, { "content": "/// Trait indicating the item is a clock with output frequency `hz()`.\n\npub trait Clock : Default {\n\n fn hz() -> Hz;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/clock.rs", "rank": 98, "score": 218656.6903548654 }, { "content": "pub trait Periph: Default {\n\n fn id(&self) -> &'static str;\n\n fn base(&self) -> *mut u32;\n\n fn index(&self) -> usize;\n\n fn ord(&self) -> usize;\n\n}\n\n\n", "file_path": "lib/bobbin-mcu/src/periph.rs", "rank": 99, "score": 218649.44290235714 } ]
Rust
src/mesh.rs
littleTitan/3d-engine
81009f8949edd809815270f77eb5ed482da17243
use crate::{triangle::Triangle, vec3d::Vec3d}; use byteorder::{LittleEndian, ReadBytesExt}; use std::{ fs::{self, File}, io::prelude::*, }; #[derive(Clone)] pub struct Mesh { pub tris: Vec<Triangle>, pub is_over: bool, pub is_held: bool, pub pos: Vec3d, } impl Mesh { pub fn new(pos: Vec3d) -> Mesh { Mesh { tris: Vec::new(), is_over: false, is_held: false, pos: pos, } } #[allow(dead_code)] pub fn form_cube(&mut self) { self.tris = vec![ Triangle::new( Vec3d::new(-0.5, -0.5, -0.5), Vec3d::new(-0.5, 0.5, -0.5), Vec3d::new(0.5, 0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, -0.5, -0.5), Vec3d::new(0.5, 0.5, -0.5), Vec3d::new(0.5, -0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, -0.5), Vec3d::new(0.5, 0.5, -0.5), Vec3d::new(0.5, 0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, -0.5), Vec3d::new(0.5, 0.5, 0.5), Vec3d::new(0.5, -0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, 0.5), Vec3d::new(0.5, 0.5, 0.5), Vec3d::new(-0.5, 0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, 0.5), Vec3d::new(-0.5, 0.5, 0.5), Vec3d::new(-0.5, -0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, -0.5, 0.5), Vec3d::new(-0.5, 0.5, 0.5), Vec3d::new(-0.5, 0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, -0.5, 0.5), Vec3d::new(-0.5, 0.5, -0.5), Vec3d::new(-0.5, -0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, 0.5, -0.5), Vec3d::new(-0.5, 0.5, 0.5), Vec3d::new(0.5, 0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, 0.5, -0.5), Vec3d::new(0.5, 0.5, 0.5), Vec3d::new(0.5, 0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, 0.5), Vec3d::new(-0.5, -0.5, 0.5), Vec3d::new(-0.5, -0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, 0.5), Vec3d::new(-0.5, -0.5, -0.5), Vec3d::new(0.5, -0.5, -0.5), ) + self.pos, ]; } #[allow(dead_code)] pub fn x_axis_rotation(&mut self, r: f32, origin_y: f32, origin_z: f32) { for i in 0..self.tris.len() { self.tris[i].x_axis_rotation(r, origin_y, origin_z); } } 
#[allow(dead_code)] pub fn y_axis_rotation(&mut self, r: f32, origin_x: f32, origin_z: f32) { for i in 0..self.tris.len() { self.tris[i].y_axis_rotation(r, origin_x, origin_z); } } #[allow(dead_code)] pub fn z_axis_rotation(&mut self, r: f32, origin_x: f32, origin_y: f32) { for i in 0..self.tris.len() { self.tris[i].z_axis_rotation(r, origin_x, origin_y); } } #[allow(dead_code)] pub fn increment_x(&mut self, inc_x: f32) { self.pos.x += inc_x; for i in 0..self.tris.len() { self.tris[i].increment_x(inc_x); } } #[allow(dead_code)] pub fn increment_y(&mut self, inc_y: f32) { self.pos.y += inc_y; for i in 0..self.tris.len() { self.tris[i].increment_y(inc_y); } } #[allow(dead_code)] pub fn increment_z(&mut self, inc_z: f32) { self.pos.z += inc_z; for i in 0..self.tris.len() { self.tris[i].increment_z(inc_z); } } #[allow(dead_code)] pub fn from_file(&mut self, filename: &str) { println!("In file {}", filename); let file_type = filename.split('.').last().unwrap(); if file_type == "obj" { self.from_obj(filename); } else if file_type == "stl" { match self.from_stl_ascii(filename) { Ok(_) => (), Err(_e) => self.from_stl_bin(filename), } } } pub fn from_obj(&mut self, filename: &str) { let contents = fs::read_to_string(filename).expect("Something went wrong reading the file"); let mut points: Vec<Vec3d> = Vec::new(); for line in contents.lines() { if line.chars().next().unwrap() == 'v' { let mut e = line.split_whitespace(); e.next(); points.push(Vec3d { x: e.next().unwrap().parse::<f32>().unwrap(), y: e.next().unwrap().parse::<f32>().unwrap(), z: e.next().unwrap().parse::<f32>().unwrap(), }); } else if line.chars().next().unwrap() == 'f' { let mut e = line.split_whitespace(); e.next(); self.tris.push(Triangle::new( points[e.next().unwrap().parse::<usize>().unwrap() - 1_usize], points[e.next().unwrap().parse::<usize>().unwrap() - 1_usize], points[e.next().unwrap().parse::<usize>().unwrap() - 1_usize], )); } } } pub fn from_stl_bin(&mut self, filename: &str) { let mut file = 
File::open(filename).expect("file not found"); let mut header_buf = [0; 80]; let header = file .read(&mut header_buf[..]) .expect("Something went wrong reading the file"); let _contents = String::from_utf8_lossy(&header_buf[..header]); let mut n_tris_buf = [0; 4]; let n_tris_raw = file .read(&mut n_tris_buf[..]) .expect("Something went wrong reading the file"); let mut n_tris_dat = &n_tris_buf[..n_tris_raw]; let n_tris = *&n_tris_dat.read_u32::<LittleEndian>().unwrap() as i32; println!("{:?}", n_tris); let mut tri_buf = [0; 50]; for _i in 0..n_tris { let tri_raw = file .read(&mut tri_buf[..]) .expect("Something went wrong reading the file"); let mut data = &tri_buf[..tri_raw]; self.tris.push(Triangle::new_with_normal( Vec3d { x: *&data.read_f32::<LittleEndian>().unwrap(), y: *&data.read_f32::<LittleEndian>().unwrap(), z: *&data.read_f32::<LittleEndian>().unwrap(), }, Vec3d { x: *&data.read_f32::<LittleEndian>().unwrap(), y: *&data.read_f32::<LittleEndian>().unwrap(), z: *&data.read_f32::<LittleEndian>().unwrap(), }, Vec3d { x: *&data.read_f32::<LittleEndian>().unwrap(), y: *&data.read_f32::<LittleEndian>().unwrap(), z: *&data.read_f32::<LittleEndian>().unwrap(), }, Vec3d { x: *&data.read_f32::<LittleEndian>().unwrap(), y: *&data.read_f32::<LittleEndian>().unwrap(), z: *&data.read_f32::<LittleEndian>().unwrap(), }, )); &data.read_u16::<LittleEndian>().unwrap(); } } pub fn from_stl_ascii(&mut self, filename: &str) -> Result<(), std::io::Error> { let contents = fs::read_to_string(filename)?; let mut points: Vec<Vec3d> = Vec::new(); let mut normal: Vec3d = Vec3d::new(0.0, 0.0, 0.0); for line in contents.lines() { let mut e = line.split_whitespace(); let s = e.next().unwrap(); if s == "facet" { if e.next().unwrap() == "normal" { normal = Vec3d::new( e.next().unwrap().parse::<f32>().unwrap(), e.next().unwrap().parse::<f32>().unwrap(), e.next().unwrap().parse::<f32>().unwrap(), ); } } else if s == "vertex" { points.push(Vec3d::new( 
e.next().unwrap().parse::<f32>().unwrap(), e.next().unwrap().parse::<f32>().unwrap(), e.next().unwrap().parse::<f32>().unwrap(), )); } else if s == "endfacet" { self.tris.push(Triangle::new_with_normal( normal, points[0], points[1], points[2], )); normal = Vec3d::new(0.0, 0.0, 0.0); points = Vec::new(); } } Ok(()) } }
use crate::{triangle::Triangle, vec3d::Vec3d}; use byteorder::{LittleEndian, ReadBytesExt}; use std::{ fs::{self, File}, io::prelude::*, }; #[derive(Clone)] pub struct Mesh { pub tris: Vec<Triangle>, pub is_over: bool, pub is_held: bool, pub pos: Vec3d, } impl Mesh { pub fn new(pos: Vec3d) -> Mesh { Mesh { tris: Vec::new(), is_over: false, is_held: false, pos: pos, } } #[allow(dead_code)] pub fn form_cube(&mut self) { self.tris = vec![ Triangle::new( Vec3d::new(-0.5, -0.5, -0.5), Vec3d::new(-0.5, 0.5, -0.5), Vec3d::new(0.5, 0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, -0.5, -0.5), Vec3d::new(0.5, 0.5, -0.5), Vec3d::new(0.5, -0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, -0.5), Vec3d::new(0.5, 0.5, -0.5), Vec3d::new(0.5, 0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, -0.5), Vec3d::new(0.5, 0.5, 0.5), Vec3d::new(0.5, -0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, 0.5), Vec3d::new(0.5, 0.5, 0.5), Vec3d::new(-0.5, 0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, 0.5), Vec3d::new(-0.5, 0.5, 0.5), Vec3d::new(-0.5, -0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, -0.5, 0.5), Vec3d::new(-0.5, 0.5, 0.5), Vec3d::new(-0.5, 0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, -0.5, 0.5), Vec3d::new(-0.5, 0.5, -0.5), Vec3d::new(-0.5, -0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, 0.5, -0.5), Vec3d::new(-0.5, 0.5, 0.5), Vec3d::new(0.5, 0.5, 0.5), ) + self.pos, Triangle::new( Vec3d::new(-0.5, 0.5, -0.5), Vec3d::new(0.5, 0.5, 0.5), Vec3d::new(0.5, 0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, 0.5), Vec3d::new(-0.5, -0.5, 0.5), Vec3d::new(-0.5, -0.5, -0.5), ) + self.pos, Triangle::new( Vec3d::new(0.5, -0.5, 0.5), Vec3d::new(-0.5, -0.5, -0.5), Vec3d::new(0.5, -0.5, -0.5), ) + self.pos, ]; } #[allow(dead_code)] pub fn x_axis_rotation(&mut self, r: f32, origin_y: f32, origin_z: f32) { for i in 0..self.tris.len() { self.tris[i].x_axis_rotation(r, origin_y, origin_z); } } 
#[allow(dead_code)] pub fn y_axis_rotation(&mut self, r: f32, origin_x: f32, origin_z: f32) { for i in 0..self.tris.len() { self.tris[i].y_axis_rotation(r, origin_x, origin_z); } } #[allow(dead_code)] pub fn z_axis_rotation(&mut self, r: f32, origin_x: f32, origin_y: f32) { for i in 0..self.tris.len() { self.tris[i].z_axis_rotation(r, origin_x, origin_y); } } #[allow(dead_code)] pub fn increment_x(&mut self, inc_x: f32) { self.pos.x += inc_x; for i in 0..self.tris.len() { self.tris[i].increment_x(inc_x); } } #[allow(dead_code)] pub fn increment_y(&mut self, inc_y: f32) { self.pos.y += inc_y; for i in 0..self.tris.len() { self.tris[i].increment_y(inc_y); } } #[allow(dead_code)] pub fn increment_z(&mut self, inc_z: f32) { self.pos.z += inc_z; for i in 0..self.tris.len() { self.tris[i].increment_z(inc_z); } } #[allow(dead_code)] pub fn from_file(&mut self, filename: &str) { println!("In file {}", filename); let file_type = filename.split('.').last().unwrap(); if file_type == "obj" { self.from_obj(filename); } else if file_type == "stl" { match self.from_stl_ascii(filename) { Ok(_) => (), Err(_e) => self.from_stl_bin(filename), } } } pub fn from_obj(&mut self, filename: &str) { let contents = fs::read_to_string(filename).expect("Something went wrong reading the file"); let mut points: Vec<Vec3d> = Vec::new(); for line in contents.lines() { if line.chars().next().unwrap() == 'v' { let mut e = line.split_whitespace(); e.next(); points.push(Vec3d { x: e.next().unwrap().parse::<f32>().unwrap(), y: e.next().unwrap().parse::<f32>().unwrap(), z: e.next().unwrap().parse::<f32>().unwrap(), }); } else if line.chars().next().unwrap() == 'f' { let mut e = line.split_whitespace();
.expect("Something went wrong reading the file"); let _contents = String::from_utf8_lossy(&header_buf[..header]); let mut n_tris_buf = [0; 4]; let n_tris_raw = file .read(&mut n_tris_buf[..]) .expect("Something went wrong reading the file"); let mut n_tris_dat = &n_tris_buf[..n_tris_raw]; let n_tris = *&n_tris_dat.read_u32::<LittleEndian>().unwrap() as i32; println!("{:?}", n_tris); let mut tri_buf = [0; 50]; for _i in 0..n_tris { let tri_raw = file .read(&mut tri_buf[..]) .expect("Something went wrong reading the file"); let mut data = &tri_buf[..tri_raw]; self.tris.push(Triangle::new_with_normal( Vec3d { x: *&data.read_f32::<LittleEndian>().unwrap(), y: *&data.read_f32::<LittleEndian>().unwrap(), z: *&data.read_f32::<LittleEndian>().unwrap(), }, Vec3d { x: *&data.read_f32::<LittleEndian>().unwrap(), y: *&data.read_f32::<LittleEndian>().unwrap(), z: *&data.read_f32::<LittleEndian>().unwrap(), }, Vec3d { x: *&data.read_f32::<LittleEndian>().unwrap(), y: *&data.read_f32::<LittleEndian>().unwrap(), z: *&data.read_f32::<LittleEndian>().unwrap(), }, Vec3d { x: *&data.read_f32::<LittleEndian>().unwrap(), y: *&data.read_f32::<LittleEndian>().unwrap(), z: *&data.read_f32::<LittleEndian>().unwrap(), }, )); &data.read_u16::<LittleEndian>().unwrap(); } } pub fn from_stl_ascii(&mut self, filename: &str) -> Result<(), std::io::Error> { let contents = fs::read_to_string(filename)?; let mut points: Vec<Vec3d> = Vec::new(); let mut normal: Vec3d = Vec3d::new(0.0, 0.0, 0.0); for line in contents.lines() { let mut e = line.split_whitespace(); let s = e.next().unwrap(); if s == "facet" { if e.next().unwrap() == "normal" { normal = Vec3d::new( e.next().unwrap().parse::<f32>().unwrap(), e.next().unwrap().parse::<f32>().unwrap(), e.next().unwrap().parse::<f32>().unwrap(), ); } } else if s == "vertex" { points.push(Vec3d::new( e.next().unwrap().parse::<f32>().unwrap(), e.next().unwrap().parse::<f32>().unwrap(), e.next().unwrap().parse::<f32>().unwrap(), )); } else if s == "endfacet" { 
self.tris.push(Triangle::new_with_normal( normal, points[0], points[1], points[2], )); normal = Vec3d::new(0.0, 0.0, 0.0); points = Vec::new(); } } Ok(()) } }
e.next(); self.tris.push(Triangle::new( points[e.next().unwrap().parse::<usize>().unwrap() - 1_usize], points[e.next().unwrap().parse::<usize>().unwrap() - 1_usize], points[e.next().unwrap().parse::<usize>().unwrap() - 1_usize], )); } } } pub fn from_stl_bin(&mut self, filename: &str) { let mut file = File::open(filename).expect("file not found"); let mut header_buf = [0; 80]; let header = file .read(&mut header_buf[..])
random
[ { "content": "use ggez::{self, graphics::Vertex, nalgebra as na, nalgebra::geometry::Point2};\n\n\n\nuse std::ops::{Add, AddAssign, Mul, Sub};\n\n\n\n/// A Vec3d is a point in 3D space\n\n#[derive(Copy, Clone)]\n\npub struct Vec3d {\n\n pub x: f32,\n\n pub y: f32,\n\n pub z: f32,\n\n}\n\n\n\nimpl Vec3d {\n\n /// Creates a Vec3d from an x, a y, and a z.\n\n ///\n\n /// # Arguments\n\n /// * `x` - The x position of the Vec3d.\n\n /// * `y` - The y position of the Vec3d.\n\n /// * `z` - The z position of the Vec3d.\n\n ///\n", "file_path": "src/vec3d.rs", "rank": 0, "score": 23748.884367445207 }, { "content": " /// # Return\n\n /// A new Vec3d\n\n ///\n\n pub fn new(x: f32, y: f32, z: f32) -> Vec3d {\n\n Vec3d { x: x, y: y, z: z }\n\n }\n\n\n\n /// Rotates the Vec3d around the x-axis at the origin point.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Vec3d the function was called for.\n\n /// * `r` - The Amount rotated by.\n\n /// * `origin_y` - The y position of the origin point.\n\n /// * `origin_z` - The z position of the origin point.\n\n ///\n\n pub fn x_axis_rotation(&mut self, r: f32, origin_y: f32, origin_z: f32) {\n\n let tmp_y = self.y;\n\n let tmp_z = self.z;\n\n let angle: f32 = (r) * (3.14159265 / 180.0);\n\n self.y =\n", "file_path": "src/vec3d.rs", "rank": 1, "score": 23745.102452210336 }, { "content": " /// Point2 containing the x and y positions of the Vec3d\n\n ///\n\n pub fn form_point2(&mut self) -> Point2<f32> {\n\n na::Point2::new(self.x, self.y)\n\n }\n\n\n\n pub fn form_vertex(&mut self) -> Vertex {\n\n Vertex {\n\n color: [1.0, 1.0, 1.0, 1.0],\n\n pos: [self.x, self.y],\n\n uv: [1.0, 1.0],\n\n }\n\n }\n\n}\n\n\n\n/// Vec3d - Vec3d = Vec3d\n\nimpl Sub for Vec3d {\n\n type Output = Vec3d;\n\n\n\n fn sub(self, other: Vec3d) -> Vec3d {\n", "file_path": "src/vec3d.rs", "rank": 2, "score": 23744.240234800305 }, { "content": " Vec3d::new(self.x - other.x, self.y - other.y, self.z - other.z)\n\n }\n\n}\n\n\n\n/// Vec3d + Vec3d = Vec3d\n\nimpl 
Add for Vec3d {\n\n type Output = Vec3d;\n\n fn add(self, other: Vec3d) -> Self {\n\n Vec3d::new(self.x + other.x, self.y + other.y, self.z + other.z)\n\n }\n\n}\n\n\n\n/// Vec3d += Vec3d\n\nimpl AddAssign for Vec3d {\n\n fn add_assign(&mut self, rhs: Vec3d) {\n\n self.x += rhs.x;\n\n self.y += rhs.y;\n\n self.z += rhs.z;\n\n }\n\n}\n", "file_path": "src/vec3d.rs", "rank": 3, "score": 23742.330973315104 }, { "content": " (tmp_x - origin_x) * f32::sin(angle) + (tmp_z - origin_z) * f32::cos(angle) + origin_z;\n\n }\n\n\n\n /// Rotates the Vec3d around the z-axis at the origin point.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Vec3d the function was called for.\n\n /// * `r` - The Amount rotated by.\n\n /// * `origin_x` - The x position of the origin point.\n\n /// * `origin_y` - The y position of the origin point.\n\n ///\n\n pub fn z_axis_rotation(&mut self, r: f32, origin_x: f32, origin_y: f32) {\n\n let tmp_x = self.x;\n\n let tmp_y = self.y;\n\n let angle: f32 = (r) * (3.14159265 / 180.0);\n\n self.x =\n\n (tmp_x - origin_x) * f32::cos(angle) - (tmp_y - origin_y) * f32::sin(angle) + origin_x;\n\n self.y =\n\n (tmp_x - origin_x) * f32::sin(angle) + (tmp_y - origin_y) * f32::cos(angle) + origin_y;\n\n }\n", "file_path": "src/vec3d.rs", "rank": 4, "score": 23742.16713976834 }, { "content": " (tmp_y - origin_y) * f32::cos(angle) - (tmp_z - origin_z) * f32::sin(angle) + origin_y;\n\n self.z =\n\n (tmp_y - origin_y) * f32::sin(angle) + (tmp_z - origin_z) * f32::cos(angle) + origin_z;\n\n }\n\n\n\n /// Rotates the Vec3d around the y-axis at the origin point.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Vec3d the function was called for.\n\n /// * `r` - The Amount rotated by.\n\n /// * `origin_x` - The x position of the origin point.\n\n /// * `origin_z` - The z position of the origin point.\n\n ///\n\n pub fn y_axis_rotation(&mut self, r: f32, origin_x: f32, origin_z: f32) {\n\n let tmp_x = self.x;\n\n let tmp_z = self.z;\n\n let angle: f32 = (r) * 
(3.14159265 / 180.0);\n\n self.x =\n\n (tmp_x - origin_x) * f32::cos(angle) - (tmp_z - origin_z) * f32::sin(angle) + origin_x;\n\n self.z =\n", "file_path": "src/vec3d.rs", "rank": 5, "score": 23742.16713976834 }, { "content": "\n\n/// Vec3d * Vec3d = Vec3d\n\nimpl Mul for Vec3d {\n\n type Output = Vec3d;\n\n\n\n fn mul(self, other: Vec3d) -> Vec3d {\n\n Vec3d::new(self.x * other.x, self.y * other.y, self.z * other.z)\n\n }\n\n}\n\n\n\n/// Vec3d * f32 = Vec3d or f32 * Vec3d = Vec3d\n\nimpl Mul<f32> for Vec3d {\n\n type Output = Vec3d;\n\n\n\n fn mul(self, other: f32) -> Vec3d {\n\n Vec3d::new(self.x * other, self.y * other, self.z * other)\n\n }\n\n}\n", "file_path": "src/vec3d.rs", "rank": 6, "score": 23741.72102462229 }, { "content": "\n\n /// Normalize the Vec3d\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Vec3d the function was called for.\n\n ///\n\n /// # Return\n\n /// self\n\n ///\n\n pub fn normalize(&mut self) -> Vec3d {\n\n let len = f32::sqrt((self.x * self.x + self.y * self.y + self.z * self.z).into());\n\n self.x = self.x / len;\n\n self.y = self.y / len;\n\n self.z = self.z / len;\n\n *self\n\n }\n\n\n\n /// Set the length of the vector to a number.\n\n ///\n\n /// # Arguments\n", "file_path": "src/vec3d.rs", "rank": 7, "score": 23741.122481642287 }, { "content": " /// * `self` - The Vec3d the function was called for.\n\n /// * `len` - The desired length.\n\n ///\n\n /// # Return\n\n /// self\n\n ///\n\n pub fn set_length(&mut self, len: f32) -> Vec3d {\n\n self.normalize();\n\n self.x = self.x * len;\n\n self.y = self.y * len;\n\n self.z = self.z * len;\n\n *self\n\n }\n\n\n\n /// Remove the z value from the Vec3d.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Vec3d the function was called for.\n\n ///\n\n /// # Return\n", "file_path": "src/vec3d.rs", "rank": 8, "score": 23740.899490625405 }, { "content": "/// Main\n\n/// - Generates a Context and an event loop\n\n/// - Creates a Camera with a position and a rotaion\n\n/// - Creates a 
Mesh with the Camera\n\n/// - Loads in a file or the cube\n\n/// - Creates the window\n\n/// - Starts the eventloop for mesh\n\n///\n\n/// # Return\n\n/// A GameResult\n\n///\n\nfn main() -> ggez::GameResult {\n\n // Create Window\n\n let cb = ggez::ContextBuilder::new(\"my-engine\", \"littleTitan\");\n\n let (ctx, event_loop) = &mut cb.build()?;\n\n\n\n // Create Camera\n\n let camera: Camera = Camera::new(Vec3d::new(0.0, 0.0, -4.0), Vec3d::new(0.0, 0.0, 0.0));\n\n\n\n // Create Mesh\n\n let mut mesh = Mesh::new(Vec3d::new(0.0, 0.0, 0.0));\n\n mesh.from_file(\"models/xyz.stl\");\n\n\n\n let scene = &mut Scene::new(camera, vec![mesh])?;\n\n\n\n // Give Context and Mesh to GGez\n\n ggez::graphics::set_window_title(ctx, \"My Engine\");\n\n event::run(ctx, event_loop, scene)\n\n}\n", "file_path": "src/main.rs", "rank": 27, "score": 19434.603504273175 }, { "content": "use crate::vec3d::Vec3d;\n\nuse ggez::graphics::Vertex;\n\nuse ggez::{self, nalgebra::geometry::Point2};\n\nuse rand::prelude::*;\n\nuse std::ops::Add;\n\n\n\n/// A Triangle is a triangle with a normal, 3 vertecies, a\n\n/// color, a center point and its distance to the camera.\n\n#[derive(Copy, Clone)]\n\npub struct Triangle {\n\n pub normal: Vec3d,\n\n pub verticies: (Vec3d, Vec3d, Vec3d),\n\n pub color: (f32, f32, f32),\n\n pub center: Vec3d,\n\n pub dist: f32,\n\n}\n\n\n\nimpl Triangle {\n\n /// Create a new Triangle from 3 vertecies.\n\n ///\n", "file_path": "src/triangle.rs", "rank": 28, "score": 16.994785132853842 }, { "content": "use crate::{matrix3x3::Matrix3x3, mesh::Mesh, triangle::Triangle, vec3d::Vec3d};\n\n\n\nuse std::{sync::mpsc, thread};\n\n\n\n/// A Camera has a position and a rotation\n\n/// as well as forward up and right vectors\n\n/// used for moving the camera quickly.\n\n#[derive(Copy, Clone)]\n\npub struct Camera {\n\n pub position: Vec3d,\n\n pub rotation: Vec3d,\n\n\n\n // movement vectors\n\n pub r_forward: Vec3d,\n\n pub m_forward: Vec3d,\n\n pub m_right: 
Vec3d,\n\n}\n\n\n\nimpl Camera {\n\n /// Creates a new camera object with a position and a roation.\n", "file_path": "src/camera.rs", "rank": 29, "score": 14.751043436184553 }, { "content": "use crate::vec3d::Vec3d;\n\nuse std::ops::Mul;\n\n\n\n/// A 3 x 3 Matrix\n\n#[derive(Clone)]\n\npub struct Matrix3x3 {\n\n pub m: [[f32; 3]; 3],\n\n}\n\n\n\nimpl Matrix3x3 {\n\n /// Form a Matrix 3x3 from 3 Vec3d's\n\n ///\n\n /// # Argments\n\n /// * `col1` - column 1\n\n /// * `col2` - column 2\n\n /// * `col3` - column 3\n\n ///\n\n /// # Return\n\n /// A new Matrix3x3.\n\n ///\n", "file_path": "src/matrix3x3.rs", "rank": 30, "score": 14.562349263593456 }, { "content": "use ggez::GameResult;\n\n\n\nuse crate::{Camera, Mesh};\n\n\n\n/// The Scene to be rendered.\n\npub struct Scene {\n\n pub camera: Camera,\n\n pub mesh_vec: Vec<Mesh>,\n\n}\n\n\n\nimpl Scene {\n\n /// Creates a new Mesh with a camera\n\n ///\n\n /// # Arguments\n\n /// * `camera` - The Camera.\n\n /// * `mesh_vec` - A vec of all the meshes to render.\n\n ///\n\n /// # Return\n\n /// A GameRusult<Scene> object\n\n ///\n\n pub fn new(camera: Camera, mesh_vec: Vec<Mesh>) -> GameResult<Scene> {\n\n Ok(Scene {\n\n camera: camera,\n\n mesh_vec: mesh_vec,\n\n })\n\n }\n\n}\n", "file_path": "src/scene.rs", "rank": 31, "score": 13.996373439937656 }, { "content": " /// A GameResult\n\n ///\n\n fn draw(&mut self, ctx: &mut ggez::Context) -> ggez::GameResult {\n\n graphics::clear(ctx, [0.1, 0.2, 0.3, 1.0].into()); // clear\n\n\n\n let size: (f32, f32) = ggez::graphics::drawable_size(ctx);\n\n\n\n for i in 0..self.mesh_vec.len() {\n\n self.mesh_vec[i].is_over = false;\n\n // Get the projected triangles.\n\n let tris = self\n\n .camera\n\n .get_projected_triangles(self.mesh_vec[i].clone(), size);\n\n\n\n let mut raw: Vec<Vertex> = vec![];\n\n // Draw all the triangles\n\n for j in 0..tris.len() {\n\n let mut tri = tris[j];\n\n let pt_list = tri.form_vertexlist();\n\n raw.push(pt_list[0]);\n", "file_path": "src/main.rs", 
"rank": 32, "score": 12.565218382872745 }, { "content": " }\n\n}\n\n\n\n// Triangle + Vec3d = Triangle\n\nimpl Add<Vec3d> for Triangle {\n\n type Output = Triangle;\n\n\n\n fn add(self, other: Vec3d) -> Self {\n\n let mut new_tri = self.clone();\n\n\n\n new_tri.verticies.0 += other;\n\n new_tri.verticies.1 += other;\n\n new_tri.verticies.2 += other;\n\n new_tri.center = Triangle::calculate_center(new_tri.verticies);\n\n\n\n new_tri\n\n }\n\n}\n", "file_path": "src/triangle.rs", "rank": 33, "score": 12.257948757089611 }, { "content": " /// implements multithreading\n\n /// defines a rotation matrix\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `self` - A mutable reference to the camera the function was called for\n\n /// * `real_mesh` - The real mesh (not projected)\n\n /// * `size` - The dimensions of the screen\n\n ///\n\n /// # Return\n\n ///\n\n /// A vec containing the projected triangles\n\n ///\n\n pub fn get_projected_triangles(&mut self, real_mesh: Mesh, size: (f32, f32)) -> Vec<Triangle> {\n\n // rotation martrix\n\n let rotation = self.rotation * (3.14159265 / 180.0);\n\n\n\n let r_x = Matrix3x3 {\n\n m: [\n\n [1.0, 0.0, 0.0],\n", "file_path": "src/camera.rs", "rank": 34, "score": 11.2759894277571 }, { "content": " self.m_forward = (r_y.clone() * Vec3d::new(0.0, 0.0, 1.0)).normalize();\n\n self.m_right = (r_y.clone() * Vec3d::new(1.0, 0.0, 0.0)).normalize();\n\n\n\n // deal with the triangles\n\n let this = self.clone();\n\n let mut tris: Vec<Triangle> = Vec::new();\n\n let s = real_mesh.tris.len();\n\n let (tx, rx) = mpsc::channel();\n\n for i in 0..s {\n\n let tri = real_mesh.tris[i];\n\n let tx = tx.clone();\n\n\n\n let n_r = r.clone();\n\n thread::spawn(move || {\n\n let look: Vec3d = tri.center - this.position;\n\n let look_len =\n\n f64::sqrt((look.x * look.x + look.y * look.y + look.z * look.z).into());\n\n let lx = (look.x as f64) / look_len;\n\n let ly = (look.y as f64) / look_len;\n\n let lz = (look.z as f64) / look_len;\n", "file_path": 
"src/camera.rs", "rank": 35, "score": 9.891579151571577 }, { "content": " /// * `origin_y` - The y position of the origin point.\n\n /// * `origin_z` - The z position of the origin point.\n\n ///\n\n pub fn x_axis_rotation(&mut self, r: f32, origin_y: f32, origin_z: f32) {\n\n self.verticies.0.x_axis_rotation(r, origin_y, origin_z);\n\n self.verticies.1.x_axis_rotation(r, origin_y, origin_z);\n\n self.verticies.2.x_axis_rotation(r, origin_y, origin_z);\n\n self.normal = Triangle::calculate_normal(self.verticies);\n\n self.center = Triangle::calculate_center(self.verticies);\n\n }\n\n\n\n /// Rotates the Triangle around the y-axis at the origin point.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Triangle the function was called for.\n\n /// * `r` - The Amount rotated by.\n\n /// * `origin_x` - The x position of the origin point.\n\n /// * `origin_z` - The z position of the origin point.\n\n ///\n\n pub fn y_axis_rotation(&mut self, r: f32, origin_x: f32, origin_z: f32) {\n", "file_path": "src/triangle.rs", "rank": 36, "score": 9.650973182690368 }, { "content": " /// # Arguments\n\n ///\n\n /// * `self` - A mutable reference to the camera the function was called for\n\n /// * `real` - The real position\n\n /// * `size` - The dimensions of the screen\n\n /// * `r` - The rotation matrix\n\n ///\n\n /// # Return\n\n ///\n\n /// The projected Vec3d\n\n ///\n\n pub fn get_point_projection(self, real: Vec3d, size: (f32, f32), r: Matrix3x3) -> Vec3d {\n\n let mut v_e1 = r.clone() * Vec3d::new(1.0, 0.0, 0.0);\n\n let mut v_e2 = r.clone() * Vec3d::new(0.0, 1.0, 0.0);\n\n let v_f = r.clone() * Vec3d::new(0.0, 0.0, 1.0);\n\n let v_v = r.clone() * r.clone() * (self.position - real);\n\n // size the screen cord plane aspect ratio and screen limits\n\n v_e1.set_length(0.5 * (size.0 / size.1));\n\n v_e2.set_length(-0.5); // compensate for flipped y on the x,y plane of the screen\n\n\n\n let mut mat = Matrix3x3::from_vec3ds(v_e1, v_e2, v_v);\n\n let inverse_mat = 
mat.calculate_inverse();\n\n\n\n inverse_mat * (v_f * -1.0)\n\n }\n\n}\n", "file_path": "src/camera.rs", "rank": 37, "score": 9.348110047530442 }, { "content": "mod camera;\n\nmod matrix3x3;\n\nmod mesh;\n\nmod scene;\n\nmod triangle;\n\nmod vec3d;\n\n\n\nuse camera::Camera;\n\nuse mesh::Mesh;\n\nuse scene::Scene;\n\nuse vec3d::Vec3d;\n\n\n\nuse ggez::{\n\n self, event, event::KeyCode, graphics, graphics::Vertex, input::keyboard, nalgebra as na,\n\n timer::delta,\n\n};\n\n\n\n/// Event Handler for a Mesh\n\n///\n\n/// NOTE: This structure/Object hierarchy is subject to change\n", "file_path": "src/main.rs", "rank": 38, "score": 9.24872952207029 }, { "content": " }\n\n\n\n /// Clip the Triangle's so that no part of them is being rendered off the screen.\n\n ///\n\n /// # Arguments\n\n /// * `x_bound` - The width of the screen\n\n /// * `y_bound` - The height of the screen\n\n ///\n\n /// # Return\n\n /// A vec of Triangle's that are on the screen and can replace the original\n\n ///\n\n pub fn clip(&mut self, x_bound: f32, y_bound: f32, mut depth: i32) -> Vec<Triangle> {\n\n depth = depth + 1;\n\n // if depth > 100 {\n\n // println!(\"{}\", depth);\n\n // }\n\n if ((self.verticies.0.x > x_bound)\n\n && (self.verticies.1.x > x_bound)\n\n && (self.verticies.2.x > x_bound))\n\n || ((self.verticies.0.y > y_bound)\n", "file_path": "src/triangle.rs", "rank": 39, "score": 8.541497178188857 }, { "content": " /// * `verticies` - A tuple containing the 3 verticies of a Triangle.\n\n ///\n\n /// # Return\n\n /// The surface normal\n\n ///\n\n pub fn calculate_normal(verticies: (Vec3d, Vec3d, Vec3d)) -> Vec3d {\n\n let u: Vec3d = verticies.1 - verticies.0;\n\n let v: Vec3d = verticies.2 - verticies.0;\n\n\n\n let n_x: f32 = (u.y * v.z) - (u.z * v.y);\n\n let n_y: f32 = (u.z * v.x) - (u.x * v.z);\n\n let n_z: f32 = (u.x * v.y) - (u.y * v.x);\n\n\n\n let look_len = f32::sqrt(n_x * n_x + n_y * n_y + n_z * n_z);\n\n\n\n Vec3d::new(n_x / look_len, n_y / look_len, n_z / look_len)\n\n 
}\n\n\n\n /// Calculates the center of the Triangle.\n\n ///\n", "file_path": "src/triangle.rs", "rank": 40, "score": 8.234576026113036 }, { "content": " vec![*self]\n\n }\n\n\n\n /// sort an array of Triangle's according to depth\n\n ///\n\n /// # Arguments\n\n /// * `v` - The vec of projected Triangle's\n\n ///\n\n pub fn painters_algorithm(projected_triangles: &Vec<Triangle>) -> Vec<Triangle> {\n\n let mut out: Vec<Triangle> = projected_triangles.clone();\n\n out.sort_by(|b, a| a.dist.partial_cmp(&b.dist).unwrap());\n\n out\n\n }\n\n\n\n /// convert a projected Triangle to an array of 3 ggez Point2<f32>'s\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Triangle the function was called for.\n\n ///\n\n /// # Return\n", "file_path": "src/triangle.rs", "rank": 41, "score": 8.17217814616906 }, { "content": " + self.m[2][1] * rhs.m[1][1]\n\n + self.m[2][2] * rhs.m[2][1],\n\n self.m[2][0] * rhs.m[0][2]\n\n + self.m[2][1] * rhs.m[1][2]\n\n + self.m[2][2] * rhs.m[2][2],\n\n ],\n\n ],\n\n }\n\n }\n\n}\n\n\n\n/// Matrix3x3 * Vec3d = Vec3d\n\nimpl Mul<Vec3d> for Matrix3x3 {\n\n type Output = Vec3d;\n\n\n\n fn mul(self, rhs: Vec3d) -> Self::Output {\n\n Vec3d::new(\n\n rhs.x * self.m[0][0] + rhs.y * self.m[0][1] + rhs.z * self.m[0][2],\n\n rhs.x * self.m[1][0] + rhs.y * self.m[1][1] + rhs.z * self.m[1][2],\n\n rhs.x * self.m[2][0] + rhs.y * self.m[2][1] + rhs.z * self.m[2][2],\n", "file_path": "src/matrix3x3.rs", "rank": 42, "score": 7.756513028820348 }, { "content": " )\n\n }\n\n}\n\n\n\n/// Matrix3x3 * f32 = Matrix3x3\n\nimpl Mul<f32> for Matrix3x3 {\n\n type Output = Matrix3x3;\n\n\n\n fn mul(self, rhs: f32) -> Self::Output {\n\n Matrix3x3 {\n\n m: [\n\n [self.m[0][0] * rhs, self.m[0][1] * rhs, self.m[0][2] * rhs],\n\n [self.m[1][0] * rhs, self.m[1][1] * rhs, self.m[1][2] * rhs],\n\n [self.m[2][0] * rhs, self.m[2][1] * rhs, self.m[2][2] * rhs],\n\n ],\n\n }\n\n }\n\n}\n", "file_path": "src/matrix3x3.rs", "rank": 43, "score": 7.72761090750601 }, { "content": " /// An 
array of 3 Point2<f32>'s representing the projected 3 vertecies\n\n /// of the Triangle.\n\n ///\n\n pub fn form_pointlist(&mut self) -> [Point2<f32>; 3] {\n\n let list: [Point2<f32>; 3] = [\n\n //change\n\n self.verticies.0.form_point2(),\n\n self.verticies.1.form_point2(),\n\n self.verticies.2.form_point2(),\n\n ];\n\n list\n\n }\n\n\n\n pub fn form_vertexlist(&mut self) -> [Vertex; 3] {\n\n let list: [Vertex; 3] = [\n\n self.verticies.0.form_vertex(),\n\n self.verticies.1.form_vertex(),\n\n self.verticies.2.form_vertex(),\n\n ];\n\n list\n", "file_path": "src/triangle.rs", "rank": 44, "score": 7.568616162055743 }, { "content": " /// # Arguments\n\n /// * `vertex1` - The first vertex of the Triangle.\n\n /// * `vertex2` - The second vertex of the Triangle.\n\n /// * `vertex3` - The third vertex of the Triangle.\n\n ///\n\n /// # Return\n\n /// A new Triangle\n\n ///\n\n pub fn new(vertex1: Vec3d, vertex2: Vec3d, vertex3: Vec3d) -> Triangle {\n\n let normal = Triangle::calculate_normal((vertex1, vertex2, vertex3));\n\n let center = Triangle::calculate_center((vertex1, vertex2, vertex3));\n\n\n\n let mut rng = rand::thread_rng();\n\n let r = rng.gen::<f32>();\n\n let g = rng.gen::<f32>();\n\n let b = rng.gen::<f32>();\n\n Triangle {\n\n normal: normal,\n\n verticies: (vertex1, vertex2, vertex3),\n\n color: (r, g, b),\n", "file_path": "src/triangle.rs", "rank": 45, "score": 7.53418255964035 }, { "content": " /// \n\n /// # Arguments\n\n ///\n\n /// * `self` - The Camera the function is being called for.\n\n /// * `n` - Number of degrees to rotate by.\n\n ///\n\n pub fn rotate_right(&mut self, n: f32) {\n\n let mut new_rt = self.rotation.y + n;\n\n while new_rt > 180.0 || new_rt < -180.0 {\n\n if new_rt > 180.0 {\n\n new_rt = -360.0 + new_rt;\n\n }\n\n if new_rt < -180.0 {\n\n new_rt = 360.0 + new_rt;\n\n }\n\n }\n\n self.rotation.y = new_rt;\n\n }\n\n\n\n /// Creates a vec and populates it with of all the projected triangles\n", "file_path": "src/camera.rs", "rank": 
46, "score": 7.361951410154446 }, { "content": " raw.push(pt_list[1]);\n\n raw.push(pt_list[2]);\n\n\n\n let triangle = graphics::Mesh::from_triangles(\n\n ctx,\n\n &tri.form_pointlist(),\n\n graphics::Color::new(tri.color.0, tri.color.1, tri.color.2, 1.0),\n\n )?;\n\n graphics::draw(ctx, &triangle, (na::Point2::new(0.0, 0.0),))?;\n\n }\n\n // let triangle = graphics::Mesh::from_raw (\n\n // ctx,\n\n // &raw,\n\n // &vec![],\n\n // None,\n\n // )?;\n\n // graphics::draw(ctx, &triangle, (na::Point2::new(0.0, 0.0),))?;\n\n }\n\n\n\n graphics::present(ctx)?;\n", "file_path": "src/main.rs", "rank": 47, "score": 7.267554491952906 }, { "content": "\n\n let dot_product_normals: f64 = (tri.normal.x as f64 * lx)\n\n + (tri.normal.y as f64 * ly)\n\n + (tri.normal.z as f64 * lz); // fix this\n\n let dot_product_look: f64 = -(this.r_forward.x as f64 * lx)\n\n + (this.r_forward.y as f64 * ly).abs()\n\n + (this.r_forward.z as f64 * lz); // fix this\n\n if dot_product_normals < 0.0 && dot_product_look > 0.4 {\n\n // /\\/\\/\\ this should not be necessary\n\n let mut tri_projected: Triangle = Triangle::new(\n\n this.get_point_projection(tri.verticies.0, size, n_r.clone()),\n\n this.get_point_projection(tri.verticies.1, size, n_r.clone()),\n\n this.get_point_projection(tri.verticies.2, size, n_r.clone()),\n\n );\n\n\n\n tri_projected.color = tri.color;\n\n tri_projected.center = Triangle::calculate_center(tri_projected.verticies);\n\n\n\n tri_projected.verticies.0.x += 1.0;\n\n tri_projected.verticies.0.y += 1.0;\n", "file_path": "src/camera.rs", "rank": 48, "score": 7.2464578804717465 }, { "content": " self.verticies.0.y_axis_rotation(r, origin_x, origin_z);\n\n self.verticies.1.y_axis_rotation(r, origin_x, origin_z);\n\n self.verticies.2.y_axis_rotation(r, origin_x, origin_z);\n\n self.normal = Triangle::calculate_normal(self.verticies);\n\n self.center = Triangle::calculate_center(self.verticies);\n\n }\n\n\n\n /// Rotates the Triangle around the z-axis at the origin point.\n\n 
///\n\n /// # Arguments\n\n /// * `self` - The Triangle the function was called for.\n\n /// * `r` - The Amount rotated by.\n\n /// * `origin_x` - The x position of the origin point.\n\n /// * `origin_y` - The y position of the origin point.\n\n ///\n\n pub fn z_axis_rotation(&mut self, r: f32, origin_x: f32, origin_y: f32) {\n\n self.verticies.0.z_axis_rotation(r, origin_x, origin_y);\n\n self.verticies.1.z_axis_rotation(r, origin_x, origin_y);\n\n self.verticies.2.z_axis_rotation(r, origin_x, origin_y);\n\n self.normal = Triangle::calculate_normal(self.verticies);\n", "file_path": "src/triangle.rs", "rank": 49, "score": 7.06464476953593 }, { "content": " /// # Arguments\n\n /// * `verticies` - A tuple containing the 3 verticies of a Triangle.\n\n ///\n\n /// # Return\n\n /// The center\n\n ///\n\n pub fn calculate_center(verticies: (Vec3d, Vec3d, Vec3d)) -> Vec3d {\n\n let center: Vec3d = Vec3d::new(\n\n (verticies.0.x + verticies.1.x + verticies.2.x) / 3.0,\n\n (verticies.0.y + verticies.1.y + verticies.2.y) / 3.0,\n\n (verticies.0.z + verticies.1.z + verticies.2.z) / 3.0,\n\n );\n\n center\n\n }\n\n\n\n /// Rotates the Triangle around the x-axis at the origin point.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Triangle the function was called for.\n\n /// * `r` - The Amount rotated by.\n", "file_path": "src/triangle.rs", "rank": 50, "score": 6.807309076458233 }, { "content": " ///\n\n /// * `self` - The Matrix3x3 the function was called for\n\n ///\n\n #[allow(dead_code)]\n\n pub fn determinant(&mut self) -> f32 {\n\n let det: f32 = self.m[0][0]\n\n * ((self.m[1][1] * self.m[2][2]) - (self.m[1][2] * self.m[2][1]))\n\n - self.m[0][1] * ((self.m[1][0] * self.m[2][2]) - (self.m[1][2] * self.m[2][0]))\n\n + self.m[0][2] * ((self.m[1][0] * self.m[2][1]) - (self.m[1][1] * self.m[2][0]));\n\n\n\n det\n\n }\n\n\n\n /// Get the inverse of a 3x3 matrix.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The matrix the function is being called for\n\n ///\n\n /// # 
Return\n\n /// The inverse Matrix\n", "file_path": "src/matrix3x3.rs", "rank": 51, "score": 6.713058516299686 }, { "content": " && (self.verticies.1.y > y_bound)\n\n && (self.verticies.2.y > y_bound))\n\n || ((self.verticies.0.x < 0.0)\n\n && (self.verticies.1.x < 0.0)\n\n && (self.verticies.2.x < 0.0))\n\n || ((self.verticies.0.y < 0.0)\n\n && (self.verticies.1.y < 0.0)\n\n && (self.verticies.2.y < 0.0))\n\n {\n\n let na: Vec<Triangle> = Vec::new();\n\n return na; //return\n\n }\n\n\n\n // X\n\n if self.verticies.0.x > x_bound {\n\n if self.verticies.1.x > x_bound {\n\n if !(self.verticies.2.x > x_bound) {\n\n let mut b_c_pt = self.verticies.1 - self.verticies.2;\n\n let f = (x_bound - self.verticies.2.x) / b_c_pt.x;\n\n b_c_pt.x *= f;\n", "file_path": "src/triangle.rs", "rank": 52, "score": 6.626856277744667 }, { "content": " tri_projected.verticies.1.x += 1.0;\n\n tri_projected.verticies.1.y += 1.0;\n\n tri_projected.verticies.2.x += 1.0;\n\n tri_projected.verticies.2.y += 1.0;\n\n\n\n tri_projected.verticies.0.x *= 0.5 * size.0;\n\n tri_projected.verticies.0.y *= 0.5 * size.1;\n\n tri_projected.verticies.1.x *= 0.5 * size.0;\n\n tri_projected.verticies.1.y *= 0.5 * size.1;\n\n tri_projected.verticies.2.x *= 0.5 * size.0;\n\n tri_projected.verticies.2.y *= 0.5 * size.1;\n\n\n\n let mut projected_tris = tri_projected.clip(size.0, size.1, 0);\n\n\n\n for tri_n in 0..projected_tris.len() {\n\n projected_tris[tri_n].dist = look_len as f32;\n\n }\n\n\n\n tx.send(projected_tris).unwrap();\n\n } else {\n", "file_path": "src/camera.rs", "rank": 53, "score": 6.561244883342992 }, { "content": " /// * `inc_y` - The number the y position will be incremented by.\n\n ///\n\n pub fn increment_y(&mut self, inc_y: f32) {\n\n self.verticies.0.y += inc_y;\n\n self.verticies.1.y += inc_y;\n\n self.verticies.2.y += inc_y;\n\n self.center = Triangle::calculate_center(self.verticies);\n\n }\n\n\n\n /// Increment a Triangle's z position by a number.\n\n ///\n\n /// # Arguments\n\n /// 
* `self` - The Triangle this function was called for.\n\n /// * `inc_z` - The number the z position will be incremented by.\n\n ///\n\n pub fn increment_z(&mut self, inc_z: f32) {\n\n self.verticies.0.z += inc_z;\n\n self.verticies.1.z += inc_z;\n\n self.verticies.2.z += inc_z;\n\n self.center = Triangle::calculate_center(self.verticies);\n", "file_path": "src/triangle.rs", "rank": 54, "score": 6.413032482730202 }, { "content": " ///\n\n pub fn calculate_inverse(&mut self) -> Matrix3x3 {\n\n // matrix (a, b, c)\n\n // (d, e, f)\n\n // (g, h, i)\n\n // determinant of a 3x3 matrix is |A| = a(ei − fh)\n\n // − b(di − fg)\n\n // + c(dh − eg)\n\n\n\n let det: f32 = self.m[0][0]\n\n * ((self.m[1][1] * self.m[2][2]) - (self.m[1][2] * self.m[2][1]))\n\n - self.m[0][1] * ((self.m[1][0] * self.m[2][2]) - (self.m[1][2] * self.m[2][0]))\n\n + self.m[0][2] * ((self.m[1][0] * self.m[2][1]) - (self.m[1][1] * self.m[2][0]));\n\n\n\n // transpose\n\n // matrix (a, b, c)\n\n // (d, e, f)\n\n // (g, h, i)\n\n //\n\n // matrix (a, d, g)\n", "file_path": "src/matrix3x3.rs", "rank": 55, "score": 6.389684063496961 }, { "content": " vertex3: Vec3d,\n\n ) -> Triangle {\n\n let center = Triangle::calculate_center((vertex1, vertex2, vertex3));\n\n\n\n let mut rng = rand::thread_rng();\n\n let r = rng.gen::<f32>();\n\n let g = rng.gen::<f32>();\n\n let b = rng.gen::<f32>();\n\n Triangle {\n\n normal: normal,\n\n verticies: (vertex1, vertex2, vertex3),\n\n color: (r, g, b),\n\n center: center,\n\n dist: 0.0,\n\n }\n\n }\n\n\n\n /// Calculates the surface normal.\n\n ///\n\n /// # Arguments\n", "file_path": "src/triangle.rs", "rank": 56, "score": 6.080711639427704 }, { "content": " };\n\n\n\n adjugate_mat * (1.00000 / det)\n\n }\n\n}\n\n\n\n/// Matrix3x3 * Matrix3x3 = Matrix3x3\n\nimpl Mul for Matrix3x3 {\n\n type Output = Self;\n\n\n\n fn mul(self, rhs: Self) -> Self::Output {\n\n Matrix3x3 {\n\n m: [\n\n [\n\n self.m[0][0] * rhs.m[0][0]\n\n + self.m[0][1] * rhs.m[1][0]\n\n + self.m[0][2] * 
rhs.m[2][0],\n\n self.m[0][0] * rhs.m[0][1]\n\n + self.m[0][1] * rhs.m[1][1]\n\n + self.m[0][2] * rhs.m[2][1],\n", "file_path": "src/matrix3x3.rs", "rank": 57, "score": 5.8070221262477215 }, { "content": " /// \n\n /// # Arguments\n\n ///\n\n /// * `self` - The Camera the function is being called for.\n\n /// * `n` - Number of degrees to rotate by.\n\n ///\n\n pub fn rotate_down(&mut self, n: f32) {\n\n let mut new_rt = self.rotation.x + n;\n\n while new_rt > 180.0 || new_rt < -180.0 {\n\n if new_rt > 180.0 {\n\n new_rt = -360.0 + new_rt;\n\n }\n\n if new_rt < -180.0 {\n\n new_rt = 360.0 + new_rt;\n\n }\n\n }\n\n self.rotation.x = new_rt;\n\n }\n\n\n\n /// Rotates camera left\n", "file_path": "src/camera.rs", "rank": 58, "score": 5.699736609183851 }, { "content": " /// \n\n /// # Arguments\n\n ///\n\n /// * `self` - The Camera the function is being called for.\n\n /// * `n` - Number of degrees to rotate by.\n\n ///\n\n pub fn rotate_up(&mut self, n: f32) {\n\n let mut new_rt = self.rotation.x - n;\n\n while new_rt > 180.0 || new_rt < -180.0 {\n\n if new_rt > 180.0 {\n\n new_rt = -360.0 + new_rt;\n\n }\n\n if new_rt < -180.0 {\n\n new_rt = 360.0 + new_rt;\n\n }\n\n }\n\n self.rotation.x = new_rt;\n\n }\n\n\n\n /// Rotates camera down\n", "file_path": "src/camera.rs", "rank": 59, "score": 5.699736609183851 }, { "content": " /// \n\n /// # Arguments\n\n ///\n\n /// * `self` - The Camera the function is being called for.\n\n /// * `n` - Number of degrees to rotate by.\n\n ///\n\n pub fn rotate_left(&mut self, n: f32) {\n\n let mut new_rt = self.rotation.y - n;\n\n while new_rt > 180.0 || new_rt < -180.0 {\n\n if new_rt > 180.0 {\n\n new_rt = -360.0 + new_rt;\n\n }\n\n if new_rt < -180.0 {\n\n new_rt = 360.0 + new_rt;\n\n }\n\n }\n\n self.rotation.y = new_rt;\n\n }\n\n\n\n /// Rotates camera right\n", "file_path": "src/camera.rs", "rank": 60, "score": 5.639110810497763 }, { "content": " [0.0, f32::cos(rotation.x), f32::sin(rotation.x)],\n\n [0.0, -f32::sin(rotation.x), 
f32::cos(rotation.x)],\n\n ],\n\n };\n\n let r_y = Matrix3x3 {\n\n m: [\n\n [f32::cos(rotation.y), 0.0, -f32::sin(rotation.y)],\n\n [0.0, 1.0, 0.0],\n\n [f32::sin(rotation.y), 0.0, f32::cos(rotation.y)],\n\n ],\n\n };\n\n\n\n let r = r_x.clone() * r_y.clone();\n\n\n\n // directions\n\n // real\n\n self.r_forward = (r.clone() * Vec3d::new(0.0, 0.0, 1.0)).normalize();\n\n //println!(\"({},{},{})\", self.r_forward.x, self.r_forward.y, self.r_forward.z);\n\n\n\n // movement\n", "file_path": "src/camera.rs", "rank": 61, "score": 5.2731516071562075 }, { "content": " ///\n\n /// # Arguments\n\n /// * `pos` - a Vec3d containing the position of the camera\n\n /// * `rotation` - a Vec3d containing the rotation of the camera\n\n ///\n\n /// # Return\n\n ///\n\n /// A new Camera Object\n\n ///\n\n pub fn new(pos: Vec3d, rotation: Vec3d) -> Camera {\n\n Camera {\n\n position: pos,\n\n rotation: rotation,\n\n r_forward: Vec3d::new(0.0, 0.0, 1.0),\n\n m_forward: Vec3d::new(0.0, 0.0, 1.0),\n\n m_right: Vec3d::new(1.0, 0.0, 0.0),\n\n }\n\n }\n\n\n\n /// Rotates camera up\n", "file_path": "src/camera.rs", "rank": 62, "score": 5.24393019575029 }, { "content": " pub fn from_vec3ds(col1: Vec3d, col2: Vec3d, col3: Vec3d) -> Matrix3x3 {\n\n Matrix3x3 {\n\n m: [\n\n [col1.x, col1.y, col1.z],\n\n [col2.x, col2.y, col2.z],\n\n [col3.x, col3.y, col3.z],\n\n ],\n\n }\n\n }\n\n\n\n /// Calculate the determinant\n\n ///\n\n /// matrix (a, b, c)\n\n /// (d, e, f)\n\n /// (g, h, i)\n\n /// determinant of a 3x3 matrix is |A| = a(ei − fh)\n\n /// − b(di − fg)\n\n /// + c(dh − eg)\n\n ///\n\n /// # Arguments\n", "file_path": "src/matrix3x3.rs", "rank": 63, "score": 5.093798806513573 }, { "content": " self.center = Triangle::calculate_center(self.verticies);\n\n }\n\n\n\n /// Increment a Triangle's x position by a number.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Triangle this function was called for.\n\n /// * `inc_x` - The number the x position will be incremented by.\n\n ///\n\n pub fn 
increment_x(&mut self, inc_x: f32) {\n\n self.verticies.0.x += inc_x;\n\n self.verticies.1.x += inc_x;\n\n self.verticies.2.x += inc_x;\n\n self.center = Triangle::calculate_center(self.verticies);\n\n }\n\n\n\n /// Increment a Triangle's y position by a number.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Triangle this function was called for.\n", "file_path": "src/triangle.rs", "rank": 64, "score": 4.781238347549487 }, { "content": " self.camera.position =\n\n self.camera.position + Vec3d::new(0.0, 1.0, 0.0).set_length(time_factor);\n\n }\n\n if keyboard::is_key_pressed(ctx, KeyCode::LShift)\n\n || keyboard::is_key_pressed(ctx, KeyCode::LShift)\n\n {\n\n self.camera.position =\n\n self.camera.position - Vec3d::new(0.0, 1.0, 0.0).set_length(time_factor);\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Project the mesh and render it.\n\n ///\n\n /// # Arguments\n\n /// * `self` - The Mesh being updated\n\n /// * `ctx` - GGez's Context\n\n ///\n\n /// # Return\n", "file_path": "src/main.rs", "rank": 65, "score": 4.693902174553353 }, { "content": " /// /\n\n /// e\n\n ///\n\n /// note all variables shown are vectors\n\n ///\n\n /// x = real location of the cordinate denoted (x, y, z)\n\n /// e = the position of your eye directly behind the screen\n\n /// s = the position of the screen\n\n /// g = the projected point on the screen denoted (x', y', λ)\n\n /// e_1 = the x axis defined by your rotation\n\n /// e_2 = the y axis defined by your rotation. 
note: points up\n\n ///\n\n /// let v: vec3d = x - e; // vector from e to x\n\n /// // maths time\n\n /// f(λ) = e + λ(v) // so that f(0) = e\n\n /// //or // and f(1) = x\n\n /// f(λ) = e + λ(x - e)\n\n ///\n\n /// // plane/screen\n\n ///\n", "file_path": "src/camera.rs", "rank": 66, "score": 4.632717340545324 }, { "content": " center: center,\n\n dist: 0.0,\n\n }\n\n }\n\n\n\n /// Create a new Triangle from 3 vertecies and a predefined normal.\n\n ///\n\n /// # Arguments\n\n /// * `normal` - The surface normal of the Triangle.\n\n /// * `vertex1` - The first vertex of the Triangle.\n\n /// * `vertex2` - The second vertex of the Triangle.\n\n /// * `vertex3` - The third vertex of the Triangle.\n\n ///\n\n /// # Return\n\n /// A new Triangle\n\n ///\n\n pub fn new_with_normal(\n\n normal: Vec3d,\n\n vertex1: Vec3d,\n\n vertex2: Vec3d,\n", "file_path": "src/triangle.rs", "rank": 67, "score": 4.6096112858884 }, { "content": " /// # Arguments\n\n /// * `self` - The Mesh being updated\n\n /// * `ctx` - GGez's Context\n\n ///\n\n /// # Return\n\n /// A GameResult\n\n ///\n\n fn update(&mut self, ctx: &mut ggez::Context) -> ggez::GameResult {\n\n let time_factor = 1.0 / 100000000.0 * delta(ctx).as_nanos() as f32;\n\n\n\n if keyboard::is_key_pressed(ctx, KeyCode::Up) {\n\n self.camera.rotate_up(time_factor * 4.5);\n\n }\n\n if keyboard::is_key_pressed(ctx, KeyCode::Down) {\n\n self.camera.rotate_down(time_factor * 4.5);\n\n }\n\n if keyboard::is_key_pressed(ctx, KeyCode::Right) {\n\n self.camera.rotate_right(time_factor * 4.5);\n\n }\n\n if keyboard::is_key_pressed(ctx, KeyCode::Left) {\n", "file_path": "src/main.rs", "rank": 68, "score": 4.511284485271073 }, { "content": " tx.send(Vec::new()).unwrap();\n\n }\n\n });\n\n }\n\n\n\n // Collect results\n\n for _ in 0..s {\n\n tris.append(&mut rx.recv().unwrap());\n\n }\n\n //tris\n\n Triangle::painters_algorithm(&tris)\n\n }\n\n\n\n /// Projection logic explained:\n\n ///\n\n /// x\n\n /// /\n\n /// (.)e_2 /\n\n /// 
o-----.------> e_1\n\n /// s / g\n", "file_path": "src/camera.rs", "rank": 69, "score": 4.293706747872507 }, { "content": "MIT License\n\n\n\nCopyright (c) 2020 littleTitan\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\n\nof this software and associated documentation files (the \"Software\"), to deal\n\nin the Software without restriction, including without limitation the rights\n\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the Software is\n\nfurnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all\n\ncopies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n\nSOFTWARE.\n", "file_path": "LICENSE.md", "rank": 70, "score": 3.1111073716259248 }, { "content": " Ok(())\n\n }\n\n}\n\n\n\n/// Main\n\n/// - Generates a Context and an event loop\n\n/// - Creates a Camera with a position and a rotaion\n\n/// - Creates a Mesh with the Camera\n\n/// - Loads in a file or the cube\n\n/// - Creates the window\n\n/// - Starts the eventloop for mesh\n\n///\n\n/// # Return\n\n/// A GameResult\n\n///\n", "file_path": "src/main.rs", "rank": 71, "score": 2.9586163185759573 }, { "content": " } else {\n\n let mut c_a_pt = self.verticies.0 - self.verticies.2;\n\n let f = -self.verticies.2.x / c_a_pt.x;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.2;\n\n\n\n let mut c = 
Triangle::new(a_b_pt, self.verticies.1, self.verticies.2)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(a_b_pt, self.verticies.2, c_a_pt)\n\n .clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/triangle.rs", "rank": 72, "score": 2.589223718457827 }, { "content": " let mut c_a_pt = self.verticies.0 - self.verticies.2;\n\n let f = -self.verticies.2.y / c_a_pt.y;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.2;\n\n\n\n let mut c = Triangle::new(a_b_pt, self.verticies.1, self.verticies.2)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(a_b_pt, self.verticies.2, c_a_pt)\n\n .clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n }\n\n }\n\n\n\n if self.verticies.1.y < 0.0 {\n", "file_path": "src/triangle.rs", "rank": 73, "score": 2.571874101965289 }, { "content": " } else {\n\n let mut c_a_pt = self.verticies.0 - self.verticies.2;\n\n let f = (y_bound - self.verticies.2.y) / c_a_pt.y;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.2;\n\n\n\n let mut c = Triangle::new(a_b_pt, self.verticies.1, self.verticies.2)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(a_b_pt, self.verticies.2, c_a_pt)\n\n .clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/triangle.rs", "rank": 74, "score": 2.571874101965289 }, { "content": " b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.2;\n\n\n\n let mut c = Triangle::new(self.verticies.0, a_b_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(a_b_pt, b_c_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n }\n\n\n\n if self.verticies.2.y < 0.0 {\n\n let mut c_a_pt = self.verticies.2 - self.verticies.0;\n\n let f = -self.verticies.0.y / 
c_a_pt.y;\n\n c_a_pt.x *= f;\n", "file_path": "src/triangle.rs", "rank": 75, "score": 2.5547554470869143 }, { "content": " let f = -self.verticies.2.x / b_c_pt.x;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.2;\n\n\n\n let mut c = Triangle::new(self.verticies.0, a_b_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(a_b_pt, b_c_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n }\n\n\n\n if self.verticies.2.x < 0.0 {\n\n let mut c_a_pt = self.verticies.2 - self.verticies.0;\n\n let f = -self.verticies.0.x / c_a_pt.x;\n", "file_path": "src/triangle.rs", "rank": 76, "score": 2.5378631724052947 }, { "content": "\n\n let mut b_c_pt = self.verticies.2 - self.verticies.1;\n\n let f = (x_bound - self.verticies.1.x) / b_c_pt.x;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.1;\n\n\n\n let mut c = Triangle::new(self.verticies.0, self.verticies.1, c_a_pt)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(c_a_pt, self.verticies.1, b_c_pt).clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n\n\n // /\\/\\/\\\n\n if depth > 100 {\n\n println!(\"NA\");\n", "file_path": "src/triangle.rs", "rank": 77, "score": 2.521192816878078 }, { "content": " c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.0;\n\n\n\n let mut b_c_pt = self.verticies.2 - self.verticies.1;\n\n let f = -self.verticies.1.y / b_c_pt.y;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.1;\n\n\n\n let mut c = Triangle::new(self.verticies.0, self.verticies.1, c_a_pt)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(c_a_pt, self.verticies.1, b_c_pt).clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n\n", "file_path": "src/triangle.rs", "rank": 78, "score": 2.5047400359102596 }, { "content": " if 
self.verticies.1.x < 0.0 {\n\n let mut a_b_pt = self.verticies.1 - self.verticies.0;\n\n let f = -self.verticies.0.x / a_b_pt.x;\n\n a_b_pt.x *= f;\n\n a_b_pt.y *= f;\n\n a_b_pt.z *= f;\n\n a_b_pt = a_b_pt + self.verticies.0;\n\n\n\n if self.verticies.2.x < 0.0 {\n\n let mut c_a_pt = self.verticies.2 - self.verticies.0;\n\n let f = -self.verticies.0.x / c_a_pt.x;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.0;\n\n\n\n return Triangle::new(self.verticies.0, a_b_pt, c_a_pt)\n\n .clip(x_bound, y_bound, depth);\n\n } else {\n\n let mut b_c_pt = self.verticies.1 - self.verticies.2;\n", "file_path": "src/triangle.rs", "rank": 79, "score": 2.4885005975792573 }, { "content": " let f = (y_bound - self.verticies.2.y) / b_c_pt.y;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.2;\n\n\n\n let mut c = Triangle::new(self.verticies.0, a_b_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(a_b_pt, b_c_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n }\n\n\n\n if self.verticies.2.y > y_bound {\n\n let mut c_a_pt = self.verticies.2 - self.verticies.0;\n\n let f = (y_bound - self.verticies.0.y) / c_a_pt.y;\n", "file_path": "src/triangle.rs", "rank": 80, "score": 2.4885005975792573 }, { "content": " c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.0;\n\n\n\n let mut b_c_pt = self.verticies.2 - self.verticies.1;\n\n let f = -self.verticies.1.x / b_c_pt.x;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.1;\n\n\n\n let mut c = Triangle::new(self.verticies.0, self.verticies.1, c_a_pt)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(c_a_pt, self.verticies.1, b_c_pt).clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n", "file_path": "src/triangle.rs", "rank": 81, "score": 
2.4885005975792573 }, { "content": " self.camera.rotate_left(time_factor * 4.5);\n\n }\n\n\n\n if keyboard::is_key_pressed(ctx, KeyCode::W) {\n\n self.camera.position = self.camera.position\n\n + (self.camera.m_forward * Vec3d::new(-1.0, -1.0, 1.0)).set_length(time_factor);\n\n }\n\n if keyboard::is_key_pressed(ctx, KeyCode::S) {\n\n self.camera.position = self.camera.position\n\n - (self.camera.m_forward * Vec3d::new(-1.0, -1.0, 1.0)).set_length(time_factor);\n\n }\n\n if keyboard::is_key_pressed(ctx, KeyCode::D) {\n\n self.camera.position = self.camera.position\n\n - (self.camera.m_right * Vec3d::new(-1.0, -1.0, 1.0)).set_length(time_factor);\n\n }\n\n if keyboard::is_key_pressed(ctx, KeyCode::A) {\n\n self.camera.position = self.camera.position\n\n + (self.camera.m_right * Vec3d::new(-1.0, -1.0, 1.0)).set_length(time_factor);\n\n }\n\n if keyboard::is_key_pressed(ctx, KeyCode::Space) {\n", "file_path": "src/main.rs", "rank": 82, "score": 2.476605144690197 }, { "content": " c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.0;\n\n\n\n let mut b_c_pt = self.verticies.2 - self.verticies.1;\n\n let f = (y_bound - self.verticies.1.y) / b_c_pt.y;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.1;\n\n\n\n let mut c = Triangle::new(self.verticies.0, self.verticies.1, c_a_pt)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(c_a_pt, self.verticies.1, b_c_pt).clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n", "file_path": "src/triangle.rs", "rank": 83, "score": 2.472470379005894 }, { "content": " let mut a_b_pt = self.verticies.1 - self.verticies.0;\n\n let f = -self.verticies.0.y / a_b_pt.y;\n\n a_b_pt.x *= f;\n\n a_b_pt.y *= f;\n\n a_b_pt.z *= f;\n\n a_b_pt = a_b_pt + self.verticies.0;\n\n\n\n if self.verticies.2.y < 0.0 {\n\n let mut c_a_pt = self.verticies.2 - self.verticies.0;\n\n let f = -self.verticies.0.y / c_a_pt.y;\n\n c_a_pt.x *= f;\n\n 
c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.0;\n\n\n\n return Triangle::new(self.verticies.0, a_b_pt, c_a_pt)\n\n .clip(x_bound, y_bound, depth);\n\n } else {\n\n let mut b_c_pt = self.verticies.1 - self.verticies.2;\n\n let f = -self.verticies.2.y / b_c_pt.y;\n", "file_path": "src/triangle.rs", "rank": 84, "score": 2.472470379005894 }, { "content": " if self.verticies.1.y > y_bound {\n\n let mut a_b_pt = self.verticies.1 - self.verticies.0;\n\n let f = (y_bound - self.verticies.0.y) / a_b_pt.y;\n\n a_b_pt.x *= f;\n\n a_b_pt.y *= f;\n\n a_b_pt.z *= f;\n\n a_b_pt = a_b_pt + self.verticies.0;\n\n\n\n if self.verticies.2.y > y_bound {\n\n let mut c_a_pt = self.verticies.2 - self.verticies.0;\n\n let f = (y_bound - self.verticies.0.y) / c_a_pt.y;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.0;\n\n\n\n return Triangle::new(self.verticies.0, a_b_pt, c_a_pt)\n\n .clip(x_bound, y_bound, depth);\n\n } else {\n\n let mut b_c_pt = self.verticies.1 - self.verticies.2;\n", "file_path": "src/triangle.rs", "rank": 85, "score": 2.4255953763344014 }, { "content": " }\n\n\n\n // < 0\n\n if self.verticies.0.x < 0.0 {\n\n if self.verticies.1.x < 0.0 {\n\n if !(self.verticies.2.x < 0.0) {\n\n let mut b_c_pt = self.verticies.1 - self.verticies.2;\n\n let f = -self.verticies.2.x / b_c_pt.x;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.2;\n\n\n\n let mut c_a_pt = self.verticies.0 - self.verticies.2;\n\n let f = -self.verticies.2.x / c_a_pt.x;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.2;\n\n\n", "file_path": "src/triangle.rs", "rank": 86, "score": 2.3843376528795193 }, { "content": " a_b_pt = a_b_pt + self.verticies.1;\n\n if self.verticies.2.x > x_bound {\n\n let mut b_c_pt = self.verticies.2 - self.verticies.1;\n\n let f = (x_bound - self.verticies.1.x) / b_c_pt.x;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n 
b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.1;\n\n\n\n return Triangle::new(a_b_pt, self.verticies.1, b_c_pt)\n\n .clip(x_bound, y_bound, depth); // return\n\n } else {\n\n let mut c_a_pt = self.verticies.0 - self.verticies.2;\n\n let f = (x_bound - self.verticies.2.x) / c_a_pt.x;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.2;\n\n\n\n let mut c = Triangle::new(a_b_pt, self.verticies.1, self.verticies.2)\n", "file_path": "src/triangle.rs", "rank": 87, "score": 2.3804646885880727 }, { "content": " let mut c_a_pt = self.verticies.2 - self.verticies.0;\n\n let f = (x_bound - self.verticies.0.x) / c_a_pt.x;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.0;\n\n\n\n return Triangle::new(self.verticies.0, a_b_pt, c_a_pt)\n\n .clip(x_bound, y_bound, depth);\n\n } else {\n\n let mut b_c_pt = self.verticies.1 - self.verticies.2;\n\n let f = (x_bound - self.verticies.2.x) / b_c_pt.x;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.2;\n\n\n\n let mut c = Triangle::new(self.verticies.0, a_b_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth);\n\n c.append(\n", "file_path": "src/triangle.rs", "rank": 88, "score": 2.3804646885880727 }, { "content": " &mut Triangle::new(a_b_pt, b_c_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n }\n\n\n\n //// \\/\\/\\/\n\n if depth > 100 {\n\n println!(\"{}\", depth);\n\n }\n\n\n\n if self.verticies.2.x > x_bound {\n\n let mut c_a_pt = self.verticies.2 - self.verticies.0;\n\n let f = (x_bound - self.verticies.0.x) / c_a_pt.x;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.0;\n", "file_path": "src/triangle.rs", "rank": 89, "score": 2.3623251569634274 }, { "content": "\n\n // < 0\n\n if self.verticies.0.y < 0.0 {\n\n if self.verticies.1.y < 0.0 {\n\n if !(self.verticies.2.y < 0.0) {\n\n let mut b_c_pt = 
self.verticies.1 - self.verticies.2;\n\n let f = -self.verticies.2.y / b_c_pt.y;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.2;\n\n\n\n let mut c_a_pt = self.verticies.0 - self.verticies.2;\n\n let f = -self.verticies.2.y / c_a_pt.y;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.2;\n\n\n\n return Triangle::new(c_a_pt, b_c_pt, self.verticies.2)\n", "file_path": "src/triangle.rs", "rank": 90, "score": 2.2781948659014173 }, { "content": " .clip(x_bound, y_bound, depth);\n\n c.append(\n\n &mut Triangle::new(a_b_pt, self.verticies.2, c_a_pt)\n\n .clip(x_bound, y_bound, depth),\n\n );\n\n\n\n return c;\n\n }\n\n }\n\n }\n\n\n\n if self.verticies.1.x > x_bound {\n\n let mut a_b_pt = self.verticies.1 - self.verticies.0;\n\n let f = (x_bound - self.verticies.0.x) / a_b_pt.x;\n\n a_b_pt.x *= f;\n\n a_b_pt.y *= f;\n\n a_b_pt.z *= f;\n\n a_b_pt = a_b_pt + self.verticies.0;\n\n\n\n if self.verticies.2.x > x_bound {\n", "file_path": "src/triangle.rs", "rank": 91, "score": 2.2781948659014173 }, { "content": "\n\n // --------------------------------------------------------------------------------------------------------\n\n // Y ------------------------------------------------------------------------------------------------------\n\n if self.verticies.0.y > y_bound {\n\n if self.verticies.1.y > y_bound {\n\n if !(self.verticies.2.y > y_bound) {\n\n let mut b_c_pt = self.verticies.1 - self.verticies.2;\n\n let f = (y_bound - self.verticies.2.y) / b_c_pt.y;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.2;\n\n\n\n let mut c_a_pt = self.verticies.0 - self.verticies.2;\n\n let f = (y_bound - self.verticies.2.y) / c_a_pt.y;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.2;\n\n\n", "file_path": "src/triangle.rs", "rank": 92, "score": 2.258090330735177 }, { "content": " b_c_pt.y *= f;\n\n b_c_pt.z 
*= f;\n\n b_c_pt = b_c_pt + self.verticies.2;\n\n\n\n let mut c_a_pt = self.verticies.0 - self.verticies.2;\n\n let f = (x_bound - self.verticies.2.x) / c_a_pt.x;\n\n c_a_pt.x *= f;\n\n c_a_pt.y *= f;\n\n c_a_pt.z *= f;\n\n c_a_pt = c_a_pt + self.verticies.2;\n\n\n\n return Triangle::new(c_a_pt, b_c_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth); // return\n\n }\n\n } else {\n\n let mut a_b_pt = self.verticies.0 - self.verticies.1;\n\n let f = (x_bound - self.verticies.1.x) / a_b_pt.x;\n\n a_b_pt.x *= f;\n\n a_b_pt.y *= f;\n\n a_b_pt.z *= f;\n", "file_path": "src/triangle.rs", "rank": 93, "score": 2.181099563133328 }, { "content": " .clip(x_bound, y_bound, depth);\n\n }\n\n } else {\n\n let mut a_b_pt = self.verticies.0 - self.verticies.1;\n\n let f = -self.verticies.1.y / a_b_pt.y;\n\n a_b_pt.x *= f;\n\n a_b_pt.y *= f;\n\n a_b_pt.z *= f;\n\n a_b_pt = a_b_pt + self.verticies.1;\n\n if self.verticies.2.y < 0.0 {\n\n let mut b_c_pt = self.verticies.2 - self.verticies.1;\n\n let f = -self.verticies.1.y / b_c_pt.y;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.1;\n\n\n\n return Triangle::new(a_b_pt, self.verticies.1, b_c_pt)\n\n .clip(x_bound, y_bound, depth); // return\n\n } else {\n", "file_path": "src/triangle.rs", "rank": 94, "score": 2.1626652630961134 }, { "content": " return Triangle::new(c_a_pt, b_c_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth); // return\n\n }\n\n } else {\n\n let mut a_b_pt = self.verticies.0 - self.verticies.1;\n\n let f = -self.verticies.1.x / a_b_pt.x;\n\n a_b_pt.x *= f;\n\n a_b_pt.y *= f;\n\n a_b_pt.z *= f;\n\n a_b_pt = a_b_pt + self.verticies.1;\n\n if self.verticies.2.x < 0.0 {\n\n let mut b_c_pt = self.verticies.2 - self.verticies.1;\n\n let f = -self.verticies.1.x / b_c_pt.x;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.1;\n\n\n\n return Triangle::new(a_b_pt, self.verticies.1, b_c_pt)\n\n .clip(x_bound, y_bound, 
depth); // return\n", "file_path": "src/triangle.rs", "rank": 95, "score": 2.074978359006105 }, { "content": " return Triangle::new(c_a_pt, b_c_pt, self.verticies.2)\n\n .clip(x_bound, y_bound, depth);\n\n } // else \\/\\/\\/// return\n\n } else {\n\n let mut a_b_pt = self.verticies.0 - self.verticies.1;\n\n let f = (y_bound - self.verticies.1.y) / a_b_pt.y;\n\n a_b_pt.x *= f;\n\n a_b_pt.y *= f;\n\n a_b_pt.z *= f;\n\n a_b_pt = a_b_pt + self.verticies.1;\n\n if self.verticies.2.y > y_bound {\n\n let mut b_c_pt = self.verticies.2 - self.verticies.1;\n\n let f = (y_bound - self.verticies.1.y) / b_c_pt.y;\n\n b_c_pt.x *= f;\n\n b_c_pt.y *= f;\n\n b_c_pt.z *= f;\n\n b_c_pt = b_c_pt + self.verticies.1;\n\n\n\n return Triangle::new(a_b_pt, self.verticies.1, b_c_pt)\n\n .clip(x_bound, y_bound, depth); // return\n", "file_path": "src/triangle.rs", "rank": 96, "score": 2.025698274832675 }, { "content": "# My 3D Engine\n\nA 3D engine made with ggez in rust.\n\n\n\nWARNING: This program may potentially trigger seizures for people with photosensitive epilepsy. Viewer discretion is advised.\n\n\n\n## Description\n\nThis is a 3D engine made with ggez and rust. 
I decided to make this project because I have always found the concept of 3D projection baffeling and wanted to understand it.\n\n\n\nNOTE: At the moment this program does all the calculations on the CPU...\n\n\n\n## Versions\n\n### v0.1.0\n\nBasic Working algorithm for rendering 3d models.\n\n\n\n### v0.2.0\n\nImplements Multithreading\n\nBetter projection Algorithm\n\nCrop triangles to fit screen\n\nBetter comments.\n\n\n\n### v0.3.0\n\nChanged movement to be more intuitive\n\nCompensate for reversed y axis\n\nBetter render culling\n\nCleaner code\n\n\n\n## Todo\n\n ### Bug Fix\n\n - Fix diagonal turning mirror error\n\n - Fix the big triangles glitch\n\n - Clipping stack overflow error\n\n - <!> Clean lighting <!>\n\n ### UI\n\n + Export Stl and Obj\n\n + Object manipulation\n\n - Rotation\n\n - Movement\n\n - Scale\n\n + Vertex manipulation\n\n + divide sides\n\n + background manipulation\n\n + Lock look\n\n + vertex mode\n\n + transparent mode\n\n\n\n## License\n\n[MIT](https://choosealicense.com/licenses/mit/)\n", "file_path": "README.md", "rank": 97, "score": 1.9773360619845812 }, { "content": "impl event::EventHandler for Scene {\n\n /// Game Loop\n\n /// <p>\n\n /// User Controls\n\n /// - UP_ARROW rotatate around player local x axis\n\n /// -> look up\n\n /// - DOWN_ARROW rotatate around player local x axis\n\n /// -> look down\n\n /// - RIGHT_ARROW rotatate around global y axis\n\n /// -> look right\n\n /// - LEFT_ARROW rotatate around global y axis\n\n /// -> look left\n\n ///\n\n /// - W_KEY Move Forward\n\n /// - S_KEY Move Backward\n\n /// - D_KEY Move Right\n\n /// - A_KEY Move Left\n\n ///\n\n /// NOTE: Controls may be subject to change\n\n ///\n", "file_path": "src/main.rs", "rank": 98, "score": 1.498307123903571 } ]
Rust
api/src/services/item.rs
sanpii/oxfeed
a8d79bdd115b903bdb05ec475ef4fbc9c1b01108
use actix_web::web::{Data, Json, Path}; use oxfeed_common::item::Model; use std::collections::HashMap; pub(crate) fn scope() -> actix_web::Scope { actix_web::web::scope("/items") .service(content) .service(favorites) .service(patch) .service(unread) .service(read_all) .service(all) } #[actix_web::get("")] async fn all( elephantry: Data<elephantry::Pool>, pagination: actix_web::web::Query<oxfeed_common::Pagination>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { fetch( &elephantry, &identity, &elephantry::Where::new(), &pagination, ) } #[actix_web::get("/favorites")] async fn favorites( elephantry: Data<elephantry::Pool>, pagination: actix_web::web::Query<oxfeed_common::Pagination>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { fetch( &elephantry, &identity, &elephantry::Where::from("favorite", Vec::new()), &pagination.into_inner(), ) } #[actix_web::get("/unread")] async fn unread( elephantry: Data<elephantry::Pool>, pagination: actix_web::web::Query<oxfeed_common::Pagination>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { fetch( &elephantry, &identity, &elephantry::Where::from("not read", Vec::new()), &pagination.into_inner(), ) } pub(crate) fn fetch( elephantry: &elephantry::Pool, identity: &crate::Identity, filter: &elephantry::Where, pagination: &oxfeed_common::Pagination, ) -> oxfeed_common::Result<actix_web::HttpResponse> { let token = identity.token(&elephantry)?; let model = elephantry.model::<Model>(); let items = model.all(&token, filter, pagination)?; let response = actix_web::HttpResponse::Ok().json(items); Ok(response) } #[actix_web::get("/{item_id}/content")] async fn content( elephantry: Data<elephantry::Pool>, path: Path<uuid::Uuid>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { let token = identity.token(&elephantry)?; let item_id = Some(path.into_inner()); let sql = include_str!("../../sql/item_content.sql"); 
let content = elephantry .query::<Option<String>>(sql, &[&item_id, &token])? .next(); let response = match content { Some(content) => actix_web::HttpResponse::Ok().body(&content.unwrap_or_default()), None => actix_web::HttpResponse::NotFound().finish(), }; Ok(response) } #[actix_web::patch("/{item_id}")] async fn patch( elephantry: Data<elephantry::Pool>, path: Path<uuid::Uuid>, json: Json<serde_json::Value>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { let token = identity.token(&elephantry)?; let item_id = path.into_inner(); match elephantry.model::<Model>().one(&token, &item_id)? { Some(_) => (), None => return Ok(actix_web::HttpResponse::NotFound().finish()), } let mut data = HashMap::new(); for (k, v) in json.as_object().unwrap() { let v = match v { serde_json::Value::Bool(v) => v as &dyn elephantry::ToSql, serde_json::Value::String(v) => v as &dyn elephantry::ToSql, _ => todo!(), }; data.insert(k.clone(), v); } let mut response = if !data.is_empty() { let item = elephantry.update_by_pk::<Model>(&elephantry::pk!(item_id), &data)?; match item { Some(_) => actix_web::HttpResponse::NoContent(), None => actix_web::HttpResponse::NotFound(), } } else { actix_web::HttpResponse::NoContent() }; Ok(response.finish()) } #[actix_web::post("/read")] async fn read_all( elephantry: Data<elephantry::Pool>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { let token = identity.token(&elephantry)?; let sql = include_str!("../../sql/read_all.sql"); elephantry.query::<()>(sql, &[&token])?; let response = actix_web::HttpResponse::NoContent().finish(); Ok(response) }
use actix_web::web::{Data, Json, Path}; use oxfeed_common::item::Model; use std::collections::HashMap; pub(crate) fn scope() -> actix_web::Scope { actix_web::web::scope("/items") .service(content) .service(favorites) .service(patch) .service(unread) .service(read_all) .service(all) } #[actix_web::get("")] async fn all( elephantry: Data<elephantry::Pool>, pagination: actix_web::web::Query<oxfeed_common::Pagination>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { fetch( &elephantry, &identity, &elephantry::Where::new(), &pagination, ) } #[actix_web::get("/favorites")] async fn favorites( elephantry: Data<elephantry::Pool>, pagination: actix_web::web::Query<oxfeed_common::Pagination>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { fetch( &elephantry, &identity, &elephantry::Where::from("favorite", Vec::new()), &pagination.into_inner(), ) } #[actix_web::get("/unread")]
pub(crate) fn fetch( elephantry: &elephantry::Pool, identity: &crate::Identity, filter: &elephantry::Where, pagination: &oxfeed_common::Pagination, ) -> oxfeed_common::Result<actix_web::HttpResponse> { let token = identity.token(&elephantry)?; let model = elephantry.model::<Model>(); let items = model.all(&token, filter, pagination)?; let response = actix_web::HttpResponse::Ok().json(items); Ok(response) } #[actix_web::get("/{item_id}/content")] async fn content( elephantry: Data<elephantry::Pool>, path: Path<uuid::Uuid>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { let token = identity.token(&elephantry)?; let item_id = Some(path.into_inner()); let sql = include_str!("../../sql/item_content.sql"); let content = elephantry .query::<Option<String>>(sql, &[&item_id, &token])? .next(); let response = match content { Some(content) => actix_web::HttpResponse::Ok().body(&content.unwrap_or_default()), None => actix_web::HttpResponse::NotFound().finish(), }; Ok(response) } #[actix_web::patch("/{item_id}")] async fn patch( elephantry: Data<elephantry::Pool>, path: Path<uuid::Uuid>, json: Json<serde_json::Value>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { let token = identity.token(&elephantry)?; let item_id = path.into_inner(); match elephantry.model::<Model>().one(&token, &item_id)? 
{ Some(_) => (), None => return Ok(actix_web::HttpResponse::NotFound().finish()), } let mut data = HashMap::new(); for (k, v) in json.as_object().unwrap() { let v = match v { serde_json::Value::Bool(v) => v as &dyn elephantry::ToSql, serde_json::Value::String(v) => v as &dyn elephantry::ToSql, _ => todo!(), }; data.insert(k.clone(), v); } let mut response = if !data.is_empty() { let item = elephantry.update_by_pk::<Model>(&elephantry::pk!(item_id), &data)?; match item { Some(_) => actix_web::HttpResponse::NoContent(), None => actix_web::HttpResponse::NotFound(), } } else { actix_web::HttpResponse::NoContent() }; Ok(response.finish()) } #[actix_web::post("/read")] async fn read_all( elephantry: Data<elephantry::Pool>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { let token = identity.token(&elephantry)?; let sql = include_str!("../../sql/read_all.sql"); elephantry.query::<()>(sql, &[&token])?; let response = actix_web::HttpResponse::NoContent().finish(); Ok(response) }
async fn unread( elephantry: Data<elephantry::Pool>, pagination: actix_web::web::Query<oxfeed_common::Pagination>, identity: crate::Identity, ) -> oxfeed_common::Result<actix_web::HttpResponse> { fetch( &elephantry, &identity, &elephantry::Where::from("not read", Vec::new()), &pagination.into_inner(), ) }
function_block-full_function
[ { "content": "fn path(url: &str) -> std::path::PathBuf {\n\n let digest = ring::digest::digest(&ring::digest::SHA256, url.as_bytes());\n\n\n\n let mut path = digest\n\n .as_ref()\n\n .chunks(4)\n\n .map(|x| {\n\n x.iter()\n\n .fold(String::new(), |acc, b| format!(\"{}{:02x}\", acc, b))\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n path.insert(0, crate::env(\"CACHE_DIR\").unwrap());\n\n\n\n path.iter().collect()\n\n}\n", "file_path": "api/src/cache.rs", "rank": 0, "score": 107075.51660028835 }, { "content": "fn count<T: elephantry::Entity>(\n\n elephantry: &elephantry::Pool,\n\n sql: &str,\n\n params: &[&dyn elephantry::ToSql],\n\n pagination: &oxfeed_common::Pagination,\n\n) -> oxfeed_common::Result<elephantry::Pager<T>> {\n\n let sql_count = format!(\"with items as ({}) select count(items) from items\", sql);\n\n let count = elephantry.query_one::<i64>(&sql_count, params)?;\n\n\n\n let mut sql = sql.to_string();\n\n sql.push_str(&pagination.to_sql());\n\n\n\n let items = elephantry.query::<T>(&sql, params)?;\n\n\n\n let pager = elephantry::Pager::new(items, count as usize, pagination.page, pagination.limit);\n\n\n\n Ok(pager)\n\n}\n", "file_path": "api/src/services/search.rs", "rank": 1, "score": 104239.61268129971 }, { "content": "fn save(\n\n elephantry: &elephantry::Pool,\n\n outline: &opml::Outline,\n\n user: &oxfeed_common::user::Entity,\n\n) {\n\n for outline in &outline.outlines {\n\n save(&elephantry, outline, user);\n\n }\n\n\n\n let source = match source_try_from(outline, user) {\n\n Ok(source) => source,\n\n Err(_) => return,\n\n };\n\n\n\n if let Err(error) = elephantry.insert_one::<oxfeed_common::source::Model>(&source) {\n\n log::error!(\"Unable to import outline '{}': {}\", source.title, error);\n\n }\n\n}\n\n\n", "file_path": "api/src/services/opml.rs", "rank": 2, "score": 88672.4457326752 }, { "content": "fn search(\n\n elephantry: &elephantry::Pool,\n\n identity: &crate::Identity,\n\n clause: &elephantry::Where,\n\n query: &Request,\n\n) -> 
oxfeed_common::Result<actix_web::HttpResponse> {\n\n let mut clause = clause.clone();\n\n\n\n let mut sql = if let Some(q) = &query.q {\n\n clause.and_where(\"document @@ websearch_to_tsquery($*)\", vec![q]);\n\n include_str!(\"../../sql/fts_items.sql\").to_string()\n\n } else {\n\n include_str!(\"../../sql/search_items.sql\").to_string()\n\n };\n\n\n\n if let Some(tag) = &query.tag {\n\n clause.and_where(\"$* = any(tags)\", vec![tag]);\n\n }\n\n\n\n let token = identity.token(&elephantry)?;\n", "file_path": "api/src/services/search.rs", "rank": 3, "score": 88672.4457326752 }, { "content": "fn source_try_from(\n\n outline: &opml::Outline,\n\n user: &oxfeed_common::user::Entity,\n\n) -> Result<oxfeed_common::source::Entity, ()> {\n\n let url = match &outline.xml_url {\n\n Some(url) => url.clone(),\n\n None => return Err(()),\n\n };\n\n\n\n let mut tags = Vec::new();\n\n\n\n if let Some(category) = &outline.category {\n\n tags.push(category.clone());\n\n }\n\n\n\n let entity = oxfeed_common::source::Entity {\n\n tags,\n\n title: outline.text.clone(),\n\n url,\n\n user_id: user.id,\n", "file_path": "api/src/services/opml.rs", "rank": 4, "score": 86203.22149255118 }, { "content": "fn default_page() -> usize {\n\n 1\n\n}\n\n\n", "file_path": "common/src/pagination.rs", "rank": 5, "score": 84117.4609833028 }, { "content": "fn default_limit() -> usize {\n\n 25\n\n}\n\n\n", "file_path": "common/src/pagination.rs", "rank": 6, "score": 84117.4609833028 }, { "content": "fn parse<'de, D>(deserializer: D) -> Result<usize, D::Error>\n\nwhere\n\n D: serde::Deserializer<'de>,\n\n{\n\n use serde::de::Error;\n\n use serde::Deserialize;\n\n\n\n let s = String::deserialize(deserializer)?;\n\n\n\n s.parse().map_err(D::Error::custom)\n\n}\n\n\n\nimpl Pagination {\n\n pub fn new() -> Self {\n\n Self {\n\n page: default_page(),\n\n limit: default_limit(),\n\n }\n\n }\n\n\n", "file_path": "common/src/pagination.rs", "rank": 7, "score": 62904.77694079444 }, { "content": "fn main() {\n\n 
#[cfg(debug_assertions)]\n\n dotenv::dotenv().ok();\n\n\n\n println!(\n\n \"cargo:rustc-env=API_URL={}\",\n\n std::env::var(\"API_URL\").unwrap_or_default()\n\n );\n\n println!(\n\n \"cargo:rustc-env=SECRET={}\",\n\n std::env::var(\"SECRET\").unwrap_or_default()\n\n );\n\n}\n", "file_path": "front/build.rs", "rank": 8, "score": 59592.10713466504 }, { "content": "fn main() {\n\n wasm_logger::init(wasm_logger::Config::new(log::Level::Debug));\n\n yew::initialize();\n\n yew::App::<App>::new().mount_to_body();\n\n}\n\n\n\n#[macro_export]\n\nmacro_rules! change {\n\n () => {\n\n fn change(&mut self, _: Self::Properties) -> yew::ShouldRender {\n\n false\n\n }\n\n };\n\n\n\n (props) => {\n\n fn change(&mut self, props: Self::Properties) -> yew::ShouldRender {\n\n let should_render = self.props != props;\n\n\n\n self.props = props;\n\n\n", "file_path": "front/src/main.rs", "rank": 9, "score": 58125.80273223653 }, { "content": "fn env(name: &str) -> oxfeed_common::Result<String> {\n\n std::env::var(name).map_err(|_| oxfeed_common::Error::Env(name.to_string()))\n\n}\n", "file_path": "api/src/main.rs", "rank": 10, "score": 41936.396003129215 }, { "content": "create index if not exists item_favorite on item(favorite);\n", "file_path": "api/sql/structure.sql", "rank": 11, "score": 38087.768614234985 }, { "content": "#[derive(Default, serde::Deserialize)]\n\npub(crate) struct Identity {\n\n token: uuid::Uuid,\n\n}\n\n\n\nimpl Identity {\n\n pub fn token(&self, elephantry: &elephantry::Connection) -> oxfeed_common::Result<uuid::Uuid> {\n\n use oxfeed_common::user::Model;\n\n\n\n if elephantry.exist_where::<Model>(\"token = $*\", &[&self.token])? 
{\n\n Ok(self.token)\n\n } else {\n\n Err(oxfeed_common::Error::Auth)\n\n }\n\n }\n\n\n\n fn unauthorized() -> futures_util::future::Ready<oxfeed_common::Result<Self>> {\n\n futures_util::future::err(oxfeed_common::Error::Auth)\n\n }\n\n}\n", "file_path": "api/src/identity.rs", "rank": 12, "score": 35443.31663198218 }, { "content": "\n\n let mid = match authorization.find(' ') {\n\n Some(mid) => mid,\n\n None => return Self::unauthorized(),\n\n };\n\n\n\n let (ty, token) = authorization.split_at(mid);\n\n\n\n if ty.eq_ignore_ascii_case(\"bearer\") {\n\n match token.trim().parse() {\n\n Ok(token) => futures_util::future::ok(Identity { token }),\n\n _ => Self::unauthorized(),\n\n }\n\n } else {\n\n Self::unauthorized()\n\n }\n\n }\n\n}\n", "file_path": "api/src/identity.rs", "rank": 13, "score": 35436.86348702325 }, { "content": "\n\nimpl actix_web::FromRequest for Identity {\n\n type Config = ();\n\n type Error = oxfeed_common::Error;\n\n type Future = futures_util::future::Ready<oxfeed_common::Result<Self>>;\n\n\n\n #[inline]\n\n fn from_request(\n\n request: &actix_web::web::HttpRequest,\n\n _: &mut actix_web::dev::Payload,\n\n ) -> Self::Future {\n\n let authorization = match request\n\n .headers()\n\n .get(\"Authorization\")\n\n .map(|x| x.to_str().ok())\n\n .flatten()\n\n {\n\n Some(authorization) => authorization,\n\n None => return Self::unauthorized(),\n\n };\n", "file_path": "api/src/identity.rs", "rank": 14, "score": 35436.545556607976 }, { "content": " pub fn to_sql(self) -> String {\n\n format!(\n\n \"offset {} fetch first {} rows only\",\n\n (self.page - 1) * self.limit,\n\n self.limit,\n\n )\n\n }\n\n\n\n pub fn to_query(self) -> String {\n\n format!(\"page={}&limit={}\", self.page, self.limit)\n\n }\n\n}\n\n\n\nimpl Default for Pagination {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n", "file_path": "common/src/pagination.rs", "rank": 15, "score": 35405.58016406835 }, { "content": "#[derive(Clone, Copy, Eq, PartialEq, 
serde::Deserialize)]\n\npub struct Pagination {\n\n #[serde(default = \"default_page\", deserialize_with = \"parse\")]\n\n pub page: usize,\n\n #[serde(default = \"default_limit\", deserialize_with = \"parse\")]\n\n pub limit: usize,\n\n}\n\n\n", "file_path": "common/src/pagination.rs", "rank": 16, "score": 35402.46777611222 }, { "content": "use actix_web::web::{Data, Json, Path};\n\nuse oxfeed_common::item::Model as ItemModel;\n\nuse oxfeed_common::source::Model;\n\n\n\npub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/sources\")\n\n .service(get)\n\n .service(delete)\n\n .service(update)\n\n .service(all)\n\n .service(create)\n\n}\n\n\n\n#[actix_web::get(\"\")]\n\nasync fn all(\n\n elephantry: Data<elephantry::Pool>,\n\n pagination: actix_web::web::Query<oxfeed_common::Pagination>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n fetch(\n", "file_path": "api/src/services/source.rs", "rank": 18, "score": 33249.94024475441 }, { "content": "use oxfeed_common::webhook::Model;\n\n\n\npub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/webhooks\")\n\n .service(all)\n\n .service(create)\n\n .service(delete)\n\n .service(update)\n\n}\n\n\n\n#[actix_web::get(\"\")]\n\nasync fn all(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let token = identity.token(&elephantry)?;\n\n let model = elephantry.model::<Model>();\n\n let items = model.all(&token)?;\n\n let response = actix_web::HttpResponse::Ok().json(items);\n\n\n", "file_path": "api/src/services/webhook.rs", "rank": 20, "score": 33242.71518960224 }, { "content": "use actix_web::web::Data;\n\n\n\npub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/opml\")\n\n .service(export)\n\n .service(import)\n\n}\n\n\n\n#[actix_web::post(\"\")]\n\nasync fn import(\n\n elephantry: Data<elephantry::Pool>,\n\n xml: String,\n\n 
identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let token = identity.token(&elephantry)?;\n\n\n\n let user = match elephantry\n\n .model::<oxfeed_common::user::Model>()\n\n .find_from_token(&token)\n\n {\n", "file_path": "api/src/services/opml.rs", "rank": 21, "score": 33240.51654037796 }, { "content": "pub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/tags\").service(all)\n\n}\n\n\n\n#[actix_web::get(\"\")]\n\nasync fn all(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n pagination: actix_web::web::Query<oxfeed_common::Pagination>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let token = identity.token(&elephantry)?;\n\n\n\n let mut clause = elephantry::Where::new();\n\n clause.and_where(\"\\\"user\\\".token = $*\", vec![&token]);\n\n let params = clause.params();\n\n\n\n let query = format!(\n\n r#\"\n", "file_path": "api/src/services/tag.rs", "rank": 22, "score": 33240.091752622386 }, { "content": " &elephantry,\n\n &identity,\n\n &elephantry::Where::new(),\n\n &pagination,\n\n )\n\n}\n\n\n\npub(crate) fn fetch(\n\n elephantry: &elephantry::Pool,\n\n identity: &crate::Identity,\n\n filter: &elephantry::Where,\n\n pagination: &oxfeed_common::Pagination,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let model = elephantry.model::<Model>();\n\n let token = identity.token(&elephantry)?;\n\n let sources = model.all(&token, filter, pagination)?;\n\n let response = actix_web::HttpResponse::Ok().json(sources);\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/source.rs", "rank": 25, "score": 33238.7952723497 }, { "content": " None => actix_web::HttpResponse::NoContent().finish(),\n\n };\n\n\n\n Ok(response)\n\n}\n\n\n\n#[actix_web::put(\"/{source_id}\")]\n\nasync fn update(\n\n elephantry: Data<elephantry::Pool>,\n\n mut data: Json<crate::form::Source>,\n\n path: Path<uuid::Uuid>,\n\n identity: crate::Identity,\n\n) -> 
oxfeed_common::Result<actix_web::HttpResponse> {\n\n use std::convert::TryInto;\n\n\n\n let token = identity.token(&elephantry)?;\n\n\n\n let user = match elephantry\n\n .model::<oxfeed_common::user::Model>()\n\n .find_from_token(&token)\n", "file_path": "api/src/services/source.rs", "rank": 26, "score": 33238.659667621505 }, { "content": "\n\n Ok(response)\n\n}\n\n\n\n#[actix_web::put(\"/{source_id}\")]\n\nasync fn update(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n mut data: actix_web::web::Json<crate::form::Webhook>,\n\n path: actix_web::web::Path<uuid::Uuid>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n use std::convert::TryInto;\n\n\n\n let token = identity.token(&elephantry)?;\n\n\n\n let user = match elephantry\n\n .model::<oxfeed_common::user::Model>()\n\n .find_from_token(&token)\n\n {\n\n Some(user) => user,\n", "file_path": "api/src/services/webhook.rs", "rank": 27, "score": 33238.24483129125 }, { "content": "use oxfeed_common::new_user::{Entity, Model};\n\n\n\npub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/users\").service(create)\n\n}\n\n\n\n#[actix_web::post(\"\")]\n\nasync fn create(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n data: actix_web::web::Json<Entity>,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n elephantry.insert_one::<Model>(&data.into_inner())?;\n\n let response = actix_web::HttpResponse::NoContent().finish();\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/user.rs", "rank": 29, "score": 33237.18511618815 }, { "content": "use actix::ActorContext;\n\nuse actix_web_actors::ws;\n\n\n\n/// How often heartbeat pings are sent\n\nconst HEARTBEAT_INTERVAL: std::time::Duration = std::time::Duration::from_secs(5);\n\n/// How long before lack of client response causes a timeout\n\nconst CLIENT_TIMEOUT: std::time::Duration = std::time::Duration::from_secs(10);\n\n\n\npub(crate) fn scope() -> actix_web::Scope {\n\n 
actix_web::web::scope(\"/ws\").service(websocket)\n\n}\n\n\n\n#[actix_web::get(\"\")]\n\nasync fn websocket(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n identity: actix_web::web::Query<crate::Identity>,\n\n request: actix_web::HttpRequest,\n\n stream: actix_web::web::Payload,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let token = identity.token(&elephantry)?;\n\n let websocket = Websocket::new(elephantry.into_inner(), token);\n\n let response = ws::start(websocket, &request, stream)?;\n\n\n\n Ok(response)\n\n}\n\n\n", "file_path": "api/src/services/websocket.rs", "rank": 30, "score": 33236.37232308197 }, { "content": " identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n search(&elephantry, &identity, &elephantry::Where::new(), &query)\n\n}\n\n\n\n#[actix_web::get(\"/favorites\")]\n\nasync fn favorites(\n\n elephantry: Data<elephantry::Pool>,\n\n query: Query<Request>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let clause = elephantry::Where::from(\"favorite\", Vec::new());\n\n search(&elephantry, &identity, &clause, &query)\n\n}\n\n\n\n#[actix_web::get(\"/unread\")]\n\nasync fn unread(\n\n elephantry: Data<elephantry::Pool>,\n\n query: Query<Request>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let clause = elephantry::Where::from(\"not read\", Vec::new());\n\n search(&elephantry, &identity, &clause, &query)\n\n}\n\n\n", "file_path": "api/src/services/search.rs", "rank": 31, "score": 33236.365085042664 }, { "content": "pub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/auth\")\n\n .service(login)\n\n .service(logout)\n\n}\n\n\n\n#[actix_web::post(\"/login\")]\n\nasync fn login(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n token: actix_web::web::Json<String>,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let secret = crate::env(\"SECRET\")?;\n\n\n\n 
use hmac::NewMac;\n\n use jwt::VerifyWithKey;\n\n\n\n let key: hmac::Hmac<sha2::Sha256> = hmac::Hmac::new_from_slice(secret.as_bytes()).unwrap();\n\n let claims: std::collections::BTreeMap<String, String> = token.verify_with_key(&key)?;\n\n\n\n if claims.get(\"email\").is_none() || claims.get(\"password\").is_none() {\n", "file_path": "api/src/services/auth.rs", "rank": 32, "score": 33236.2461230336 }, { "content": "\n\n data.user_id = Some(user.id);\n\n let webhook = elephantry.insert_one::<Model>(&data.into_inner().try_into()?)?;\n\n let response = actix_web::HttpResponse::Ok().json(webhook);\n\n\n\n Ok(response)\n\n}\n\n\n\n#[actix_web::delete(\"/{webhook_id}\")]\n\nasync fn delete(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n path: actix_web::web::Path<uuid::Uuid>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let webhook_id = path.into_inner();\n\n let token = identity.token(&elephantry)?;\n\n let response = match elephantry.model::<Model>().delete(&token, &webhook_id)? {\n\n Some(webhook) => actix_web::HttpResponse::Ok().json(webhook),\n\n None => actix_web::HttpResponse::NoContent().finish(),\n\n };\n", "file_path": "api/src/services/webhook.rs", "rank": 33, "score": 33236.15536886347 }, { "content": "\n\n#[actix_web::delete(\"/{source_id}\")]\n\nasync fn delete(\n\n elephantry: Data<elephantry::Pool>,\n\n path: Path<uuid::Uuid>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let source_id = path.into_inner();\n\n\n\n let token = identity.token(&elephantry)?;\n\n\n\n let source = match elephantry.model::<Model>().one(&token, &source_id)? {\n\n Some(source) => source,\n\n None => return Ok(actix_web::HttpResponse::NoContent().finish()),\n\n };\n\n\n\n elephantry.delete_where::<ItemModel>(\"source_id = $*\", &[&source_id])?;\n\n\n\n let response = match elephantry.delete_one::<Model>(&source)? 
{\n\n Some(source) => actix_web::HttpResponse::Ok().json(source),\n", "file_path": "api/src/services/source.rs", "rank": 34, "score": 33236.08654258036 }, { "content": "pub(crate) mod auth;\n\npub(crate) mod icon;\n\npub(crate) mod item;\n\npub(crate) mod opml;\n\npub(crate) mod search;\n\npub(crate) mod source;\n\npub(crate) mod tag;\n\npub(crate) mod user;\n\npub(crate) mod webhook;\n\npub(crate) mod websocket;\n\n\n\npub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/\").service(counts)\n\n}\n\n\n\n#[actix_web::get(\"/counts\")]\n\nasync fn counts(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let token = identity.token(&elephantry)?;\n\n\n\n let sql = include_str!(\"../../sql/counts.sql\");\n\n let counts = elephantry.query_one::<oxfeed_common::Counts>(sql, &[&token])?;\n\n let response = actix_web::HttpResponse::Ok().json(counts);\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/mod.rs", "rank": 35, "score": 33235.49565346746 }, { "content": " let source = elephantry.insert_one::<Model>(&data.into_inner().try_into()?)?;\n\n let response = actix_web::HttpResponse::Ok().json(source);\n\n\n\n Ok(response)\n\n}\n\n\n\n#[actix_web::get(\"/{source_id}\")]\n\nasync fn get(\n\n elephantry: Data<elephantry::Pool>,\n\n source_id: Path<uuid::Uuid>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let token = identity.token(&elephantry)?;\n\n let response = match elephantry.model::<Model>().one(&source_id, &token)? 
{\n\n Some(source) => actix_web::HttpResponse::Ok().json(source),\n\n None => actix_web::HttpResponse::NotFound().finish(),\n\n };\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/source.rs", "rank": 36, "score": 33235.4735279253 }, { "content": "\n\n#[actix_web::post(\"\")]\n\nasync fn create(\n\n elephantry: Data<elephantry::Pool>,\n\n mut data: Json<crate::form::Source>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n use std::convert::TryInto;\n\n\n\n let token = identity.token(&elephantry)?;\n\n\n\n let user = match elephantry\n\n .model::<oxfeed_common::user::Model>()\n\n .find_from_token(&token)\n\n {\n\n Some(user) => user,\n\n None => return Ok(actix_web::HttpResponse::Unauthorized().finish()),\n\n };\n\n\n\n data.user_id = Some(user.id);\n", "file_path": "api/src/services/source.rs", "rank": 37, "score": 33235.441712638785 }, { "content": " Ok(response)\n\n}\n\n\n\n#[actix_web::post(\"\")]\n\nasync fn create(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n mut data: actix_web::web::Json<crate::form::Webhook>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n use std::convert::TryInto;\n\n\n\n let token = identity.token(&elephantry)?;\n\n\n\n let user = match elephantry\n\n .model::<oxfeed_common::user::Model>()\n\n .find_from_token(&token)\n\n {\n\n Some(user) => user,\n\n None => return Ok(actix_web::HttpResponse::Unauthorized().finish()),\n\n };\n", "file_path": "api/src/services/webhook.rs", "rank": 38, "score": 33235.18457637486 }, { "content": ") -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let token = identity.token(&elephantry)?;\n\n let sql = include_str!(\"../../sql/search_tags.sql\");\n\n let q = query.q.as_ref().map(|x| format!(\"^{}\", x));\n\n\n\n let pager = count::<String>(&elephantry, sql, &[&token, &q], &query.pagination)?;\n\n let response = actix_web::HttpResponse::Ok().json(pager);\n\n\n\n 
Ok(response)\n\n}\n\n\n\n#[actix_web::get(\"/sources\")]\n\nasync fn sources(\n\n elephantry: Data<elephantry::Pool>,\n\n query: Query<Request>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let q = query.q.clone().unwrap_or_else(|| \".*\".to_string());\n\n\n\n let mut clause = elephantry::Where::builder()\n", "file_path": "api/src/services/search.rs", "rank": 39, "score": 33234.58097221314 }, { "content": " clause.and_where(\"token = $*\", vec![&token]);\n\n\n\n sql.push_str(&format!(\"where {}\\n\", clause.to_string()));\n\n\n\n if query.q.is_some() {\n\n sql.push_str(\"order by ts_rank_cd(f.document, websearch_to_tsquery($1)) desc\\n\");\n\n }\n\n\n\n let pager =\n\n count::<oxfeed_common::item::Item>(&elephantry, &sql, &clause.params(), &query.pagination)?;\n\n let response = actix_web::HttpResponse::Ok().json(pager);\n\n\n\n Ok(response)\n\n}\n\n\n\n#[actix_web::get(\"/tags\")]\n\nasync fn tags(\n\n elephantry: Data<elephantry::Pool>,\n\n query: Query<Request>,\n\n identity: crate::Identity,\n", "file_path": "api/src/services/search.rs", "rank": 40, "score": 33234.40603917805 }, { "content": " .and_where(\"source.title ~* $*\", vec![&q])\n\n .or_where(\"source.url ~* $*\", vec![&q])\n\n .build();\n\n\n\n if let Some(tag) = &query.tag {\n\n clause.and_where(\"$* = any(source.tags)\", vec![tag]);\n\n }\n\n\n\n super::source::fetch(&elephantry, &identity, &clause, &query.pagination)\n\n}\n\n\n", "file_path": "api/src/services/search.rs", "rank": 41, "score": 33233.55785604089 }, { "content": "const EMPTY_IMG: [u8; 43] = [\n\n 71, 73, 70, 56, 57, 97, 1, 0, 1, 0, 128, 0, 0, 255, 255, 255, 255, 255, 255, 33, 249, 4, 1, 10,\n\n 0, 1, 0, 44, 0, 0, 0, 0, 1, 0, 1, 0, 0, 2, 2, 76, 1, 0, 59,\n\n];\n\n\n\npub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/icons\").service(icon)\n\n}\n\n\n\n#[actix_web::get(\"/{url:.*}\")]\n\nasync fn icon(url: actix_web::web::Path<String>) -> 
oxfeed_common::Result<actix_web::HttpResponse> {\n\n let url = base64::decode(url.into_inner())?;\n\n let icon = String::from_utf8(url)?;\n\n\n\n let body = match crate::cache::get(&icon) {\n\n Ok(body) => body,\n\n Err(_) => EMPTY_IMG.to_vec(),\n\n };\n\n\n\n let mut mime = tree_magic::from_u8(&body);\n", "file_path": "api/src/services/icon.rs", "rank": 42, "score": 33231.18708088325 }, { "content": " return Ok(actix_web::HttpResponse::BadRequest().finish());\n\n }\n\n\n\n let sql = include_str!(\"../../sql/login.sql\");\n\n let token = match elephantry\n\n .query::<uuid::Uuid>(sql, &[&claims[\"email\"], &claims[\"password\"]])?\n\n .try_get(0)\n\n {\n\n Some(token) => token,\n\n None => return Ok(actix_web::HttpResponse::Forbidden().finish()),\n\n };\n\n\n\n let response = actix_web::HttpResponse::Ok().body(&token.to_string());\n\n\n\n Ok(response)\n\n}\n\n\n\n#[actix_web::post(\"/logout\")]\n\nasync fn logout(\n\n elephantry: actix_web::web::Data<elephantry::Pool>,\n\n identity: crate::Identity,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let token = identity.token(&elephantry)?;\n\n let sql = include_str!(\"../../sql/logout.sql\");\n\n elephantry.query_one::<()>(sql, &[&token])?;\n\n\n\n Ok(actix_web::HttpResponse::NoContent().finish())\n\n}\n", "file_path": "api/src/services/auth.rs", "rank": 44, "score": 33229.81613147201 }, { "content": " None => return Ok(actix_web::HttpResponse::Unauthorized().finish()),\n\n };\n\n\n\n data.user_id = Some(user.id);\n\n let webhook_id = Some(path.into_inner());\n\n let pk = elephantry::pk!(webhook_id);\n\n let webhook = elephantry.update_one::<Model>(&pk, &data.into_inner().try_into()?)?;\n\n\n\n let response = match webhook {\n\n Some(webhook) => actix_web::HttpResponse::Ok().json(webhook),\n\n None => actix_web::HttpResponse::NotFound().finish(),\n\n };\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/webhook.rs", "rank": 45, "score": 33229.12058615554 }, { "content": " {\n\n Some(user) => 
user,\n\n None => return Ok(actix_web::HttpResponse::Unauthorized().finish()),\n\n };\n\n\n\n data.user_id = Some(user.id);\n\n let source_id = Some(path.into_inner());\n\n let pk = elephantry::pk!(source_id);\n\n let source = elephantry.update_one::<Model>(&pk, &data.into_inner().try_into()?)?;\n\n\n\n let response = match source {\n\n Some(source) => actix_web::HttpResponse::Ok().json(source),\n\n None => actix_web::HttpResponse::NotFound().finish(),\n\n };\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/source.rs", "rank": 46, "score": 33228.99671795506 }, { "content": "\n\n ..Default::default()\n\n };\n\n\n\n Ok(entity)\n\n}\n\n\n\n#[actix_web::get(\"\")]\n\nasync fn export(\n\n elephantry: Data<elephantry::Pool>,\n\n) -> oxfeed_common::Result<actix_web::HttpResponse> {\n\n let mut opml = opml::OPML::default();\n\n\n\n let feeds = elephantry.query::<(String, String)>(\"select (title, url) from source\", &[])?;\n\n\n\n for (title, url) in feeds {\n\n opml.add_feed(&title, &url);\n\n }\n\n\n\n let response = actix_web::HttpResponse::Ok()\n", "file_path": "api/src/services/opml.rs", "rank": 48, "score": 33227.348368080384 }, { "content": "use actix_web::web::{Data, Query};\n\n\n\n#[derive(serde::Deserialize)]\n", "file_path": "api/src/services/search.rs", "rank": 49, "score": 33224.75322395621 }, { "content": " if let Err(err) = actor.notify(context) {\n\n log::error!(\"{}\", err);\n\n }\n\n });\n\n }\n\n\n\n fn ping(&self, context: &mut <Self as actix::Actor>::Context) {\n\n if std::time::Instant::now().duration_since(self.hb) > CLIENT_TIMEOUT {\n\n log::warn!(\"Websocket Client heartbeat failed, disconnecting!\");\n\n context.stop();\n\n return;\n\n }\n\n\n\n context.ping(b\"\");\n\n }\n\n\n\n fn notify(&self, context: &mut <Self as actix::Actor>::Context) -> elephantry::Result {\n\n while let Some(notify) = self.elephantry.notifies()? 
{\n\n if notify.extra() == self.token.to_string() {\n\n context.text(notify.relname());\n", "file_path": "api/src/services/websocket.rs", "rank": 50, "score": 33224.22145527675 }, { "content": " Some(user) => user,\n\n None => return Ok(actix_web::HttpResponse::Unauthorized().finish()),\n\n };\n\n\n\n let opml = opml::OPML::from_str(&xml).unwrap();\n\n\n\n for outline in opml.body.outlines {\n\n save(&elephantry, &outline, &user);\n\n }\n\n\n\n let response = actix_web::HttpResponse::NoContent().finish();\n\n\n\n Ok(response)\n\n}\n\n\n", "file_path": "api/src/services/opml.rs", "rank": 51, "score": 33223.796103999004 }, { "content": " }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl actix::Actor for Websocket {\n\n type Context = ws::WebsocketContext<Self>;\n\n\n\n fn started(&mut self, context: &mut Self::Context) {\n\n match self.elephantry.listen(\"item_new\") {\n\n Ok(_) => (),\n\n Err(err) => log::error!(\"Unable to listen postgresql: {}\", err),\n\n }\n\n\n\n self.hb(context);\n\n }\n\n\n\n fn stopped(&mut self, _: &mut Self::Context) {\n", "file_path": "api/src/services/websocket.rs", "rank": 52, "score": 33223.685162754446 }, { "content": " self.elephantry.unlisten(\"item_new\").ok();\n\n }\n\n}\n\n\n\nimpl actix::StreamHandler<Result<ws::Message, ws::ProtocolError>> for Websocket {\n\n fn handle(&mut self, msg: Result<ws::Message, ws::ProtocolError>, context: &mut Self::Context) {\n\n match msg {\n\n Ok(ws::Message::Ping(msg)) => {\n\n self.hb = std::time::Instant::now();\n\n context.pong(&msg);\n\n }\n\n Ok(ws::Message::Pong(_)) => self.hb = std::time::Instant::now(),\n\n Ok(ws::Message::Close(reason)) => {\n\n context.close(reason);\n\n context.stop();\n\n }\n\n _ => context.stop(),\n\n }\n\n }\n\n}\n", "file_path": "api/src/services/websocket.rs", "rank": 53, "score": 33223.102790240766 }, { "content": " .header(\"Content-Type\", \"text/xml; charset=utf-8\")\n\n .header(\n\n \"Content-Disposition\",\n\n \"attachment; 
filename=\\\"oxfeed-subscriptions.xml\\\"\",\n\n )\n\n .body(opml.to_string()?);\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/opml.rs", "rank": 54, "score": 33221.02383452111 }, { "content": " if mime == \"text/plain\" {\n\n mime = \"image/svg+xml\".to_string();\n\n }\n\n\n\n let response = actix_web::HttpResponse::Ok()\n\n .header(\"Content-Type\", mime)\n\n .header(\"Cache-Control\", \"public, max-age=604800, immutable\")\n\n .body(body);\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/icon.rs", "rank": 55, "score": 33221.02383452111 }, { "content": "struct Websocket {\n\n elephantry: std::sync::Arc<elephantry::Pool>,\n\n hb: std::time::Instant,\n\n token: uuid::Uuid,\n\n}\n\n\n\nimpl Websocket {\n\n fn new(elephantry: std::sync::Arc<elephantry::Pool>, token: uuid::Uuid) -> Self {\n\n Self {\n\n elephantry,\n\n hb: std::time::Instant::now(),\n\n token,\n\n }\n\n }\n\n\n\n fn hb(&self, context: &mut <Self as actix::Actor>::Context) {\n\n use actix::AsyncContext;\n\n\n\n context.run_interval(HEARTBEAT_INTERVAL, |actor, context| {\n\n actor.ping(context);\n", "file_path": "api/src/services/websocket.rs", "rank": 56, "score": 31895.978073936014 }, { "content": "#[derive(serde::Deserialize)]\n\nstruct Request {\n\n q: Option<String>,\n\n tag: Option<String>,\n\n #[serde(flatten)]\n\n pagination: oxfeed_common::Pagination,\n\n}\n\n\n\npub(crate) fn scope() -> actix_web::Scope {\n\n actix_web::web::scope(\"/search\")\n\n .service(all)\n\n .service(favorites)\n\n .service(unread)\n\n .service(tags)\n\n .service(sources)\n\n}\n\n\n\n#[actix_web::get(\"/all\")]\n\nasync fn all(\n\n elephantry: Data<elephantry::Pool>,\n\n query: Query<Request>,\n", "file_path": "api/src/services/search.rs", "rank": 57, "score": 31895.978073936014 }, { "content": "select unnest(tags) as name, count(*) as count\n\n from source\n\n join \"user\" using (user_id)\n\n where {}\n\n group by name\n\n order by name\n\n {}\n\n \"#,\n\n clause.to_string(),\n\n 
pagination.to_sql(),\n\n );\n\n\n\n let rows = elephantry.query::<oxfeed_common::Tag>(&query, &params)?;\n\n\n\n let response = actix_web::HttpResponse::Ok().json(rows);\n\n\n\n Ok(response)\n\n}\n", "file_path": "api/src/services/tag.rs", "rank": 58, "score": 27507.368894406856 }, { "content": "create index if not exists fts_item_document on fts.item using gin(document);\n\n\n\ncommit;\n", "file_path": "api/sql/structure.sql", "rank": 59, "score": 24745.390658583903 }, { "content": " let elephantry = elephantry::Pool::new(&database_url)?;\n\n\n\n let update = update::Actor::new(&elephantry);\n\n let actor = actix_web::web::Data::new(update.start());\n\n\n\n actix_web::HttpServer::new(move || {\n\n let cors = actix_cors::Cors::permissive();\n\n\n\n actix_web::App::new()\n\n .wrap(actix_web::middleware::NormalizePath::new(\n\n actix_web::middleware::normalize::TrailingSlash::Trim,\n\n ))\n\n .data(actor.clone())\n\n .data(elephantry.clone())\n\n .wrap(cors)\n\n .service(services::auth::scope())\n\n .service(services::icon::scope())\n\n .service(services::item::scope())\n\n .service(services::opml::scope())\n\n .service(services::search::scope())\n", "file_path": "api/src/main.rs", "rank": 60, "score": 14.550358985309806 }, { "content": "mod errors;\n\npub mod item;\n\npub mod new_user;\n\npub mod source;\n\nmod tag;\n\npub mod user;\n\npub mod webhook;\n\n\n\nmod pagination;\n\n\n\npub use errors::*;\n\npub use pagination::*;\n\npub use tag::*;\n\n\n\n#[derive(Clone, serde::Deserialize, serde::Serialize)]\n\n#[cfg_attr(feature = \"elephantry\", derive(elephantry::Entity))]\n\npub struct Counts {\n\n pub all: i64,\n\n pub favorites: i64,\n\n pub sources: i64,\n\n pub tags: i64,\n\n pub unread: i64,\n\n}\n", "file_path": "common/src/lib.rs", "rank": 61, "score": 12.125192175625289 }, { "content": "use std::collections::HashMap;\n\n\n\npub(crate) struct Location {\n\n router: yew_router::service::RouteService<()>,\n\n}\n\n\n\nimpl Location {\n\n pub fn new() -> Self 
{\n\n let router = yew_router::service::RouteService::<()>::new();\n\n\n\n Self { router }\n\n }\n\n\n\n pub fn path(&self) -> String {\n\n self.router.get_path()\n\n }\n\n\n\n pub fn set_path(&mut self, path: &str) {\n\n use yew::agent::Dispatched;\n\n\n", "file_path": "front/src/location.rs", "rank": 62, "score": 12.083817738897965 }, { "content": "mod cache;\n\nmod form;\n\nmod identity;\n\nmod services;\n\nmod update;\n\n\n\nuse identity::*;\n\n\n\n#[actix_web::main]\n\nasync fn main() -> oxfeed_common::Result {\n\n #[cfg(debug_assertions)]\n\n dotenv::dotenv().ok();\n\n\n\n env_logger::init();\n\n\n\n let database_url = env(\"DATABASE_URL\")?;\n\n let ip = env(\"LISTEN_IP\")?;\n\n let port = env(\"LISTEN_PORT\")?;\n\n let bind = format!(\"{}:{}\", ip, port);\n\n\n", "file_path": "api/src/main.rs", "rank": 63, "score": 10.978532466678727 }, { "content": " .service(services::source::scope())\n\n .service(services::tag::scope())\n\n .service(services::user::scope())\n\n .service(services::webhook::scope())\n\n .service(services::websocket::scope())\n\n .service(services::scope())\n\n })\n\n .bind(&bind)?\n\n .run()\n\n .await?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "api/src/main.rs", "rank": 64, "score": 10.16402431179055 }, { "content": "#[derive(Clone, Eq, PartialEq, serde::Deserialize, serde::Serialize)]\n\n#[cfg_attr(feature = \"elephantry\", derive(elephantry::Entity))]\n\npub struct Item {\n\n #[cfg_attr(feature = \"elephantry\", elephantry(column = \"item_id\"))]\n\n pub id: uuid::Uuid,\n\n pub link: String,\n\n pub published: chrono::DateTime<chrono::offset::Utc>,\n\n pub title: String,\n\n pub source: String,\n\n pub icon: Option<String>,\n\n pub read: bool,\n\n pub favorite: bool,\n\n pub tags: Vec<String>,\n\n}\n\n\n\nimpl From<&Item> for std::result::Result<std::string::String, anyhow::Error> {\n\n fn from(item: &Item) -> Self {\n\n let json = serde_json::to_string(item)?;\n\n\n\n Ok(json)\n", "file_path": "common/src/item.rs", "rank": 65, "score": 
8.929004071934724 }, { "content": "use oxfeed_common::item::Entity as Item;\n\nuse oxfeed_common::item::Model as ItemModel;\n\nuse oxfeed_common::source::Entity as Source;\n\nuse oxfeed_common::source::Model as SourceModel;\n\nuse oxfeed_common::webhook::Entity as Webhook;\n\nuse oxfeed_common::webhook::Model as WebhookModel;\n\nuse rayon::iter::{IntoParallelRefIterator, ParallelIterator};\n\n\n\npub(crate) struct Actor {\n\n elephantry: elephantry::Pool,\n\n}\n\n\n\nimpl Actor {\n\n pub fn new(elephantry: &elephantry::Pool) -> Self {\n\n Self {\n\n elephantry: elephantry.clone(),\n\n }\n\n }\n\n\n\n pub fn start(self) -> actix::Addr<Self> {\n", "file_path": "api/src/update.rs", "rank": 66, "score": 8.690531318993841 }, { "content": " },\n\n \"favorites\" => yew::html! {\n\n <super::Items kind=\"favorites\" filter=filter pagination=self.pagination />\n\n },\n\n \"unread\" => yew::html! {\n\n <super::Items kind=\"unread\" filter=filter pagination=self.pagination />\n\n },\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n crate::change!(kind, pagination);\n\n}\n", "file_path": "front/src/components/search/mod.rs", "rank": 67, "score": 8.683696654797028 }, { "content": "}\n\n\n\n#[cfg(feature = \"elephantry\")]\n\nimpl<'a> Model<'a> {\n\n pub fn all(\n\n &self,\n\n token: &uuid::Uuid,\n\n filter: &elephantry::Where,\n\n pagination: &crate::Pagination,\n\n ) -> elephantry::Result<elephantry::Pager<Entity>> {\n\n let mut clause = filter.clone();\n\n clause.and_where(\"\\\"user\\\".token = $*\", vec![token]);\n\n let params = clause.params();\n\n\n\n let query = format!(\n\n r#\"\n", "file_path": "common/src/source.rs", "rank": 68, "score": 8.143649018330574 }, { "content": " let url = format!(\n\n \"/search/{}?{}&{}\",\n\n what,\n\n filter.to_url_param(),\n\n pagination.to_query()\n\n );\n\n\n\n let kind = match what {\n\n \"all\" | \"unread\" | \"favorites\" => Kind::SearchItems,\n\n \"sources\" => Kind::SearchSources,\n\n \"tags\" => Kind::SearchTags,\n\n _ => {\n\n 
log::error!(\"Unknow '{}' search type\", what);\n\n unreachable!();\n\n }\n\n };\n\n\n\n self.fetch(kind, http::Method::GET, &url, yew::format::Nothing)\n\n }\n\n\n", "file_path": "front/src/api/mod.rs", "rank": 69, "score": 8.12958571123377 }, { "content": "#[derive(Clone, serde::Deserialize, serde::Serialize)]\n\n#[cfg_attr(feature = \"elephantry\", derive(elephantry::Entity))]\n\n#[cfg_attr(\n\n feature = \"elephantry\",\n\n elephantry(model = \"Model\", structure = \"Structure\", relation = \"public.user\")\n\n)]\n\npub struct Entity {\n\n pub email: String,\n\n pub password: String,\n\n}\n\n\n\nimpl From<&Entity> for std::result::Result<std::string::String, anyhow::Error> {\n\n fn from(entity: &Entity) -> Self {\n\n let json = serde_json::to_string(entity)?;\n\n\n\n Ok(json)\n\n }\n\n}\n", "file_path": "common/src/new_user.rs", "rank": 70, "score": 7.908189891575159 }, { "content": " actix::Supervisor::start(|_| self)\n\n }\n\n\n\n fn run(&self) {\n\n if let Err(error) = Task::run(&self.elephantry) {\n\n log::error!(\"{}\", error);\n\n }\n\n }\n\n}\n\n\n\nimpl actix::Actor for Actor {\n\n type Context = actix::Context<Self>;\n\n\n\n fn started(&mut self, ctx: &mut Self::Context) {\n\n use actix::AsyncContext;\n\n\n\n let minutes = crate::env(\"UPDATE_INTERVAL\")\n\n .unwrap_or_else(|_| \"20\".to_string())\n\n .parse()\n\n .unwrap_or(20);\n", "file_path": "api/src/update.rs", "rank": 71, "score": 7.823585704431526 }, { "content": "\n\n fn fetch<B>(&mut self, kind: Kind, method: http::Method, url: &str, body: B)\n\n where\n\n B: Into<Result<String, anyhow::Error>>,\n\n {\n\n let request = match yew::services::fetch::Request::builder()\n\n .method(method)\n\n .uri(&format!(\"{}{}\", env!(\"API_URL\"), url))\n\n .header(\"Content-Type\", \"application/json\")\n\n .header(\"Authorization\", &format!(\"Bearer {}\", Self::token()))\n\n .body(body)\n\n {\n\n Ok(request) => request,\n\n Err(err) => {\n\n Self::error(err.into());\n\n return;\n\n }\n\n };\n\n\n\n let 
callback = self.link.batch_callback(\n", "file_path": "front/src/api/mod.rs", "rank": 72, "score": 7.7307411048077075 }, { "content": " Vec::new()\n\n }\n\n }\n\n },\n\n );\n\n\n\n match yew::services::FetchService::fetch(request, callback) {\n\n Ok(task) => self.tasks.push(task),\n\n Err(err) => {\n\n Self::error(err.into());\n\n }\n\n };\n\n }\n\n\n\n fn on_response(\n\n kind: Kind,\n\n response: yew::services::fetch::Response<yew::format::Text>,\n\n ) -> oxfeed_common::Result<crate::event::Api> {\n\n if response.status() == http::status::StatusCode::UNAUTHORIZED {\n\n let mut event_bus = crate::event::Bus::dispatcher();\n", "file_path": "front/src/api/mod.rs", "rank": 73, "score": 7.721536446555975 }, { "content": " {\n\n match route {\n\n Route::All => yew::html!{<super::Items kind=\"all\" pagination=pagination />},\n\n Route::Favorites => yew::html!{<super::Items kind=\"favorites\" pagination=pagination />},\n\n Route::Settings => yew::html!{<super::Settings />},\n\n Route::Sources => yew::html!{<super::Sources pagination=pagination />},\n\n Route::Tags => yew::html!{<super::Tags pagination=pagination />},\n\n Route::Unread => yew::html!{<super::Items kind=\"unread\" pagination=pagination />},\n\n Route::Search(kind) => yew::html!{<super::Search kind=kind pagination=pagination />},\n\n Route::NotFound => yew::html!{<super::NotFound />},\n\n Route::Index => unreachable!(),\n\n }\n\n }\n\n </main>\n\n </div>\n\n </div>\n\n </>\n\n }\n\n })\n\n />\n\n }\n\n }\n\n\n\n crate::change!();\n\n}\n", "file_path": "front/src/components/app.rs", "rank": 74, "score": 7.69706967324997 }, { "content": " pub published: Option<chrono::DateTime<chrono::offset::Utc>>,\n\n pub icon: Option<String>,\n\n}\n\n\n\n#[cfg(feature = \"elephantry\")]\n\nimpl<'a> Model<'a> {\n\n pub fn all(\n\n &self,\n\n token: &uuid::Uuid,\n\n filter: &elephantry::Where,\n\n pagination: &crate::Pagination,\n\n ) -> elephantry::Result<elephantry::Pager<Item>> {\n\n let mut clause = filter.clone();\n\n 
clause.and_where(\"\\\"user\\\".token = $*\", vec![token]);\n\n let params = clause.params();\n\n\n\n let query = format!(\n\n r#\"\n", "file_path": "common/src/item.rs", "rank": 75, "score": 7.609767798143511 }, { "content": " ) {\n\n log::error!(\"{}\", err);\n\n }\n\n });\n\n\n\n elephantry.execute(\"refresh materialized view concurrently fts.item\")?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn fetch(elephantry: &elephantry::Connection, source: &Source) -> oxfeed_common::Result {\n\n log::info!(\"Fetching {}\", source.url);\n\n\n\n let webhooks = elephantry\n\n .find_where::<WebhookModel>(\"webhook_id = any($*)\", &[&source.webhooks], None)?\n\n .into_vec();\n\n\n\n let contents = attohttpc::RequestBuilder::try_new(attohttpc::Method::GET, &source.url)?\n\n .send()?\n\n .text()?;\n", "file_path": "api/src/update.rs", "rank": 76, "score": 7.515139326111312 }, { "content": " }\n\n\n\n pub fn items_tag(&mut self, id: &uuid::Uuid, key: &str, value: bool) {\n\n let url = format!(\"/items/{}\", id);\n\n\n\n let json = serde_json::json!({\n\n key: value,\n\n });\n\n\n\n self.fetch(\n\n super::Kind::ItemPatch,\n\n http::Method::PATCH,\n\n &url,\n\n yew::format::Json(&json),\n\n )\n\n }\n\n}\n", "file_path": "front/src/api/items.rs", "rank": 77, "score": 7.433170421889393 }, { "content": "pub(crate) fn get(url: &str) -> oxfeed_common::Result<Vec<u8>> {\n\n let path = path(url);\n\n\n\n let body = if path.exists() {\n\n use std::io::Read;\n\n\n\n let mut content = Vec::new();\n\n let mut file = std::fs::File::open(&path)?;\n\n file.read_to_end(&mut content)?;\n\n\n\n content\n\n } else {\n\n use std::io::Write;\n\n\n\n let content = attohttpc::RequestBuilder::try_new(attohttpc::Method::GET, url)?\n\n .send()?\n\n .bytes()?;\n\n std::fs::create_dir_all(path.parent().unwrap())?;\n\n let mut file = std::fs::File::create(&path)?;\n\n file.write_all(&content)?;\n\n\n\n content\n\n };\n\n\n\n Ok(body)\n\n}\n\n\n", "file_path": "api/src/cache.rs", "rank": 78, "score": 7.2630022317553316 }, 
{ "content": " move |response: yew::services::fetch::Response<yew::format::Text>| {\n\n use std::convert::TryFrom;\n\n\n\n let event = match Self::on_response(kind, response) {\n\n Ok(event) => event,\n\n Err(err) => {\n\n if err.to_string() != \"AbortError: The operation was aborted. \" {\n\n Self::error(err);\n\n }\n\n return Vec::new();\n\n }\n\n };\n\n\n\n let mut event_bus = crate::event::Bus::dispatcher();\n\n event_bus.send(crate::event::Event::Api(event.clone()));\n\n\n\n match <C as yew::Component>::Message::try_from(event) {\n\n Ok(message) => vec![message],\n\n Err(_) => {\n\n log::error!(\"fetch error\");\n", "file_path": "front/src/api/mod.rs", "rank": 79, "score": 7.242342054401506 }, { "content": "impl<C> super::Api<C>\n\nwhere\n\n C: yew::Component,\n\n <C as yew::Component>::Message: From<crate::event::Api>,\n\n{\n\n pub fn sources_all(&mut self, pagination: &oxfeed_common::Pagination) {\n\n let url = format!(\"/sources?{}\", pagination.to_query());\n\n\n\n self.fetch(\n\n super::Kind::Sources,\n\n http::Method::GET,\n\n &url,\n\n yew::format::Nothing,\n\n )\n\n }\n\n\n\n pub fn sources_create(&mut self, source: &oxfeed_common::source::Entity) {\n\n self.fetch(\n\n super::Kind::SourceCreate,\n\n http::Method::POST,\n", "file_path": "front/src/api/sources.rs", "rank": 80, "score": 7.020262105217939 }, { "content": " join \"user\" using(user_id)\n\n where \"user\".token = $1\n\n ),\n\n count_unread as (\n\n select count(*) from user_item where not read\n\n ),\n\n count_all as (\n\n select count(*) from user_item\n\n ),\n\n count_favorites as (\n\n select count(*) from user_item where favorite\n\n )\n", "file_path": "api/sql/counts.sql", "rank": 81, "score": 6.979084254385413 }, { "content": "#[derive(Clone, Default, Eq, PartialEq, serde::Deserialize, serde::Serialize)]\n\n#[cfg_attr(feature = \"elephantry\", derive(elephantry::Entity))]\n\n#[cfg_attr(\n\n feature = \"elephantry\",\n\n elephantry(model = \"Model\", structure = \"Structure\", relation 
= \"public.webhook\")\n\n)]\n\npub struct Entity {\n\n #[cfg_attr(feature = \"elephantry\", elephantry(pk, column = \"webhook_id\"))]\n\n pub id: Option<uuid::Uuid>,\n\n pub user_id: Option<uuid::Uuid>,\n\n pub name: String,\n\n pub url: String,\n\n pub last_error: Option<String>,\n\n pub mark_read: bool,\n\n}\n\n\n\nimpl From<&Entity> for std::result::Result<std::string::String, anyhow::Error> {\n\n fn from(entity: &Entity) -> Self {\n\n let json = serde_json::to_string(entity)?;\n\n\n", "file_path": "common/src/webhook.rs", "rank": 82, "score": 6.887190444169845 }, { "content": " link,\n\n tasks: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn counts(&mut self) {\n\n self.fetch(\n\n Kind::Counts,\n\n http::Method::GET,\n\n \"/counts\",\n\n yew::format::Nothing,\n\n )\n\n }\n\n\n\n pub fn search(\n\n &mut self,\n\n what: &str,\n\n filter: &crate::Filter,\n\n pagination: &oxfeed_common::Pagination,\n\n ) {\n", "file_path": "front/src/api/mod.rs", "rank": 83, "score": 6.845510179819728 }, { "content": "impl<C> super::Api<C>\n\nwhere\n\n C: yew::Component,\n\n <C as yew::Component>::Message: From<crate::event::Api>,\n\n{\n\n pub fn tags_all(&mut self, pagination: &oxfeed_common::Pagination) {\n\n let url = format!(\"/tags?{}\", pagination.to_query());\n\n\n\n self.fetch(\n\n super::Kind::Tags,\n\n http::Method::GET,\n\n &url,\n\n yew::format::Nothing,\n\n )\n\n }\n\n}\n", "file_path": "front/src/api/tag.rs", "rank": 84, "score": 6.770632687954956 }, { "content": " }\n\n}\n\n\n\n#[derive(serde::Serialize)]\n\n#[cfg_attr(feature = \"elephantry\", derive(elephantry::Entity))]\n\n#[cfg_attr(\n\n feature = \"elephantry\",\n\n elephantry(model = \"Model\", structure = \"Structure\", relation = \"public.item\")\n\n)]\n\npub struct Entity {\n\n #[cfg_attr(feature = \"elephantry\", elephantry(pk, column = \"item_id\"))]\n\n pub id: Option<uuid::Uuid>,\n\n pub source_id: uuid::Uuid,\n\n #[cfg_attr(feature = \"elephantry\", elephantry(column = \"id\"))]\n\n pub feed_id: String,\n\n pub 
link: String,\n\n pub title: String,\n\n pub content: Option<String>,\n\n pub read: bool,\n\n pub favorite: bool,\n", "file_path": "common/src/item.rs", "rank": 85, "score": 6.581501976651557 }, { "content": "mod bar;\n\n\n\npub(crate) enum Message {\n\n Event(crate::event::Event),\n\n}\n\n\n\npub(crate) use bar::Component as Bar;\n\n\n\n#[derive(Clone, yew::Properties)]\n\npub(crate) struct Properties {\n\n pub kind: String,\n\n pub pagination: oxfeed_common::Pagination,\n\n}\n\n\n\npub(crate) struct Component {\n\n kind: String,\n\n pagination: oxfeed_common::Pagination,\n\n filter: crate::Filter,\n\n _producer: Box<dyn yew::agent::Bridge<crate::event::Bus>>,\n\n}\n", "file_path": "front/src/components/search/mod.rs", "rank": 86, "score": 6.512840231306027 }, { "content": " #[prop_or_default]\n\n pub filter: crate::Filter,\n\n pub kind: String,\n\n pub pagination: oxfeed_common::Pagination,\n\n}\n\n\n\npub(crate) struct Component {\n\n api: crate::Api<Self>,\n\n kind: String,\n\n filter: crate::Filter,\n\n link: yew::ComponentLink<Self>,\n\n pager: Option<crate::Pager<oxfeed_common::item::Item>>,\n\n pagination: oxfeed_common::Pagination,\n\n _producer: Box<dyn yew::agent::Bridge<crate::event::Bus>>,\n\n}\n\n\n\nimpl Component {\n\n fn fetch(&mut self) {\n\n if self.filter.is_empty() {\n\n self.api.items_all(&self.kind, &self.pagination);\n", "file_path": "front/src/components/items.rs", "rank": 87, "score": 6.396483858930058 }, { "content": "impl<C> super::Api<C>\n\nwhere\n\n C: yew::Component,\n\n <C as yew::Component>::Message: From<crate::event::Api>,\n\n{\n\n pub fn items_all(&mut self, kind: &str, pagination: &oxfeed_common::Pagination) {\n\n let kind = if kind == \"all\" {\n\n String::new()\n\n } else {\n\n kind.to_string()\n\n };\n\n\n\n let url = format!(\"/items/{}?{}\", kind, pagination.to_query());\n\n\n\n self.fetch(\n\n super::Kind::Items,\n\n http::Method::GET,\n\n &url,\n\n yew::format::Nothing,\n\n )\n", "file_path": "front/src/api/items.rs", 
"rank": 88, "score": 6.37310688609391 }, { "content": " Ok(json)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"elephantry\")]\n\nimpl<'a> Model<'a> {\n\n pub fn delete(\n\n &self,\n\n token: &uuid::Uuid,\n\n webhook_id: &uuid::Uuid,\n\n ) -> elephantry::Result<Option<Entity>> {\n\n let sql = include_str!(\"../sql/webhook-delete.sql\");\n\n self.connection\n\n .query::<Entity>(sql, &[webhook_id, token])\n\n .map(|x| x.try_get(0))\n\n }\n\n\n\n pub fn all(&self, token: &uuid::Uuid) -> elephantry::Result<elephantry::Rows<Entity>> {\n\n let sql = include_str!(\"../sql/webhooks.sql\");\n\n self.connection.query::<Entity>(sql, &[token])\n\n }\n\n}\n", "file_path": "common/src/webhook.rs", "rank": 89, "score": 6.29381113912522 }, { "content": " \"/auth/login\",\n\n yew::format::Json(&token),\n\n )\n\n }\n\n\n\n pub fn auth_logout(&mut self) {\n\n self.fetch(\n\n super::Kind::AuthLogout,\n\n http::Method::POST,\n\n \"/auth/logout\",\n\n yew::format::Nothing,\n\n )\n\n }\n\n}\n", "file_path": "front/src/api/auth.rs", "rank": 90, "score": 6.289299538279362 }, { "content": "\n\nimpl std::ops::Deref for Location {\n\n type Target = yew_router::service::RouteService<()>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.router\n\n }\n\n}\n\n\n\nimpl std::ops::DerefMut for Location {\n\n fn deref_mut(&mut self) -> &mut Self::Target {\n\n &mut self.router\n\n }\n\n}\n\n\n\npub(crate) fn base_url() -> String {\n\n let location = Location::new();\n\n\n\n location.path()\n\n}\n", "file_path": "front/src/location.rs", "rank": 91, "score": 6.28588918758946 }, { "content": " };\n\n\n\n let mut item = Item {\n\n id: None,\n\n feed_id: entry.id,\n\n icon: feed_icon.clone().or_else(|| Self::icon(&link)),\n\n content,\n\n title,\n\n published: entry.published,\n\n read: false,\n\n source_id: source.id.unwrap(),\n\n link,\n\n favorite: false,\n\n };\n\n\n\n item.read = Self::call_webhooks(elephantry, &webhooks, &item);\n\n elephantry.insert_one::<ItemModel>(&item)?;\n\n }\n\n }\n\n\n", 
"file_path": "api/src/update.rs", "rank": 92, "score": 5.878589912574109 }, { "content": "}\n\n\n\nimpl yew::Component for Component {\n\n type Properties = Properties;\n\n type Message = Message;\n\n\n\n fn create(props: Self::Properties, link: yew::ComponentLink<Self>) -> Self {\n\n use yew::agent::Bridged;\n\n\n\n let callback = link.callback(Self::Message::Event);\n\n\n\n let component = Self {\n\n api: crate::Api::new(link.clone()),\n\n filter: props.filter,\n\n link,\n\n scene: Scene::View,\n\n pager: None,\n\n pagination: props.pagination,\n\n _producer: crate::event::Bus::bridge(callback),\n\n };\n", "file_path": "front/src/components/sources.rs", "rank": 93, "score": 5.553263367023819 }, { "content": "\n\nimpl yew::Component for Component {\n\n type Message = Message;\n\n type Properties = Properties;\n\n\n\n fn create(props: Self::Properties, link: yew::ComponentLink<Self>) -> Self {\n\n use yew::agent::Bridged;\n\n\n\n let callback = link.callback(Self::Message::Event);\n\n\n\n Self {\n\n pagination: props.pagination,\n\n kind: props.kind,\n\n filter: crate::Filter::new(),\n\n _producer: crate::event::Bus::bridge(callback),\n\n }\n\n }\n\n\n\n fn update(&mut self, msg: Self::Message) -> yew::ShouldRender {\n\n match msg {\n", "file_path": "front/src/components/search/mod.rs", "rank": 94, "score": 5.553263367023819 }, { "content": " WebhookDelete,\n\n WebhookUpdate,\n\n}\n\n\n\npub(crate) struct Api<C>\n\nwhere\n\n C: yew::Component,\n\n <C as yew::Component>::Message: std::convert::TryFrom<crate::event::Api>,\n\n{\n\n link: yew::ComponentLink<C>,\n\n tasks: Vec<yew::services::fetch::FetchTask>,\n\n}\n\n\n\nimpl<C> Api<C>\n\nwhere\n\n C: yew::Component,\n\n <C as yew::Component>::Message: From<crate::event::Api>,\n\n{\n\n pub fn new(link: yew::ComponentLink<C>) -> Self {\n\n Self {\n", "file_path": "front/src/api/mod.rs", "rank": 95, "score": 5.52100157186543 }, { "content": " } else {\n\n self.api.search(&self.kind, &self.filter, &self.pagination);\n\n 
}\n\n }\n\n}\n\n\n\nimpl yew::Component for Component {\n\n type Message = Message;\n\n type Properties = Properties;\n\n\n\n fn create(props: Self::Properties, link: yew::ComponentLink<Self>) -> Self {\n\n use yew::Bridged;\n\n\n\n let callback = link.callback(Self::Message::Event);\n\n\n\n let component = Self {\n\n api: crate::Api::new(link.clone()),\n\n kind: props.kind,\n\n filter: props.filter,\n\n link,\n", "file_path": "front/src/components/items.rs", "rank": 96, "score": 4.89128920834983 }, { "content": "mod switch;\n\nmod tag;\n\nmod tags;\n\nmod webhook;\n\n\n\npub(crate) use actions::Component as Actions;\n\npub(crate) use alerts::Component as Alerts;\n\npub(crate) use app::Component as App;\n\npub(crate) use empty::Component as Empty;\n\npub(crate) use error::Component as Error;\n\npub(crate) use header::Component as Header;\n\npub(crate) use item::Component as Item;\n\npub(crate) use items::Component as Items;\n\npub(crate) use list::Component as List;\n\npub(crate) use login::Component as Login;\n\npub(crate) use not_found::Component as NotFound;\n\npub(crate) use popover::Component as Popover;\n\npub(crate) use search::Component as Search;\n\npub(crate) use settings::Component as Settings;\n\npub(crate) use sidebar::Component as Sidebar;\n\npub(crate) use source::Component as Source;\n\npub(crate) use sources::Component as Sources;\n\npub(crate) use svg::Component as Svg;\n\npub(crate) use switch::Component as Switch;\n\npub(crate) use tag::Component as Tag;\n\npub(crate) use tags::Component as Tags;\n\npub(crate) use webhook::Component as Webhook;\n", "file_path": "front/src/components/mod.rs", "rank": 97, "score": 4.839382402870676 }, { "content": "impl<C> super::Api<C>\n\nwhere\n\n C: yew::Component,\n\n <C as yew::Component>::Message: From<crate::event::Api>,\n\n{\n\n pub fn auth_login(&mut self, email: &str, password: &str, remember_me: bool) {\n\n use hmac::NewMac;\n\n use jwt::SignWithKey;\n\n\n\n let key: hmac::Hmac<sha2::Sha256> =\n\n 
hmac::Hmac::new_from_slice(env!(\"SECRET\").as_bytes()).unwrap();\n\n let mut claims = std::collections::BTreeMap::new();\n\n claims.insert(\"email\", email);\n\n claims.insert(\"password\", password);\n\n\n\n let token = claims.sign_with_key(&key).unwrap();\n\n\n\n self.fetch(\n\n super::Kind::AuthLogin(remember_me),\n\n http::Method::POST,\n", "file_path": "front/src/api/auth.rs", "rank": 98, "score": 4.784856894525349 }, { "content": " <div class=\"card\">\n\n <div class=\"card-header\">\n\n { \"Webhooks\" }\n\n <span class=\"help\">\n\n <crate::components::Svg icon=\"question-circle\" size=16 />\n\n <crate::components::Popover\n\n title=\"What is a webhook?\".to_string()\n\n text=\"\n\n A webhook is an URL called when a new item is fetched.<br />\n\n This URL is called via POST method and the new item will be pass as json body.\n\n \"\n\n position=\"end\"\n\n />\n\n </span>\n\n </div>\n\n <div class=\"card-body\">\n\n <webhooks::Component />\n\n </div>\n\n </div>\n\n <div class=\"card\">\n", "file_path": "front/src/components/settings/mod.rs", "rank": 99, "score": 4.702674671301235 } ]
Rust
src/types/metadata.rs
Techcable/steven
2e99712cc8b467a236a8be57e6e43c888ba602e9
use std::collections::HashMap; use std::marker::PhantomData; use std::io; use std::fmt; use protocol; use protocol::Serializable; use format; use item; use shared::Position; pub struct MetadataKey<T: MetaValue> { index: i32, ty: PhantomData<T>, } impl <T: MetaValue> MetadataKey<T> { #[allow(dead_code)] const fn new(index: i32) -> MetadataKey<T> { MetadataKey { index: index, ty: PhantomData, } } } pub struct Metadata { map: HashMap<i32, Value>, } impl Metadata { pub fn new() -> Metadata { Metadata { map: HashMap::new() } } pub fn get<T: MetaValue>(&self, key: &MetadataKey<T>) -> Option<&T> { self.map.get(&key.index).map(T::unwrap) } pub fn put<T: MetaValue>(&mut self, key: &MetadataKey<T>, val: T) { self.map.insert(key.index, val.wrap()); } fn put_raw<T: MetaValue>(&mut self, index: i32, val: T) { self.map.insert(index, val.wrap()); } } impl Serializable for Metadata { fn read_from<R: io::Read>(buf: &mut R) -> Result<Self, protocol::Error> { let mut m = Metadata::new(); loop { let index = try!(u8::read_from(buf)) as i32; if index == 0xFF { break; } let ty = try!(u8::read_from(buf)); match ty { 0 => m.put_raw(index, try!(i8::read_from(buf))), 1 => m.put_raw(index, try!(protocol::VarInt::read_from(buf)).0), 2 => m.put_raw(index, try!(f32::read_from(buf))), 3 => m.put_raw(index, try!(String::read_from(buf))), 4 => m.put_raw(index, try!(format::Component::read_from(buf))), 5 => m.put_raw(index, try!(Option::<item::Stack>::read_from(buf))), 6 => m.put_raw(index, try!(bool::read_from(buf))), 7 => m.put_raw(index, [try!(f32::read_from(buf)), try!(f32::read_from(buf)), try!(f32::read_from(buf))]), 8 => m.put_raw(index, try!(Position::read_from(buf))), 9 => { if try!(bool::read_from(buf)) { m.put_raw(index, try!(Option::<Position>::read_from(buf))); } else { m.put_raw::<Option<Position>>(index, None); } } 10 => m.put_raw(index, try!(protocol::VarInt::read_from(buf))), 11 => { if try!(bool::read_from(buf)) { m.put_raw(index, try!(Option::<protocol::UUID>::read_from(buf))); } 
else { m.put_raw::<Option<protocol::UUID>>(index, None); } } 12 => m.put_raw(index, try!(protocol::VarInt::read_from(buf)).0 as u16), _ => return Err(protocol::Error::Err("unknown metadata type".to_owned())), } } Ok(m) } fn write_to<W: io::Write>(&self, buf: &mut W) -> Result<(), protocol::Error> { for (k, v) in &self.map { try!((*k as u8).write_to(buf)); match *v { Value::Byte(ref val) => { try!(u8::write_to(&0, buf)); try!(val.write_to(buf)); } Value::Int(ref val) => { try!(u8::write_to(&1, buf)); try!(protocol::VarInt(*val).write_to(buf)); } Value::Float(ref val) => { try!(u8::write_to(&2, buf)); try!(val.write_to(buf)); } Value::String(ref val) => { try!(u8::write_to(&3, buf)); try!(val.write_to(buf)); } Value::FormatComponent(ref val) => { try!(u8::write_to(&4, buf)); try!(val.write_to(buf)); } Value::OptionalItemStack(ref val) => { try!(u8::write_to(&5, buf)); try!(val.write_to(buf)); } Value::Bool(ref val) => { try!(u8::write_to(&6, buf)); try!(val.write_to(buf)); } Value::Vector(ref val) => { try!(u8::write_to(&7, buf)); try!(val[0].write_to(buf)); try!(val[1].write_to(buf)); try!(val[2].write_to(buf)); } Value::Position(ref val) => { try!(u8::write_to(&8, buf)); try!(val.write_to(buf)); } Value::OptionalPosition(ref val) => { try!(u8::write_to(&9, buf)); try!(val.is_some().write_to(buf)); try!(val.write_to(buf)); } Value::Direction(ref val) => { try!(u8::write_to(&10, buf)); try!(val.write_to(buf)); } Value::OptionalUUID(ref val) => { try!(u8::write_to(&11, buf)); try!(val.is_some().write_to(buf)); try!(val.write_to(buf)); } Value::Block(ref val) => { try!(u8::write_to(&11, buf)); try!(protocol::VarInt(*val as i32).write_to(buf)); } } } try!(u8::write_to(&0xFF, buf)); Ok(()) } } impl fmt::Debug for Metadata { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "Metadata[ ")); for (k, v) in &self.map { try!(write!(f, "{:?}={:?}, ", k, v)); } write!(f, "]") } } impl Default for Metadata { fn default() -> Metadata { Metadata::new() } } 
#[derive(Debug)] pub enum Value { Byte(i8), Int(i32), Float(f32), String(String), FormatComponent(format::Component), OptionalItemStack(Option<item::Stack>), Bool(bool), Vector([f32; 3]), Position(Position), OptionalPosition(Option<Position>), Direction(protocol::VarInt), OptionalUUID(Option<protocol::UUID>), Block(u16), } pub trait MetaValue { fn unwrap(&Value) -> &Self; fn wrap(self) -> Value; } impl MetaValue for i8 { fn unwrap(value: &Value) -> &Self { match *value { Value::Byte(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Byte(self) } } impl MetaValue for i32 { fn unwrap(value: &Value) -> &Self { match *value { Value::Int(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Int(self) } } impl MetaValue for f32 { fn unwrap(value: &Value) -> &Self { match *value { Value::Float(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Float(self) } } impl MetaValue for String { fn unwrap(value: &Value) -> &Self { match *value { Value::String(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::String(self) } } impl MetaValue for format::Component { fn unwrap(value: &Value) -> &Self { match *value { Value::FormatComponent(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::FormatComponent(self) } } impl MetaValue for Option<item::Stack> { fn unwrap(value: &Value) -> &Self { match *value { Value::OptionalItemStack(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::OptionalItemStack(self) } } impl MetaValue for bool { fn unwrap(value: &Value) -> &Self { match *value { Value::Bool(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Bool(self) } } impl MetaValue for [f32; 3] { fn unwrap(value: &Value) -> &Self { match *value { Value::Vector(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Vector(self) } } impl MetaValue 
for Position { fn unwrap(value: &Value) -> &Self { match *value { Value::Position(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Position(self) } } impl MetaValue for Option<Position> { fn unwrap(value: &Value) -> &Self { match *value { Value::OptionalPosition(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::OptionalPosition(self) } } impl MetaValue for protocol::VarInt { fn unwrap(value: &Value) -> &Self { match *value { Value::Direction(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Direction(self) } } impl MetaValue for Option<protocol::UUID> { fn unwrap(value: &Value) -> &Self { match *value { Value::OptionalUUID(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::OptionalUUID(self) } } impl MetaValue for u16 { fn unwrap(value: &Value) -> &Self { match *value { Value::Block(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Block(self) } } #[cfg(test)] mod test { use super::*; use std::marker::PhantomData; const TEST: MetadataKey<String> = MetadataKey { index: 0, ty: PhantomData, }; #[test] fn basic() { let mut m = Metadata::new(); m.put(&TEST, "Hello world".to_owned()); match m.get(&TEST) { Some(val) => { assert!(val == "Hello world"); } None => panic!("failed"), } } }
use std::collections::HashMap; use std::marker::PhantomData; use std::io; use std::fmt; use protocol; use protocol::Serializable; use format; use item; use shared::Position; pub struct MetadataKey<T: MetaValue> { index: i32, ty: PhantomData<T>, } impl <T: MetaValue> MetadataKey<T> { #[allow(dead_code)] const fn new(index: i32) -> MetadataKey<T> { MetadataKey { index: index, ty: PhantomData, } } } pub struct Metadata { map: HashMap<i32, Value>, } impl Metadata { pub fn new() -> Metadata { Metadata { map: HashMap::new() } } pub fn get<T: MetaValue>(&self, key: &MetadataKey<T>) -> Option<&T> { self.map.get(&key.index).map(T::unwrap) } pub fn put<T: MetaValue>(&mut self, key: &MetadataKey<T>, val: T) { self.map.insert(key.index, val.wrap()); } fn put_raw<T: MetaValue>(&mut self, index: i32, val: T) { self.map.insert(index, val.wrap()); } } impl Serializable for Metadata { fn read_from<R: io::Read>(buf: &mut R) -> Result<Self, protocol::Error> { let mut m = Metadata::new(); loop { let index = try!(u8::read_from(buf)) as i32; if index == 0xFF { break; } let ty = try!(u8::read_from(buf)); match ty { 0 => m.put_raw(index, try!(i8::read_from(buf))), 1 => m.put_raw(index, try!(protocol::VarInt::read_from(buf)).0), 2 => m.put_raw(index, try!(f32::read_from(buf))), 3 => m.put_raw(index, try!(String::read_from(buf))), 4 => m.put_raw(index, try!(format::Component::read_from(buf))), 5 => m.put_raw(index, try!(Option::<item::Stack>::read_from(buf))), 6 => m.put_raw(index, try!(bool::read_from(buf))), 7 => m.put_raw(index, [try!(f32::read_from(buf)), try!(f32::read_from(buf)), try!(f32::read_from(buf))]), 8 => m.put_raw(index, try!(Position::read_from(buf))), 9 => { if try!(bool::read_from(buf)) { m.put_raw(index, try!(Option::<Position>::read_from(buf))); } else { m.put_raw::<Option<Position>>(index, None); } } 10 => m.put_raw(index, try!(protocol::VarInt::read_from(buf))), 11 => { if try!(bool::read_from(buf)) { m.put_raw(index, try!(Option::<protocol::UUID>::read_from(buf))); } 
else { m.put_raw::<Option<protocol::UUID>>(index, None); } } 12 => m.put_raw(index, try!(protocol::VarInt::read_from(buf)).0 as u16), _ => return Err(protocol::Error::Err("unknown metadata type".to_owned())), } } Ok(m) } fn write_to<W: io::Write>(&self, buf: &mut W) -> Result<(), protocol::Error> { for (k, v) in &self.map { try!((*k as u8).write_to(buf)); match *v { Value::Byte(ref val) => { try!(u8::write_to(&0, buf)); try!(val.write_to(buf)); } Value::Int(ref val) => { try!(u8::write_to(&1, buf)); try!(protocol::VarInt(*val).write_to(buf)); } Value::Float(ref val) => { try!(u8::write_to(&2, buf)); try!(val.write_to(buf)); } Value::String(ref val) => { try!(u8::write_to(&3, buf)); try!(val.write_to(buf)); } Value::FormatComponent(ref val) => { try!(u8::write_to(&4, buf)); try!(val.write_to(buf)); } Value::OptionalItemStack(ref val) => { try!(u8::write_to(&5, buf)); try!(val.write_to(buf)); } Value::Bool(ref val) => { try!(u8::write_to(&6, buf)); try!(val.write_to(buf)); } Value::Vector(ref val) => { try!(u8::write_to(&7, buf)); try!(val[0].write_to(buf)); try!(val[1].write_to(buf)); try!(val[2].write_to(buf)); } Value::Position(ref val) => { try!(u8::write_to(&8, buf)); try!(val.write_to(buf)); } Value::OptionalPosition(ref val) => { try!(u8::write_to(&9, buf)); try!(val.is_some().write_to(buf)); try!(val.write_to(buf)); } Value::Direction(ref val) => { try!(u8::write_to(&10, buf)); try!(val.write_to(buf)); } Value::OptionalUUID(ref val) => { try!(u8::write_to(&11, buf)); try!(val.is_some().write_to(buf)); try!(val.write_to(buf)); } Value::Block(ref val) => { try!(u8::write_to(&11, buf)); try!(protocol::VarInt(*val as i32).write_to(buf)); } } } try!(u8::write_to(&0xFF, buf)); Ok(()) } } impl fmt::Debug for Metadata { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "Metadata[ ")); for (k, v) in &self.map { try!(write!(f, "{:?}={:?}, ", k, v)); } write!(f, "]") } } impl Default for Metadata { fn default() -> Metadata { Metadata::new() } } 
#[derive(Debug)] pub enum Value { Byte(i8), Int(i32), Float(f32), String(String), FormatComponent(format::Component), OptionalItemStack(Option<item::Stack>), Bool(bool), Vector([f32; 3]), Position(Position), OptionalPosition(Option<Position>), Direction(protocol::VarInt), OptionalUUID(Option<protocol::UUID>), Block(u16), } pub trait MetaValue { fn unwrap(&Value) -> &Self; fn wrap(self) -> Value; } impl MetaValue for i8 { fn unwrap(value: &Value) -> &Self { match *value { Value::Byte(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Byte(self) } } impl MetaValue for i32 { fn unwrap(value: &Value) -> &Self { match *value { Value::Int(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Int(self) } } impl MetaValue for f32 { fn unwrap(value: &Value) -> &Self { match *value { Value::Float(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Float(self) } } impl MetaValue for String { fn unwrap(value: &Value) -> &Self { match *value { Value::String(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::String(self) } } impl MetaValue for format::Component { fn unwrap(value: &Value) -> &Self { match *value { Value::FormatComponent(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::FormatComponent(self) } } impl MetaValue for Option<item::Stack> { fn unwrap(value: &Value) -> &Self { match *value { Value::OptionalItemStack(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::OptionalItemStack(self) } } impl MetaValue for bool { fn unwrap(value: &Value) -> &Self { match *value { Value::Bool(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Bool(self) } } impl MetaValue for [f32; 3] { fn unwrap(value: &Value) -> &Self { match *value { Value::Vector(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Vector(self) } } impl MetaValue 
for Position { fn unwrap(value: &Value) -> &Self { match *value { Value::Position(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Position(self) } } impl MetaValue for Option<Position> { fn unwrap(value: &Value) -> &Self { match *value { Value::OptionalPosition(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::OptionalPosition(self) } } impl MetaValue for protocol::VarInt { fn unwrap(value: &Value) -> &Self { match *value { Value::Direction(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Direction(self) } } impl MetaValue for Option<protocol::UUID> { fn unwrap(value: &Value) -> &Sel
fn wrap(self) -> Value { Value::OptionalUUID(self) } } impl MetaValue for u16 { fn unwrap(value: &Value) -> &Self { match *value { Value::Block(ref val) => val, _ => panic!("incorrect key"), } } fn wrap(self) -> Value { Value::Block(self) } } #[cfg(test)] mod test { use super::*; use std::marker::PhantomData; const TEST: MetadataKey<String> = MetadataKey { index: 0, ty: PhantomData, }; #[test] fn basic() { let mut m = Metadata::new(); m.put(&TEST, "Hello world".to_owned()); match m.get(&TEST) { Some(val) => { assert!(val == "Hello world"); } None => panic!("failed"), } } }
f { match *value { Value::OptionalUUID(ref val) => val, _ => panic!("incorrect key"), } }
function_block-function_prefixed
[ { "content": "pub fn read_string<R: io::Read>(buf: &mut R) -> Result<String, protocol::Error> {\n\n let len: i16 = try!(buf.read_i16::<BigEndian>());\n\n let mut ret = String::new();\n\n try!(buf.take(len as u64).read_to_string(&mut ret));\n\n Result::Ok(ret)\n\n}\n", "file_path": "src/nbt/mod.rs", "rank": 0, "score": 401517.3859874158 }, { "content": "pub fn write_string<W: io::Write>(buf: &mut W, s: &str) -> Result<(), protocol::Error> {\n\n let data = s.as_bytes();\n\n try!((data.len() as i16).write_to(buf));\n\n buf.write_all(data).map_err(|v| v.into())\n\n}\n\n\n", "file_path": "src/nbt/mod.rs", "rank": 1, "score": 364695.6621202596 }, { "content": "fn can_connect_sides<F: Fn(Block) -> bool, W: WorldAccess>(world: &W, pos: Position, f: &F) -> (bool, bool, bool, bool) {\n\n (can_connect(world, pos.shift(Direction::North), f),\n\n can_connect(world, pos.shift(Direction::South), f),\n\n can_connect(world, pos.shift(Direction::West), f),\n\n can_connect(world, pos.shift(Direction::East), f))\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 2, "score": 288419.93528942927 }, { "content": "pub fn render_liquid<W: Write>(textures: Arc<RwLock<render::TextureManager>>,lava: bool, snapshot: &world::Snapshot, x: i32, y: i32, z: i32, buf: &mut W) -> usize {\n\n let get_liquid = if lava {\n\n get_lava_level\n\n } else {\n\n get_water_level\n\n };\n\n\n\n let mut count = 0;\n\n\n\n let (tl, tr, bl, br) = if get_liquid(snapshot, x, y + 1, z).is_some() {\n\n (8, 8, 8, 8)\n\n } else {\n\n (\n\n average_liquid_level(get_liquid, snapshot, x, y, z),\n\n average_liquid_level(get_liquid, snapshot, x+1, y, z),\n\n average_liquid_level(get_liquid, snapshot, x, y, z+1),\n\n average_liquid_level(get_liquid, snapshot, x+1, y, z+1)\n\n )\n\n };\n\n\n", "file_path": "src/model/liquid.rs", "rank": 3, "score": 285761.92068385554 }, { "content": "fn can_connect<F: Fn(Block) -> bool, W: WorldAccess>(world: &W, pos: Position, f: &F) -> bool {\n\n let block = world.get_block(pos);\n\n 
f(block) || (block.get_material().renderable && block.get_material().should_cull_against)\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 4, "score": 283109.5381932041 }, { "content": "pub trait Lengthable : Serializable + Copy + Default {\n\n fn into(self) -> usize;\n\n fn from(usize) -> Self;\n\n}\n\n\n\npub struct LenPrefixed<L: Lengthable, V> {\n\n len: L,\n\n pub data: Vec<V>,\n\n}\n\n\n\nimpl <L: Lengthable, V: Default> LenPrefixed<L, V> {\n\n pub fn new(data: Vec<V>) -> LenPrefixed<L, V> {\n\n LenPrefixed {\n\n len: Default::default(),\n\n data: data,\n\n }\n\n }\n\n}\n\n\n\nimpl <L: Lengthable, V: Serializable> Serializable for LenPrefixed<L, V> {\n", "file_path": "src/protocol/mod.rs", "rank": 5, "score": 252810.28258972376 }, { "content": "pub fn clear_buffer(buffer: TargetBuffer, draw_buffer: i32, values: &[f32]) {\n\n unsafe {\n\n gl::ClearBufferfv(buffer, draw_buffer, values.as_ptr());\n\n }\n\n}\n", "file_path": "src/gl/mod.rs", "rank": 6, "score": 230473.68859091436 }, { "content": "#[derive(PartialEq, Eq, Hash, Clone)]\n\nstruct Key(String, String);\n\n\n\nmacro_rules! 
try_log {\n\n ($e:expr) => (\n\n match $e {\n\n Ok(val) => val,\n\n Err(err) => {\n\n error!(\"Error loading model {:?}\", err);\n\n return false;\n\n }\n\n }\n\n );\n\n (opt $e:expr) => (\n\n match $e {\n\n Ok(val) => val,\n\n Err(err) => {\n\n error!(\"Error loading model {:?}\", err);\n\n return None;\n\n }\n\n }\n", "file_path": "src/model/mod.rs", "rank": 7, "score": 220311.27670201898 }, { "content": "pub fn convert_legacy(c: &mut Component) {\n\n match *c {\n\n Component::Text(ref mut txt) => {\n\n if let Some(ref mut extra) = txt.modifier.extra.as_mut() {\n\n for e in extra.iter_mut() {\n\n convert_legacy(e);\n\n }\n\n }\n\n if txt.text.contains(LEGACY_CHAR) {\n\n let mut parts = Vec::new();\n\n let mut last = 0;\n\n let mut current = TextComponent::new(\"\");\n\n {\n\n let mut iter = txt.text.char_indices();\n\n while let Some((i, c)) = iter.next() {\n\n if c == LEGACY_CHAR {\n\n let next = match iter.next() {\n\n Some(val) => val,\n\n None => break,\n\n };\n", "file_path": "src/format.rs", "rank": 8, "score": 217823.42157557717 }, { "content": "/// Sets the size of the viewport of this context.\n\npub fn viewport(x: i32, y: i32, w: i32, h: i32) {\n\n unsafe {\n\n gl::Viewport(x, y, w, h);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 9, "score": 217538.81809528623 }, { "content": "/// Sets the color the color buffer should be cleared to\n\n/// when Clear is called with the color flag.\n\npub fn clear_color(r: f32, g: f32, b: f32, a: f32) {\n\n unsafe {\n\n gl::ClearColor(r, g, b, a);\n\n }\n\n}\n\n\n\n/// `ClearFlags` is a set of flags to mark what should be cleared during\n\n/// a Clear call.\n\npub enum ClearFlags {\n\n /// Marks the color buffer to be cleared\n\n Color,\n\n /// Marks the depth buffer to be cleared\n\n Depth,\n\n Internal(u32),\n\n}\n\n\n\nimpl ClearFlags {\n\n fn internal(self) -> u32 {\n\n match self {\n\n ClearFlags::Color => gl::COLOR_BUFFER_BIT,\n", "file_path": "src/gl/mod.rs", "rank": 10, "score": 215355.32081842108 }, 
{ "content": "pub fn depth_mask(f: bool) {\n\n unsafe { gl::DepthMask(f as u8); }\n\n}\n\n\n\n/// `Func` is a function to be preformed on two values.\n\npub type Func = u32;\n\n\n\npub const NEVER: Func = gl::NEVER;\n\npub const LESS: Func = gl::LESS;\n\npub const LESS_OR_EQUAL: Func = gl::LEQUAL;\n\npub const GREATER: Func = gl::GREATER;\n\npub const ALWAYS: Func = gl::ALWAYS;\n\npub const EQUAL: Func = gl::EQUAL;\n\n\n", "file_path": "src/gl/mod.rs", "rank": 11, "score": 212887.16660838114 }, { "content": "pub trait Serializable: Sized {\n\n fn read_from<R: io::Read>(buf: &mut R) -> Result<Self, Error>;\n\n fn write_to<W: io::Write>(&self, buf: &mut W) -> Result<(), Error>;\n\n}\n\n\n\nimpl Serializable for Vec<u8> {\n\n fn read_from<R: io::Read>(buf: &mut R) -> Result<Vec<u8>, Error> {\n\n let mut v = Vec::new();\n\n try!(buf.read_to_end(&mut v));\n\n Ok(v)\n\n }\n\n\n\n fn write_to<W: io::Write>(&self, buf: &mut W) -> Result<(), Error> {\n\n buf.write_all(&self[..]).map_err(|v| v.into())\n\n }\n\n}\n\n\n\nimpl Serializable for Option<nbt::NamedTag>{\n\n fn read_from<R: io::Read>(buf: &mut R) -> Result<Option<nbt::NamedTag>, Error> {\n\n let ty = try!(buf.read_u8());\n", "file_path": "src/protocol/mod.rs", "rank": 13, "score": 207619.36192080582 }, { "content": "fn is_snowy<W: WorldAccess>(world: &W, pos: Position) -> bool {\n\n match world.get_block(pos.shift(Direction::Up)) {\n\n Block::Snow{..} | Block::SnowLayer{..} => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 14, "score": 207531.20039258176 }, { "content": "fn can_burn<W: WorldAccess>(world: &W, pos: Position) -> bool {\n\n match world.get_block(pos) {\n\n Block::Planks{..} |\n\n Block::DoubleWoodenSlab{..} |\n\n Block::WoodenSlab{..} |\n\n Block::FenceGate{..} |\n\n Block::SpruceFenceGate{..} |\n\n Block::BirchFenceGate{..} |\n\n Block::JungleFenceGate{..} |\n\n Block::DarkOakFenceGate{..} |\n\n Block::AcaciaFenceGate{..} |\n\n Block::Fence{..} |\n\n 
Block::SpruceFence{..} |\n\n Block::BirchFence{..} |\n\n Block::JungleFence{..} |\n\n Block::DarkOakFence{..} |\n\n Block::AcaciaFence{..} |\n\n Block::OakStairs{..} |\n\n Block::BirchStairs{..} |\n\n Block::SpruceStairs{..} |\n", "file_path": "blocks/src/lib.rs", "rank": 15, "score": 203837.81419513887 }, { "content": "pub fn draw_elements(ty: DrawType, count: i32, dty: Type, offset: usize) {\n\n unsafe {\n\n gl::DrawElements(ty, count, dty, offset as *const gl::types::GLvoid);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 16, "score": 190894.081459684 }, { "content": "fn update_repeater_state<W: WorldAccess>(world: &W, pos: Position, facing: Direction) -> bool {\n\n let f = |dir| {\n\n match world.get_block(pos.shift(dir)) {\n\n Block::RepeaterPowered{..} => true,\n\n _ => false,\n\n }\n\n };\n\n\n\n f(facing.clockwise()) || f(facing.counter_clockwise())\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 17, "score": 188645.21066770982 }, { "content": "pub fn multi_draw_elements(ty: DrawType, count: &[i32], dty: Type, offsets: &[usize]) {\n\n unsafe {\n\n gl::MultiDrawElements(ty, count.as_ptr(), dty, offsets.as_ptr() as *const _, count.len() as i32);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 18, "score": 187781.23755738145 }, { "content": "fn fence_gate_update_state<W: WorldAccess>(world: &W, pos: Position, facing: Direction) -> bool {\n\n match world.get_block(pos.shift(facing.clockwise())) {\n\n Block::CobblestoneWall{..} => return true,\n\n _ => (),\n\n }\n\n\n\n match world.get_block(pos.shift(facing.counter_clockwise())) {\n\n Block::CobblestoneWall{..} => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 19, "score": 185663.0759688363 }, { "content": "pub fn trace_ray<F, R>(world: &world::World, max: f64, s: cgmath::Vector3<f64>, d: cgmath::Vector3<f64>, collide_func: F) -> Option<R>\n\n where F: Fn(&world::World, Position, cgmath::Vector3<f64>, cgmath::Vector3<f64>,) -> (bool, Option<R>) {\n\n 
struct Gen {\n\n count: i32,\n\n base: f64,\n\n d: f64,\n\n }\n\n impl Gen {\n\n fn new(start: f64, mut d: f64) -> Gen {\n\n let base = if d > 0.0 {\n\n (start.ceil() - start) / d\n\n } else if d < 0.0 {\n\n d = d.abs();\n\n (start - start.floor()) / d\n\n } else {\n\n 0.0\n\n };\n\n Gen {\n\n count: 0,\n\n base: base,\n", "file_path": "src/server/target.rs", "rank": 20, "score": 182767.64068556574 }, { "content": "fn update_door_state<W: WorldAccess>(world: &W, pos: Position, ohalf: DoorHalf, ofacing: Direction, ohinge: Side, oopen: bool, opowered: bool) -> (Direction, Side, bool, bool) {\n\n let oy = if ohalf == DoorHalf::Upper { -1 } else { 1 };\n\n\n\n match world.get_block(pos + (0, oy, 0)) {\n\n Block::WoodenDoor{half, facing, hinge, open, powered} |\n\n Block::SpruceDoor{half, facing, hinge, open, powered} |\n\n Block::BirchDoor{half, facing, hinge, open, powered} |\n\n Block::JungleDoor{half, facing, hinge, open, powered} |\n\n Block::AcaciaDoor{half, facing, hinge, open, powered} |\n\n Block::DarkOakDoor{half, facing, hinge, open, powered} |\n\n Block::IronDoor{half, facing, hinge, open, powered} => {\n\n if half != ohalf {\n\n if ohalf == DoorHalf::Upper {\n\n return (facing, ohinge, open, opowered);\n\n } else {\n\n return (ofacing, hinge, oopen, powered);\n\n }\n\n }\n\n },\n\n _ => {},\n\n }\n\n\n\n (ofacing, ohinge, oopen, opowered)\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 21, "score": 181171.22726803776 }, { "content": "fn build_map(out: &mut Vec<PathBuf>, path: &Path) {\n\n let files = fs::read_dir(path).unwrap();\n\n for entry in files {\n\n let entry = entry.unwrap();\n\n if fs::metadata(entry.path()).unwrap().is_dir() {\n\n build_map(out, &entry.path());\n\n } else {\n\n out.push(entry.path());\n\n }\n\n }\n\n}\n", "file_path": "resources/build.rs", "rank": 22, "score": 174753.2197805423 }, { "content": "pub trait PacketType {\n\n fn packet_id(&self) -> i32;\n\n\n\n fn write<W: io::Write>(self, buf: &mut W) -> Result<(), 
Error>;\n\n}\n", "file_path": "src/protocol/mod.rs", "rank": 23, "score": 173113.5051640001 }, { "content": "pub fn register_vars(vars: &mut console::Vars) {\n\n vars.register(CL_USERNAME);\n\n vars.register(CL_UUID);\n\n vars.register(AUTH_TOKEN);\n\n vars.register(AUTH_CLIENT_TOKEN);\n\n}\n", "file_path": "src/auth.rs", "rank": 24, "score": 170181.09133746233 }, { "content": "pub fn add_systems(m: &mut ecs::Manager) {\n\n let sys = systems::UpdateLastPosition::new(m);\n\n m.add_system(sys);\n\n\n\n player::add_systems(m);\n\n\n\n let sys = systems::ApplyVelocity::new(m);\n\n m.add_system(sys);\n\n let sys = systems::ApplyGravity::new(m);\n\n m.add_system(sys);\n\n let sys = systems::LerpPosition::new(m);\n\n m.add_render_system(sys);\n\n let sys = systems::LerpRotation::new(m);\n\n m.add_render_system(sys);\n\n let sys = systems::LightEntity::new(m);\n\n m.add_render_system(sys);\n\n\n\n block_entity::add_systems(m);\n\n}\n\n\n", "file_path": "src/entity/mod.rs", "rank": 25, "score": 170181.0913374623 }, { "content": "pub fn register_vars(vars: &mut console::Vars) {\n\n vars.register(R_MAX_FPS);\n\n vars.register(R_FOV);\n\n vars.register(R_VSYNC);\n\n vars.register(CL_MASTER_VOLUME);\n\n vars.register(CL_KEYBIND_FORWARD);\n\n vars.register(CL_KEYBIND_BACKWARD);\n\n vars.register(CL_KEYBIND_LEFT);\n\n vars.register(CL_KEYBIND_RIGHT);\n\n vars.register(CL_KEYBIND_OPEN_INV);\n\n vars.register(CL_KEYBIND_SNEAK);\n\n vars.register(CL_KEYBIND_SPRINT);\n\n vars.register(CL_KEYBIND_JUMP);\n\n}\n\n\n\n#[derive(Hash, PartialEq, Eq)]\n\npub enum Stevenkey {\n\n Forward,\n\n Backward,\n\n Left,\n", "file_path": "src/settings.rs", "rank": 26, "score": 170181.09133746233 }, { "content": "pub fn add_systems(m: &mut ecs::Manager) {\n\n let sys = MovementHandler::new(m);\n\n m.add_system(sys);\n\n let sys = PlayerRenderer::new(m);\n\n m.add_render_system(sys);\n\n}\n\n\n", "file_path": "src/entity/player.rs", "rank": 27, "score": 170181.09133746233 }, { "content": "fn 
flood_fill(snapshot: &world::Snapshot, visited: &mut Set, x: i32, y: i32, z: i32) -> u8 {\n\n use std::collections::VecDeque;\n\n\n\n let mut next_position = VecDeque::with_capacity(16 * 16);\n\n next_position.push_back((x, y, z));\n\n\n\n let mut touched = 0;\n\n while let Some((x, y, z)) = next_position.pop_front() {\n\n let idx = (x | (z << 4) | (y << 8)) as usize;\n\n if x < 0 || x > 15 || y < 0 || y > 15 || z < 0 || z > 15 || visited.get(idx) {\n\n continue;\n\n }\n\n visited.set(idx, true);\n\n\n\n if snapshot.get_block(x, y, z).get_material().should_cull_against {\n\n continue;\n\n }\n\n\n\n if x == 0 {\n\n touched |= 1 << Direction::West.index();\n", "file_path": "src/chunk_builder.rs", "rank": 28, "score": 166966.38671847514 }, { "content": "pub fn add_shaders(reg: &mut glsl::Registry) {\n\n reg.register(\"lookup_texture\", include_str!(\"shaders/lookup_texture.glsl\"));\n\n reg.register(\"get_light\", include_str!(\"shaders/get_light.glsl\"));\n\n\n\n reg.register(\"ui_vertex\", include_str!(\"shaders/ui_vertex.glsl\"));\n\n reg.register(\"ui_frag\", include_str!(\"shaders/ui_frag.glsl\"));\n\n\n\n reg.register(\"chunk_vertex\", include_str!(\"shaders/chunk_vertex.glsl\"));\n\n reg.register(\"chunk_frag\", include_str!(\"shaders/chunk_frag.glsl\"));\n\n\n\n reg.register(\"trans_vertex\", include_str!(\"shaders/trans_vertex.glsl\"));\n\n reg.register(\"trans_frag\", include_str!(\"shaders/trans_frag.glsl\"));\n\n\n\n reg.register(\"model_vertex\", include_str!(\"shaders/model_vertex.glsl\"));\n\n reg.register(\"model_frag\", include_str!(\"shaders/model_frag.glsl\"));\n\n\n\n reg.register(\"sun_vertex\", include_str!(\"shaders/sun_vertex.glsl\"));\n\n reg.register(\"sun_frag\", include_str!(\"shaders/sun_frag.glsl\"));\n\n\n\n reg.register(\"clouds_vertex\", include_str!(\"shaders/clouds_vertex.glsl\"));\n", "file_path": "src/render/shaders.rs", "rank": 29, "score": 166814.10215526 }, { "content": "pub fn add_systems(m: &mut ecs::Manager) {\n\n 
sign::add_systems(m);\n\n}\n\n\n\npub enum BlockEntityType {\n\n Sign\n\n}\n\n\n\nimpl BlockEntityType {\n\n pub fn get_block_entity(bl: Block) -> Option<BlockEntityType> {\n\n match bl {\n\n Block::StandingSign{..} | Block::WallSign{..} => Some(BlockEntityType::Sign),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn create_entity(&self, m: &mut ecs::Manager, pos: Position) -> ecs::Entity {\n\n let e = m.create_entity();\n\n m.add_component_direct(e, pos);\n\n match *self {\n\n BlockEntityType::Sign => sign::init_entity(m, e),\n\n }\n\n e\n\n }\n\n}\n", "file_path": "src/entity/block_entity/mod.rs", "rank": 30, "score": 163662.8206806871 }, { "content": "pub fn add_systems(m: &mut ecs::Manager) {\n\n let sys = SignRenderer::new(m);\n\n m.add_render_system(sys);\n\n}\n\n\n", "file_path": "src/entity/block_entity/sign.rs", "rank": 31, "score": 163662.8206806871 }, { "content": "fn check_collisions(world: &world::World, position: &mut TargetPosition, last_position: &Vector3<f64>, bounds: Aabb3<f64>) -> (Aabb3<f64>, bool) {\n\n let mut bounds = bounds.add_v(position.position);\n\n\n\n let dir = position.position - last_position;\n\n\n\n let min_x = (bounds.min.x - 1.0) as i32;\n\n let min_y = (bounds.min.y - 1.0) as i32;\n\n let min_z = (bounds.min.z - 1.0) as i32;\n\n let max_x = (bounds.max.x + 1.0) as i32;\n\n let max_y = (bounds.max.y + 1.0) as i32;\n\n let max_z = (bounds.max.z + 1.0) as i32;\n\n\n\n let mut hit = false;\n\n for y in min_y .. max_y {\n\n for z in min_z .. max_z {\n\n for x in min_x .. 
max_x {\n\n let block = world.get_block(BPosition::new(x, y, z));\n\n if block.get_material().collidable {\n\n for bb in block.get_collision_boxes() {\n\n let bb = bb.add_v(cgmath::Vector3::new(x as f64, y as f64, z as f64));\n", "file_path": "src/entity/player.rs", "rank": 32, "score": 159561.7846173016 }, { "content": "pub fn create_local(m: &mut ecs::Manager) -> ecs::Entity {\n\n let entity = m.create_entity();\n\n m.add_component_direct(entity, Position::new(0.0, 0.0, 0.0));\n\n let mut tpos = TargetPosition::new(0.0, 0.0, 0.0);\n\n tpos.lerp_amount = 1.0 / 3.0;\n\n m.add_component_direct(entity, tpos);\n\n m.add_component_direct(entity, Rotation::new(0.0, 0.0));\n\n m.add_component_direct(entity, Velocity::new(0.0, 0.0, 0.0));\n\n m.add_component_direct(entity, Gamemode::Survival);\n\n m.add_component_direct(entity, Gravity::new());\n\n m.add_component_direct(entity, PlayerMovement::new());\n\n m.add_component_direct(entity, Bounds::new(Aabb3::new(\n\n Point3::new(-0.3, 0.0, -0.3),\n\n Point3::new(0.3, 1.8, 0.3)\n\n )));\n\n m.add_component_direct(entity, PlayerModel::new(\"\", false, false, true));\n\n m.add_component_direct(entity, Light::new());\n\n entity\n\n}\n\n\n", "file_path": "src/entity/player.rs", "rank": 33, "score": 157435.94970027395 }, { "content": "pub fn draw_buffers(bufs: &[Attachment]) {\n\n unsafe {\n\n gl::DrawBuffers(\n\n bufs.len() as i32,\n\n bufs.as_ptr()\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 34, "score": 154128.58519475858 }, { "content": "pub fn draw_arrays(ty: DrawType, offset: usize, count: usize) {\n\n unsafe {\n\n gl::DrawArrays(ty, offset as i32, count as i32);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 35, "score": 152230.1522847914 }, { "content": "/// Disables the passed flag.\n\npub fn disable(f: Flag) {\n\n unsafe {\n\n gl::Disable(f);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 36, "score": 151905.9860659574 }, { "content": "/// Enables the passed flag.\n\npub fn 
enable(f: Flag) {\n\n unsafe {\n\n gl::Enable(f);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 37, "score": 151905.9860659574 }, { "content": "fn twos_compliment(data: &mut Vec<u8>) {\n\n let mut carry = true;\n\n for i in (0..data.len()).rev() {\n\n data[i] = !data[i];\n\n if carry {\n\n carry = data[i] == 0xFF;\n\n data[i] = data[i].wrapping_add(1);\n\n }\n\n }\n\n}\n", "file_path": "src/protocol/mojang.rs", "rank": 38, "score": 149221.90227854758 }, { "content": "pub fn depth_func(f: Func) {\n\n unsafe {\n\n gl::DepthFunc(f);\n\n }\n\n}\n\n\n\n/// Flag is a setting that can be enabled or disabled on the context.\n\npub type Flag = u32;\n\n\n\npub const DEPTH_TEST: Flag = gl::DEPTH_TEST;\n\npub const CULL_FACE_FLAG: Flag = gl::CULL_FACE;\n\npub const STENCIL_TEST: Flag = gl::STENCIL_TEST;\n\npub const BLEND: Flag = gl::BLEND;\n\npub const MULTISAMPLE: Flag = gl::MULTISAMPLE;\n\n\n", "file_path": "src/gl/mod.rs", "rank": 39, "score": 149134.79583866015 }, { "content": "pub fn init_entity(m: &mut ecs::Manager, e: ecs::Entity) {\n\n m.add_component_direct(e, SignInfo {\n\n model: None,\n\n lines: [\n\n Component::Text(format::TextComponent::new(\"\")),\n\n Component::Text(format::TextComponent::new(\"\")),\n\n Component::Text(format::TextComponent::new(\"\")),\n\n Component::Text(format::TextComponent::new(\"\")),\n\n ],\n\n offset_x: 0.0,\n\n offset_y: 0.0,\n\n offset_z: 0.0,\n\n has_stand: false,\n\n rotation: 0.0,\n\n dirty: false,\n\n });\n\n}\n\n\n\npub struct SignInfo {\n\n model: Option<model::ModelKey>,\n", "file_path": "src/entity/block_entity/sign.rs", "rank": 40, "score": 148995.58192634422 }, { "content": "pub fn create_remote(m: &mut ecs::Manager, name: &str) -> ecs::Entity {\n\n let entity = m.create_entity();\n\n m.add_component_direct(entity, Position::new(0.0, 0.0, 0.0));\n\n m.add_component_direct(entity, TargetPosition::new(0.0, 0.0, 0.0));\n\n m.add_component_direct(entity, Rotation::new(0.0, 0.0));\n\n m.add_component_direct(entity, 
TargetRotation::new(0.0, 0.0));\n\n m.add_component_direct(entity, Velocity::new(0.0, 0.0, 0.0));\n\n m.add_component_direct(entity, Bounds::new(Aabb3::new(\n\n Point3::new(-0.3, 0.0, -0.3),\n\n Point3::new(0.3, 1.8, 0.3)\n\n )));\n\n m.add_component_direct(entity, PlayerModel::new(name, true, true, false));\n\n m.add_component_direct(entity, Light::new());\n\n entity\n\n}\n\n\n\n\n\npub struct PlayerModel {\n\n model: Option<model::ModelKey>,\n\n skin_url: Option<String>,\n", "file_path": "src/entity/player.rs", "rank": 41, "score": 146514.33831027552 }, { "content": "fn rotate_direction(val: Direction, offset: i32, rots: &[Direction], invalid: &[Direction]) -> Direction {\n\n for d in invalid {\n\n if *d == val {\n\n return val;\n\n }\n\n }\n\n let pos = rots.iter()\n\n .position(|v| *v == val)\n\n .unwrap_or(0) as i32;\n\n rots[(rots.len() as i32 + pos + offset) as usize % rots.len()]\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct StateModel {\n\n variants: HashMap<String, Variants, BuildHasherDefault<FNVHash>>,\n\n multipart: Vec<MultipartRule>,\n\n}\n\n\n\nimpl StateModel {\n\n pub fn get_variants(&self, name: &str) -> Option<&Variants> {\n\n self.variants.get(name)\n\n }\n\n}\n\n\n", "file_path": "src/model/mod.rs", "rank": 42, "score": 146042.4686921853 }, { "content": "pub trait Screen {\n\n // Called once\n\n fn init(&mut self, _renderer: &mut render::Renderer, _ui_container: &mut ui::Container) {\n\n }\n\n fn deinit(&mut self, _renderer: &mut render::Renderer, _ui_container: &mut ui::Container) {\n\n }\n\n\n\n // May be called multiple times\n\n fn on_active(&mut self, renderer: &mut render::Renderer, ui_container: &mut ui::Container);\n\n fn on_deactive(&mut self, renderer: &mut render::Renderer, ui_container: &mut ui::Container);\n\n\n\n // Called every frame the screen is active\n\n fn tick(&mut self,\n\n delta: f64,\n\n renderer: &mut render::Renderer,\n\n ui_container: &mut ui::Container) -> Option<Box<Screen>>;\n\n\n\n // Events\n\n fn on_scroll(&mut 
self, _x: f64, _y: f64) {\n\n }\n\n\n\n fn is_closable(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n", "file_path": "src/screen/mod.rs", "rank": 43, "score": 138637.97942237277 }, { "content": "/// A system processes entities\n\npub trait System {\n\n fn filter(&self) -> &Filter;\n\n fn update(&mut self, m: &mut Manager, world: &mut world::World, renderer: &mut render::Renderer);\n\n\n\n fn entity_added(&mut self, _m: &mut Manager, _e: Entity, _world: &mut world::World, _renderer: &mut render::Renderer) {\n\n }\n\n\n\n fn entity_removed(&mut self, _m: &mut Manager, _e: Entity, _world: &mut world::World, _renderer: &mut render::Renderer) {\n\n }\n\n}\n\n\n", "file_path": "src/ecs/mod.rs", "rank": 44, "score": 138637.97942237277 }, { "content": "pub trait Var {\n\n fn serialize(&self, val: &Box<Any>) -> String;\n\n fn deserialize(&self, input: &str) -> Box<Any>;\n\n fn description(&self) -> &'static str;\n\n fn can_serialize(&self) -> bool;\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Vars {\n\n names: HashMap<String, &'static str>,\n\n vars: HashMap<&'static str, Box<Var>>,\n\n var_values: HashMap<&'static str, RefCell<Box<Any>>>,\n\n}\n\n\n\nimpl Vars {\n\n pub fn new() -> Vars { Default::default() }\n\n\n\n pub fn register<T: Sized + Any>(&mut self, var: CVar<T>)\n\n where CVar<T>: Var\n\n {\n", "file_path": "src/console/mod.rs", "rank": 45, "score": 138637.97942237277 }, { "content": "fn can_connect_redstone<W: WorldAccess>(world: &W, pos: Position, dir: Direction) -> RedstoneSide {\n\n let shift_pos = pos.shift(dir);\n\n let block = world.get_block(shift_pos);\n\n\n\n if block.get_material().should_cull_against {\n\n let side_up = world.get_block(shift_pos.shift(Direction::Up));\n\n let up = world.get_block(pos.shift(Direction::Up));\n\n\n\n if match side_up { Block::RedstoneWire{..} => true, _ => false,} && !up.get_material().should_cull_against {\n\n return RedstoneSide::Up;\n\n }\n\n\n\n return RedstoneSide::None;\n\n }\n\n\n\n let side_down = 
world.get_block(shift_pos.shift(Direction::Down));\n\n if match block { Block::RedstoneWire{..} => true, _ => false,} || match side_down { Block::RedstoneWire{..} => true, _ => false,} {\n\n return RedstoneSide::Side;\n\n }\n\n RedstoneSide::None\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 46, "score": 138537.15490034778 }, { "content": "fn get_stair_info<W: WorldAccess>(world: &W, pos: Position) -> Option<(Direction, BlockHalf)> {\n\n match world.get_block(pos) {\n\n Block::OakStairs{facing, half, ..} |\n\n Block::StoneStairs{facing, half, ..} |\n\n Block::BrickStairs{facing, half, ..} |\n\n Block::StoneBrickStairs{facing, half, ..} |\n\n Block::NetherBrickStairs{facing, half, ..} |\n\n Block::SandstoneStairs{facing, half, ..} |\n\n Block::SpruceStairs{facing, half, ..} |\n\n Block::BirchStairs{facing, half, ..} |\n\n Block::JungleStairs{facing, half, ..} |\n\n Block::QuartzStairs{facing, half, ..} |\n\n Block::AcaciaStairs{facing, half, ..} |\n\n Block::DarkOakStairs{facing, half, ..} |\n\n Block::RedSandstoneStairs{facing, half, ..} |\n\n Block::PurpurStairs{facing, half, ..} => Some((facing, half)),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 47, "score": 138537.15490034778 }, { "content": "fn update_stair_shape<W: WorldAccess>(world: &W, pos: Position, facing: Direction) -> StairShape {\n\n if let Some((other_facing, _)) = get_stair_info(world, pos.shift(facing)) {\n\n if other_facing != facing && other_facing != facing.opposite() {\n\n if other_facing == facing.clockwise() {\n\n return StairShape::OuterRight;\n\n }\n\n\n\n return StairShape::OuterLeft;\n\n }\n\n }\n\n\n\n if let Some((other_facing, _)) = get_stair_info(world, pos.shift(facing.opposite())) {\n\n if other_facing != facing && other_facing != facing.opposite() {\n\n if other_facing == facing.clockwise() {\n\n return StairShape::InnerRight;\n\n }\n\n\n\n return StairShape::InnerLeft;\n\n }\n\n }\n\n\n\n StairShape::Straight\n\n}\n\n\n", "file_path": 
"blocks/src/lib.rs", "rank": 48, "score": 138537.15490034778 }, { "content": "pub trait ElementHolder {\n\n fn add(&mut self, el: Element, auto_free: bool);\n\n}\n\n\n\npub struct Container {\n\n elements: Vec<Element>,\n\n focusable_elements: Vec<WeakElement>,\n\n\n\n pub mode: Mode,\n\n last_mode: Mode,\n\n version: usize,\n\n\n\n last_sw: f64,\n\n last_sh: f64,\n\n last_width: f64,\n\n last_height: f64,\n\n}\n\n\n\nimpl Container {\n\n pub fn new() -> Container {\n", "file_path": "src/ui/mod.rs", "rank": 49, "score": 135632.81554005516 }, { "content": "pub trait WorldAccess {\n\n fn get_block(&self, pos: Position) -> Block;\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! create_ids {\n\n ($t:ty, ) => ();\n\n ($t:ty, prev($prev:ident), $name:ident) => (\n\n #[allow(non_upper_case_globals)]\n\n pub const $name: $t = $prev + 1;\n\n );\n\n ($t:ty, prev($prev:ident), $name:ident, $($n:ident),+) => (\n\n #[allow(non_upper_case_globals)]\n\n pub const $name: $t = $prev + 1;\n\n create_ids!($t, prev($name), $($n),+);\n\n );\n\n ($t:ty, $name:ident, $($n:ident),+) => (\n\n #[allow(non_upper_case_globals)]\n\n pub const $name: $t = 0;\n", "file_path": "blocks/src/lib.rs", "rank": 50, "score": 135632.81554005516 }, { "content": "pub fn test_block(world: &world::World, pos: Position, s: cgmath::Vector3<f64>, d: cgmath::Vector3<f64>) -> (bool, Option<(Position, block::Block, Direction, cgmath::Vector3<f64>)>) {\n\n let block = world.get_block(pos);\n\n let posf = cgmath::Vector3::new(pos.x as f64, pos.y as f64, pos.z as f64);\n\n for bound in block.get_collision_boxes() {\n\n let bound = bound.add_v(posf);\n\n if let Some(hit) = intersects_line(bound, s, d) {\n\n let cursor = hit - posf;\n\n let face = find_face(bound, hit);\n\n return (true, Some((pos, block, face, cursor)));\n\n }\n\n }\n\n (false, None)\n\n}\n\n\n", "file_path": "src/server/target.rs", "rank": 51, "score": 133304.4864738721 }, { "content": "fn get_lava_level(snapshot: &world::Snapshot, x: 
i32, y: i32, z: i32) -> Option<i32> {\n\n match snapshot.get_block(x, y, z) {\n\n block::Block::Lava{level} | block::Block::FlowingLava{level} => Some(level as i32),\n\n _ => None,\n\n }\n\n}\n", "file_path": "src/model/liquid.rs", "rank": 52, "score": 132548.31242380498 }, { "content": "fn get_water_level(snapshot: &world::Snapshot, x: i32, y: i32, z: i32) -> Option<i32> {\n\n match snapshot.get_block(x, y, z) {\n\n block::Block::Water{level} | block::Block::FlowingWater{level} => Some(level as i32),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "src/model/liquid.rs", "rank": 53, "score": 132548.31242380498 }, { "content": "pub fn unbind_framebuffer() {\n\n unsafe {\n\n gl::BindFramebuffer(gl::FRAMEBUFFER, 0);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 54, "score": 130250.21931221508 }, { "content": "// Helper methods\n\npub fn append_box(\n\n verts: &mut Vec<Vertex>,\n\n x: f32, y: f32, z: f32,\n\n w: f32, h: f32, d: f32, textures: [Option<super::Texture>; 6]\n\n) {\n\n append_box_texture_scale(verts, x, y, z, w, h, d, textures, [\n\n [1.0, 1.0],\n\n [1.0, 1.0],\n\n [1.0, 1.0],\n\n [1.0, 1.0],\n\n [1.0, 1.0],\n\n [1.0, 1.0]\n\n ]);\n\n}\n", "file_path": "src/render/model.rs", "rank": 55, "score": 130250.21931221508 }, { "content": "pub fn blit_framebuffer(\n\n sx0: i32, sy0: i32, sx1: i32, sy1: i32,\n\n dx0: i32, dy0: i32, dx1: i32, dy1: i32,\n\n mask: ClearFlags, filter: TextureValue) {\n\n unsafe {\n\n gl::BlitFramebuffer(\n\n sx0, sy0, sx1, sy1,\n\n dx0, dy0, dx1, dy1,\n\n mask.internal(), filter as u32\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 56, "score": 130250.21931221508 }, { "content": "fn pane_collision(north: bool, south: bool, east: bool, west: bool) -> Vec<Aabb3<f64>> {\n\n let mut collision = vec![Aabb3::new(\n\n Point3::new(7.0/16.0, 0.0, 7.0/16.0),\n\n Point3::new(9.0/16.0, 1.0, 9.0/16.0))\n\n ];\n\n\n\n if north {\n\n collision.push(Aabb3::new(\n\n Point3::new(7.0/16.0, 0.0, 0.0),\n\n Point3::new(9.0/16.0, 
1.0, 9.0/16.0))\n\n );\n\n }\n\n\n\n if south {\n\n collision.push(Aabb3::new(\n\n Point3::new(7.0/16.0, 0.0, 7.0/16.0),\n\n Point3::new(9.0/16.0, 1.0, 1.0))\n\n );\n\n }\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 57, "score": 129047.23893143208 }, { "content": "fn fence_collision(north: bool, south: bool, west: bool, east: bool) -> Vec<Aabb3<f64>> {\n\n let mut collision = vec![Aabb3::new(\n\n Point3::new(3.0/8.0, 0.0, 3.0/8.0),\n\n Point3::new(5.0/8.0, 1.5, 5.0/8.0))\n\n ];\n\n\n\n if north {\n\n collision.push(Aabb3::new(\n\n Point3::new(3.0/8.0, 0.0, 0.0),\n\n Point3::new(5.0/8.0, 1.5, 3.0/8.0))\n\n );\n\n }\n\n\n\n if south {\n\n collision.push(Aabb3::new(\n\n Point3::new(3.0/8.0, 0.0, 5.0/8.0),\n\n Point3::new(5.0/8.0, 1.5, 1.0))\n\n );\n\n }\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 58, "score": 129047.23893143208 }, { "content": "pub trait Pack: Sync + Send {\n\n fn open(&self, name: &str) -> Option<Box<io::Read>>;\n\n}\n\n\n\npub struct Manager {\n\n packs: Vec<Box<Pack>>,\n\n version: usize,\n\n\n\n vanilla_chan: Option<mpsc::Receiver<bool>>,\n\n vanilla_assets_chan: Option<mpsc::Receiver<bool>>,\n\n vanilla_progress: Arc<Mutex<Progress>>,\n\n}\n\n\n\npub struct ManagerUI {\n\n progress_ui: Vec<ProgressUI>,\n\n num_tasks: isize,\n\n}\n\n\n", "file_path": "src/resources.rs", "rank": 59, "score": 128976.05789059703 }, { "content": "fn calculate_light(snapshot: &world::Snapshot, orig_x: i32, orig_y: i32, orig_z: i32,\n\n x: f64, y: f64, z: f64, face: Direction, smooth: bool, force: bool) -> (u16, u16) {\n\n use std::cmp::max;\n\n use world::block;\n\n let (ox, oy, oz) = face.get_offset();\n\n\n\n let s_block_light = snapshot.get_block_light(orig_x + ox, orig_y + oy, orig_z + oz);\n\n let s_sky_light = snapshot.get_sky_light(orig_x + ox, orig_y + oy, orig_z + oz);\n\n if !smooth {\n\n return ((s_block_light as u16) * 4000, (s_sky_light as u16) * 4000);\n\n }\n\n\n\n let mut block_light = 0u32;\n\n let mut sky_light = 0u32;\n\n let mut count = 
0;\n\n\n\n let s_block_light = max(((s_block_light as i8) - 8), 0) as u8;\n\n let s_sky_light = max(((s_sky_light as i8) - 8), 0) as u8;\n\n\n\n let dx = (ox as f64) * 0.6;\n", "file_path": "src/model/mod.rs", "rank": 60, "score": 128027.94827415222 }, { "content": "pub fn unbind_framebuffer_read() {\n\n unsafe {\n\n gl::BindFramebuffer(gl::READ_FRAMEBUFFER, 0);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 61, "score": 127479.02908491783 }, { "content": "pub fn unbind_framebuffer_draw() {\n\n unsafe {\n\n gl::BindFramebuffer(gl::DRAW_FRAMEBUFFER, 0);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 62, "score": 127479.02908491783 }, { "content": "pub fn append_box_texture_scale(\n\n verts: &mut Vec<Vertex>,\n\n x: f32, y: f32, z: f32,\n\n w: f32, h: f32, d: f32,\n\n textures: [Option<super::Texture>; 6], texture_scale: [[f64; 2]; 6]) {\n\n for dir in Direction::all() {\n\n let tex = textures[dir.index()].clone();\n\n if tex.is_none() {\n\n continue;\n\n }\n\n let tex = tex.unwrap();\n\n for vert in BlockVertex::face_by_direction(dir) {\n\n let (rr, gg, bb) = if dir == Direction::West || dir == Direction::East {\n\n ((255.0 * 0.8) as u8, (255.0 * 0.8) as u8, (255.0 * 0.8) as u8)\n\n } else {\n\n (255, 255, 255)\n\n };\n\n verts.push(Vertex {\n\n x: vert.x * w + x,\n\n y: vert.y * h + y,\n", "file_path": "src/render/model.rs", "rank": 63, "score": 124904.25131285953 }, { "content": "fn fence_gate_data(facing: Direction, in_wall: bool, open: bool, powered: bool) -> Option<usize> {\n\n if in_wall || powered { return None; }\n\n\n\n Some(facing.horizontal_index() | (if open { 0x4 } else { 0x0 }))\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 64, "score": 122960.26583824417 }, { "content": "pub fn read_buffer(a: Attachment) {\n\n unsafe {\n\n gl::ReadBuffer(a);\n\n }\n\n}\n\n\n\npub type TargetBuffer = u32;\n\npub const COLOR: TargetBuffer = gl::COLOR;\n\n\n", "file_path": "src/gl/mod.rs", "rank": 65, "score": 121381.14416566267 }, { 
"content": "/// Clears the buffers specified by the passed flags.\n\npub fn clear(flags: ClearFlags) {\n\n unsafe { gl::Clear(flags.internal()) }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 66, "score": 118806.36639360437 }, { "content": "/// Sets the texture slot with the passed id as the\n\n/// currently active one.\n\npub fn active_texture(id: u32) {\n\n unsafe {\n\n gl::ActiveTexture(gl::TEXTURE0 + id);\n\n }\n\n}\n\n\n\n/// `Factor` is used in blending\n\npub type Factor = u32;\n\npub const SRC_ALPHA: Factor = gl::SRC_ALPHA;\n\npub const ONE_MINUS_SRC_ALPHA: Factor = gl::ONE_MINUS_SRC_ALPHA;\n\npub const ONE_FACTOR: Factor = gl::ONE;\n\npub const ZERO_FACTOR: Factor = gl::ZERO;\n\n\n", "file_path": "src/gl/mod.rs", "rank": 67, "score": 118806.36639360437 }, { "content": "/// Sets the face to be culled by the gpu.\n\npub fn cull_face(face: Face) {\n\n unsafe {\n\n gl::CullFace(face);\n\n }\n\n}\n\n\n\n// FaceDirection is used to specify an order of vertices, normally\n\n// used to set which is considered to be the front face.\n\npub type FaceDirection = u32;\n\npub const CLOCK_WISE: FaceDirection = gl::CW;\n\npub const COUNTER_CLOCK_WISE: FaceDirection = gl::CCW;\n\n\n", "file_path": "src/gl/mod.rs", "rank": 68, "score": 118806.36639360437 }, { "content": "fn can_connect_glasspane(block: Block) -> bool {\n\n match block {\n\n Block::Glass{..} |\n\n Block::StainedGlass{..} |\n\n Block::GlassPane{..} |\n\n Block::StainedGlassPane{..} => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 69, "score": 118110.89503628784 }, { "content": "fn can_connect_fence(block: Block) -> bool {\n\n match block {\n\n Block::Fence{..} |\n\n Block::SpruceFence{..} |\n\n Block::BirchFence{..} |\n\n Block::JungleFence{..} |\n\n Block::DarkOakFence{..} |\n\n Block::AcaciaFence{..} |\n\n Block::FenceGate{..} |\n\n Block::SpruceFenceGate{..} |\n\n Block::BirchFenceGate{..} |\n\n Block::JungleFenceGate{..} |\n\n Block::DarkOakFenceGate{..} |\n\n 
Block::AcaciaFenceGate{..} => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 70, "score": 118110.89503628784 }, { "content": "fn update_double_plant_state<W: WorldAccess>(world: &W, pos: Position, ohalf: BlockHalf, ovariant: DoublePlantVariant) -> (BlockHalf, DoublePlantVariant) {\n\n if ohalf != BlockHalf::Upper { return (ohalf, ovariant); }\n\n\n\n match world.get_block(pos.shift(Direction::Down)) {\n\n Block::DoublePlant{variant, ..} => (ohalf, variant),\n\n _ => (ohalf, ovariant),\n\n }\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 71, "score": 118072.76559893313 }, { "content": "/// Sets the direction of vertices used to specify the\n\n/// front face (e.g. for culling).\n\npub fn front_face(dir: FaceDirection) {\n\n unsafe { gl::FrontFace(dir) }\n\n}\n\n\n\n/// `Type` is a type of data used by various operations.\n\npub type Type = u32;\n\npub const UNSIGNED_BYTE: Type = gl::UNSIGNED_BYTE;\n\npub const UNSIGNED_SHORT: Type = gl::UNSIGNED_SHORT;\n\npub const UNSIGNED_INT: Type = gl::UNSIGNED_INT;\n\npub const SHORT: Type = gl::SHORT;\n\npub const FLOAT: Type = gl::FLOAT;\n\n\n\n/// `TextureTarget` is a target were a texture can be bound to\n\npub type TextureTarget = u32;\n\n\n\npub const TEXTURE_2D: TextureTarget = gl::TEXTURE_2D;\n\npub const TEXTURE_2D_MULTISAMPLE: TextureTarget = gl::TEXTURE_2D_MULTISAMPLE;\n\npub const TEXTURE_2D_ARRAY: TextureTarget = gl::TEXTURE_2D_ARRAY;\n\npub const TEXTURE_3D: TextureTarget = gl::TEXTURE_3D;\n\n\n", "file_path": "src/gl/mod.rs", "rank": 72, "score": 116412.53402674322 }, { "content": "#[test]\n\nfn test_map() {\n\n let mut map = Map::new(4096, 4);\n\n for i in 0..4096 {\n\n for j in 0..16 {\n\n map.set(i, j);\n\n if map.get(i) != j {\n\n panic!(\"Fail\");\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/types/bit/map.rs", "rank": 73, "score": 114886.43654752795 }, { "content": "fn fence_gate_collision(facing: Direction, in_wall: bool, open: bool) -> Vec<Aabb3<f64>> {\n\n if 
open { return vec![]; }\n\n\n\n let (min_x, min_y, min_z, max_x, max_y, max_z) = if in_wall {\n\n match facing.axis() {\n\n Axis::Z => (0.0, 0.0, 3.0/8.0, 1.0, 13.0/16.0, 5.0/8.0),\n\n Axis::X => (3.0/8.0, 0.0, 0.0, 5.0/8.0, 13.0/16.0, 1.0),\n\n _ => unreachable!(),\n\n }\n\n } else {\n\n match facing.axis() {\n\n Axis::Z => (0.0, 0.0, 3.0/8.0, 1.0, 1.0, 5.0/8.0),\n\n Axis::X => (3.0/8.0, 0.0, 0.0, 5.0/8.0, 1.0, 1.0),\n\n _ => unreachable!(),\n\n }\n\n };\n\n\n\n vec![Aabb3::new(\n\n Point3::new(min_x, min_y, min_z),\n\n Point3::new(max_x, max_y, max_z)\n\n )]\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 74, "score": 114543.95328625417 }, { "content": "/// Inits the gl library. This should be called once a context is ready.\n\npub fn init(vid: & sdl2::VideoSubsystem) {\n\n gl::load_with(|s| vid.gl_get_proc_address(s) as *const _);\n\n}\n\n\n\n/// Dsed to specify how the vertices will be handled\n\n/// to draw.\n\npub type DrawType = u32;\n\n\n\n/// Treats each set of 3 vertices as a triangle\n\npub const TRIANGLES: DrawType = gl::TRIANGLES;\n\n/// Means the previous vertex connects to the next\n\n/// one in a continuous strip.\n\npub const LINE_STRIP: DrawType = gl::LINE_STRIP;\n\n/// Treats each set of 2 vertices as a line\n\npub const LINES: DrawType = gl::LINES;\n\n/// Treats each vertex as a point\n\npub const POINTS: DrawType = gl::POINTS;\n\n\n", "file_path": "src/gl/mod.rs", "rank": 75, "score": 113840.77947861949 }, { "content": "#[test]\n\nfn test_map_odd() {\n\n for size in 1..16 {\n\n let mut map = Map::new(64 * 3, size);\n\n let max = (1 << size) - 1;\n\n for i in 0..64 * 3 {\n\n for j in 0..max {\n\n map.set(i, j);\n\n if map.get(i) != j {\n\n panic!(\"Index: {} wanted {} and got {}\", i, j, map.get(i));\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Map {\n\n pub fn new(len: usize, size: usize) -> Map {\n\n let mut map = Map {\n\n bit_size: size,\n\n length: len,\n", "file_path": "src/types/bit/map.rs", "rank": 76, "score": 112302.5436516313 }, { 
"content": "#[test]\n\nfn test_color_from() {\n\n let test = Color::from_string(&\"#FF0000\".to_owned());\n\n match test {\n\n Color::RGB(r, g, b) => assert!(r == 255 && g == 0 && b == 0),\n\n _ => panic!(\"Wrong type\"),\n\n }\n\n let test = Color::from_string(&\"#123456\".to_owned());\n\n match test {\n\n Color::RGB(r, g, b) => assert!(r == 0x12 && g == 0x34 && b == 0x56),\n\n _ => panic!(\"Wrong type\"),\n\n }\n\n let test = Color::from_string(&\"red\".to_owned());\n\n match test {\n\n Color::Red => {}\n\n _ => panic!(\"Wrong type\"),\n\n }\n\n let test = Color::from_string(&\"dark_blue\".to_owned());\n\n match test {\n\n Color::DarkBlue => {}\n\n _ => panic!(\"Wrong type\"),\n\n }\n\n}\n\n\n\nconst LEGACY_CHAR: char = '§';\n\n\n", "file_path": "src/format.rs", "rank": 77, "score": 109546.00405022848 }, { "content": "/// Sets the factors to be used when blending.\n\npub fn blend_func(s_factor: Factor, d_factor: Factor) {\n\n unsafe {\n\n gl::BlendFunc(s_factor, d_factor);\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 78, "score": 109295.85591946242 }, { "content": "struct FormatState<'a> {\n\n max_width: f64,\n\n lines: usize,\n\n offset: f64,\n\n width: f64,\n\n text: Vec<Element>,\n\n renderer: &'a render::Renderer,\n\n}\n\n\n\n\n\nimpl <'a> ElementHolder for FormatState<'a> {\n\n fn add(&mut self, el: Element, _: bool) {\n\n self.text.push(el);\n\n }\n\n}\n\n\n\nimpl <'a> FormatState<'a> {\n\n fn build(&mut self, c: &format::Component, color: format::Color) {\n\n match *c {\n\n format::Component::Text(ref txt) => {\n", "file_path": "src/ui/mod.rs", "rank": 79, "score": 108530.45869421761 }, { "content": "fn calculate_biome(snapshot: &world::Snapshot, x: i32, z: i32, img: &image::DynamicImage) -> (u8, u8, u8) {\n\n use std::cmp::{min, max};\n\n let mut count = 0;\n\n let mut r = 0;\n\n let mut g = 0;\n\n let mut b = 0;\n\n for xx in -1 .. 2 {\n\n for zz in -1 .. 
2 {\n\n let bi = snapshot.get_biome(x+xx, z+zz);\n\n let color_index = bi.get_color_index();\n\n let ix = color_index & 0xFF;\n\n let iy = color_index >> 8;\n\n\n\n let ix = min(max(ix, 0), 255);\n\n let iy = min(max(iy, 0), 255);\n\n\n\n let col = img.get_pixel(ix as u32, iy as u32);\n\n let col = bi.process_color(col);\n\n r += col.data[0] as u32;\n\n g += col.data[1] as u32;\n\n b += col.data[2] as u32;\n\n count += 1;\n\n }\n\n }\n\n ((r/count) as u8, (g/count) as u8, (b/count) as u8)\n\n}\n\n\n", "file_path": "src/model/mod.rs", "rank": 80, "score": 106375.58874387186 }, { "content": "fn door_data(facing: Direction, half: DoorHalf, hinge: Side, open: bool, powered: bool) -> Option<usize> {\n\n match half {\n\n DoorHalf::Upper => {\n\n if facing == Direction::North && open {\n\n Some(0x8\n\n | (if hinge == Side::Right { 0x1 } else { 0x0 })\n\n | (if powered { 0x2 } else { 0x0 }))\n\n } else {\n\n None\n\n }\n\n },\n\n DoorHalf::Lower => {\n\n if hinge == Side::Left && !powered {\n\n Some(facing.clockwise().horizontal_index() | (if open { 0x4 } else { 0x0 }))\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 81, "score": 105761.14709566544 }, { "content": "fn piston_collision(extended: bool, facing: Direction) -> Vec<Aabb3<f64>> {\n\n let (min_x, min_y, min_z, max_x, max_y, max_z) = if extended {\n\n match facing {\n\n Direction::Up => (0.0, 0.0, 0.0, 1.0, 0.75, 1.0),\n\n Direction::Down => (0.0, 0.25, 0.0, 1.0, 1.0, 1.0),\n\n Direction::North => (0.0, 0.0, 0.25, 1.0, 1.0, 1.0),\n\n Direction::South => (0.0, 0.0, 0.0, 1.0, 1.0, 0.75),\n\n Direction::West => (0.25, 0.0, 0.0, 1.0, 1.0, 0.75),\n\n Direction::East => (0.0, 0.0, 0.0, 0.75, 1.0, 1.0),\n\n _ => unreachable!(),\n\n }\n\n } else {\n\n (0.0, 0.0, 0.0, 1.0, 1.0, 1.0)\n\n };\n\n\n\n vec![Aabb3::new(\n\n Point3::new(min_x, min_y, min_z),\n\n Point3::new(max_x, max_y, max_z)\n\n )]\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 82, "score": 
102643.7159116293 }, { "content": "pub fn create_program(vertex: &str, fragment: &str) -> gl::Program {\n\n let program = gl::Program::new();\n\n\n\n let v = gl::Shader::new(gl::VERTEX_SHADER);\n\n v.set_source(vertex);\n\n v.compile();\n\n\n\n if v.get_parameter(gl::COMPILE_STATUS) == 0 {\n\n println!(\"Src: {}\", vertex);\n\n panic!(\"Shader error: {}\", v.get_info_log());\n\n } else {\n\n let log = v.get_info_log();\n\n let log = log.trim().trim_matches('\\u{0}');\n\n if !log.is_empty() {\n\n println!(\"{}\", log);\n\n }\n\n }\n\n\n\n let f = gl::Shader::new(gl::FRAGMENT_SHADER);\n\n f.set_source(fragment);\n", "file_path": "src/render/shaders.rs", "rank": 83, "score": 101238.78694050272 }, { "content": "#[allow(unused_must_use)]\n\npub fn generate_element_buffer(size: usize) -> (Vec<u8>, gl::Type) {\n\n let mut ty = gl::UNSIGNED_SHORT;\n\n let mut data = if (size / 6) * 4 * 3 >= u16::max_value() as usize {\n\n ty = gl::UNSIGNED_INT;\n\n Vec::with_capacity(size * 4)\n\n } else {\n\n Vec::with_capacity(size * 2)\n\n };\n\n for i in 0..size / 6 {\n\n for val in &[0, 1, 2, 2, 1, 3] {\n\n if ty == gl::UNSIGNED_INT {\n\n data.write_u32::<NativeEndian>((i as u32) * 4 + val);\n\n } else {\n\n data.write_u16::<NativeEndian>((i as u16) * 4 + (*val as u16));\n\n }\n\n }\n\n }\n\n\n\n (data, ty)\n\n}\n", "file_path": "src/render/mod.rs", "rank": 84, "score": 99395.97473759139 }, { "content": "pub fn bind_frag_data_location(p: &Program, cn: u32, name: &str) {\n\n unsafe {\n\n let name_c = ffi::CString::new(name).unwrap();\n\n gl::BindFragDataLocation(p.0, cn, name_c.as_ptr());\n\n }\n\n}\n\n\n", "file_path": "src/gl/mod.rs", "rank": 85, "score": 99391.00606128552 }, { "content": "fn door_collision(facing: Direction, hinge: Side, open: bool) -> Vec<Aabb3<f64>> {\n\n use std::f64::consts::PI;\n\n let mut bounds = Aabb3::new(\n\n Point3::new(0.0, 0.0, 0.0),\n\n Point3::new(1.0, 1.0, 3.0 / 16.0)\n\n );\n\n let mut angle = match facing {\n\n Direction::South => 0.0,\n\n 
Direction::West => PI * 0.5,\n\n Direction::North => PI,\n\n Direction::East => PI * 1.5,\n\n _ => 0.0,\n\n };\n\n angle += if open {\n\n PI * 0.5\n\n } else {\n\n 0.0\n\n } * match hinge { Side::Left => 1.0, Side::Right => -1.0 };\n\n\n\n let c = angle.cos();\n", "file_path": "blocks/src/lib.rs", "rank": 86, "score": 95617.79744992379 }, { "content": "fn trapdoor_collision(facing: Direction, half: BlockHalf, open: bool) -> Vec<Aabb3<f64>> {\n\n let (min_x, min_y, min_z, max_x, max_y, max_z) = if open {\n\n match facing {\n\n Direction::North => (0.0, 0.0, 3.0/16.0, 1.0, 1.0, 1.0),\n\n Direction::South => (0.0, 0.0, 0.0, 1.0, 1.0, 3.0/16.0),\n\n Direction::West => (3.0/16.0, 0.0, 0.0, 1.0, 1.0, 1.0),\n\n Direction::East => (0.0, 0.0, 0.0, 3.0/16.0, 1.0, 1.0),\n\n _ => unreachable!(),\n\n }\n\n } else {\n\n match half {\n\n BlockHalf::Bottom => (0.0, 0.0, 0.0, 1.0, 3.0/16.0, 1.0),\n\n BlockHalf::Top => (0.0, 3.0/16.0, 0.0, 1.0, 1.0, 1.0),\n\n _ => unreachable!(),\n\n }\n\n };\n\n\n\n vec![Aabb3::new(\n\n Point3::new(min_x, min_y, min_z),\n\n Point3::new(max_x, max_y, max_z))\n\n ]\n\n}\n\n\n", "file_path": "blocks/src/lib.rs", "rank": 87, "score": 93973.60311836156 }, { "content": "fn calculate_relative_teleport(flag: TeleportFlag, flags: u8, base: f64, val: f64) -> f64 {\n\n if (flags & (flag as u8)) == 0 {\n\n val\n\n } else {\n\n base + val\n\n }\n\n}\n", "file_path": "src/server/mod.rs", "rank": 88, "score": 93037.83739087317 }, { "content": "pub fn blend_func_separate(s_factor_rgb: Factor, d_factor_rgb: Factor, s_factor_a: Factor, d_factor_a: Factor) {\n\n unsafe {\n\n gl::BlendFuncSeparate(s_factor_rgb, d_factor_rgb, s_factor_a, d_factor_a);\n\n }\n\n}\n\n\n\n// Face specifies a face to act on.\n\npub type Face = u32;\n\npub const BACK: Face = gl::BACK;\n\npub const FRONT: Face = gl::FRONT;\n\n\n", "file_path": "src/gl/mod.rs", "rank": 89, "score": 89668.6431925087 }, { "content": "#[derive(Clone)]\n\nenum Rule {\n\n Match(String, String),\n\n 
Or(Vec<Vec<Rule>>),\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Variants {\n\n models: Vec<Model>,\n\n}\n\n\n\nimpl Variants {\n\n fn choose_model<R: Rng>(&self, rng: &mut R) -> &Model {\n\n // TODO: Weighted random\n\n rng.choose(&self.models).unwrap()\n\n }\n\n}\n\n\n", "file_path": "src/model/mod.rs", "rank": 90, "score": 74558.08445320683 }, { "content": "struct Progress {\n\n tasks: Vec<Task>,\n\n}\n\n\n", "file_path": "src/resources.rs", "rank": 91, "score": 74444.07037925693 }, { "content": "struct Task {\n\n task_name: String,\n\n task_file: String,\n\n total: u64,\n\n progress: u64,\n\n}\n\n\n\nunsafe impl Sync for Manager {}\n\n\n\nimpl Manager {\n\n pub fn new() -> (Manager, ManagerUI) {\n\n let mut m = Manager {\n\n packs: Vec::new(),\n\n version: 0,\n\n vanilla_chan: None,\n\n vanilla_assets_chan: None,\n\n vanilla_progress: Arc::new(Mutex::new(Progress {\n\n tasks: vec![],\n\n })),\n\n };\n", "file_path": "src/resources.rs", "rank": 92, "score": 74444.07037925693 }, { "content": "#[derive(Clone, Copy, PartialEq, Eq)]\n\nenum LightType {\n\n Block,\n\n Sky\n\n}\n\n\n\nimpl LightType {\n\n fn get_light(self, world: &World, pos: Position) -> u8 {\n\n match self {\n\n LightType::Block => world.get_block_light(pos),\n\n LightType::Sky => world.get_sky_light(pos),\n\n }\n\n }\n\n fn set_light(self, world: &mut World, pos: Position, light: u8) {\n\n match self {\n\n LightType::Block => world.set_block_light(pos, light),\n\n LightType::Sky => world.set_sky_light(pos, light),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/world/mod.rs", "rank": 93, "score": 72926.07036877141 }, { "content": "#[derive(Debug)]\n\nenum BuiltinType {\n\n False,\n\n Generated,\n\n Entity,\n\n Compass,\n\n Clock\n\n}\n\n\n", "file_path": "src/model/mod.rs", "rank": 94, "score": 72926.07036877141 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nenum TeleportFlag {\n\n RelX = 0b00001,\n\n RelY = 0b00010,\n\n RelZ = 0b00100,\n\n RelYaw = 0b01000,\n\n RelPitch = 0b10000,\n\n}\n\n\n", 
"file_path": "src/server/mod.rs", "rank": 95, "score": 72926.07036877141 }, { "content": "struct ProgressUI {\n\n task_name: String,\n\n task_file: String,\n\n position: f64,\n\n closing: bool,\n\n progress: f64,\n\n\n\n background: ui::ImageRef,\n\n progress_bar: ui::ImageRef,\n\n}\n\n\n", "file_path": "src/resources.rs", "rank": 96, "score": 72678.49424872095 }, { "content": "struct Collection {\n\n shader: ModelShader,\n\n\n\n models: HashMap<ModelKey, Model, BuildHasherDefault<FNVHash>>,\n\n blend_s: gl::Factor,\n\n blend_d: gl::Factor,\n\n\n\n next_id: usize,\n\n}\n\n\n\npub struct Model {\n\n // For culling only\n\n pub x: f32,\n\n pub y: f32,\n\n pub z: f32,\n\n pub radius: f32,\n\n // Per a part\n\n pub matrix: Vec<Matrix4<f32>>,\n\n pub colors: Vec<[f32; 4]>,\n\n pub block_light: f32,\n", "file_path": "src/render/model.rs", "rank": 97, "score": 72678.49424872095 }, { "content": "#[derive(Clone)]\n\nstruct Face {\n\n cull_face: Direction,\n\n facing: Direction,\n\n vertices: Vec<BlockVertex>,\n\n vertices_texture: Vec<render::Texture>,\n\n indices: usize,\n\n shade: bool,\n\n tint_index: i32,\n\n}\n\n\n\nimpl Model {\n\n fn join(&mut self, other: &Model) {\n\n self.faces.extend_from_slice(&other.faces);\n\n }\n\n\n\n fn render<W: Write>(&self, factory: &Factory, snapshot: &world::Snapshot, x: i32, y: i32, z: i32, buf: &mut W) -> usize {\n\n let this = snapshot.get_block(x, y, z);\n\n let this_mat = this.get_material();\n\n let mut indices = 0;\n\n\n", "file_path": "src/model/mod.rs", "rank": 98, "score": 72678.49424872095 }, { "content": "#[derive(Clone)]\n\nstruct Model {\n\n faces: Vec<Face>,\n\n ambient_occlusion: bool,\n\n weight: f64,\n\n}\n\n\n", "file_path": "src/model/mod.rs", "rank": 99, "score": 72678.49424872095 } ]
Rust
src/tests.rs
ecstatic-morse/hrtb-logic
579b811bbdaf240e6439238d34dcd37454369a33
use insta::assert_display_snapshot; use crate::{short::*, Formula, Var}; macro_rules! vars { ($($x:ident),*) => { $( const $x: Var = var(stringify!($x).as_bytes()[0] as char); )* } } macro_rules! props { ($($x:ident),*) => { $( const $x: Formula = prop(stringify!($x)); )* } } vars!(A, B, C, D, E, X, Y, Z); props!(P, Q, R, S, T, U, V); #[test] fn nnf() { let nnf = Formula::negation_normal_form; assert_eq!(nnf(!!Q), Q); assert_eq!(nnf(!and(P, Q)), or(!P, !Q)); assert_eq!(nnf(!or(P, Q)), and(!P, !Q)); assert_eq!(nnf(!forall(A, P)), exists(A, !P)); assert_display_snapshot!(nnf(!forall(A, or(P, and(Q, R)))), @"∃'a.(¬P ∧ (¬Q ∨ ¬R))"); } #[test] fn dnf() { let dnf = Formula::disjunctive_normal_form; assert_display_snapshot!(dnf(and(or(P, Q), or(R, S))), @"(P ∧ R) ∨ (P ∧ S) ∨ (Q ∧ R) ∨ (Q ∧ S)"); assert_display_snapshot!(dnf(and(or(and(P, Q), R), S)), @"(P ∧ Q ∧ S) ∨ (R ∧ S)"); assert_display_snapshot!(dnf(and_([or(P, Q), or(R, S), or_([T, U, V])]))); } #[test] fn simp() { let simp = |mut f: Formula| { f.simplify(); f }; let top = || Formula::from(true); let bot = || Formula::from(false); assert_display_snapshot!(simp(and(top(), top())), @"True"); assert_display_snapshot!(simp(and(top(), bot())), @"False"); assert_display_snapshot!(simp(or(bot(), bot())), @"False"); assert_display_snapshot!(simp(or(top(), bot())), @"True"); } #[test] fn qe_atomless() { let _ = Y; let dnf = Formula::disjunctive_normal_form; let qe = |mut f: Formula| { f.eliminate_all_quantifiers(); f.simplify(); f.make_negation_normal_form(); f }; assert_display_snapshot!(qe(forall(B, subeq(B, A))), @"False"); assert_display_snapshot!(qe(forall(A, forall(B, subeq(B, A)))), @"False"); assert_display_snapshot!(qe(exists(B, and(subeq(A, B), subeq(B, C)))), @"'a ⊆ 'c"); assert_display_snapshot!(qe(forall(B, subeq(A, B))), @"'a ⊆ 'static"); assert_display_snapshot!( qe(exists(A, and_([subeq(B, A), subeq(C, A), subeq(A, D), subeq(A, E)]))), @"('b ⊆ 'd) ∧ ('b ⊆ 'e) ∧ ('c ⊆ 'd) ∧ ('c ⊆ 'e)" ); 
assert_display_snapshot!( qe(forall(A, implies(subeq(A, B), subeq(A, C)))), @"'b ⊆ 'c" ); assert_display_snapshot!( qe(forall(A, implies(and(subeq(A, B), subeq(A, C)), subeq(A, D)))), @"('b ⊆ 'd) ∨ ('c ⊆ 'd)" ); assert_display_snapshot!( qe(forall(A, implies(or(subeq(A, B), subeq(A, C)), subeq(A, D)))), @"('b ⊆ 'd) ∧ ('c ⊆ 'd)" ); assert_display_snapshot!( dnf(qe(forall(A, iff(or(subeq(A, B), subeq(A, C)), subeq(A, D))))), @"(('d ⊆ 'b) ∧ ('b ⊆ 'd) ∧ ('c ⊆ 'd)) ∨ (('d ⊆ 'c) ∧ ('b ⊆ 'd) ∧ ('c ⊆ 'd))" ); assert_display_snapshot!( qe(exists(Z, forall(X, iff(subeq(X, Z), and(subeq(X, A), subeq(X, B)))))), @"('a ⊆ 'b) ∨ ('b ⊆ 'a)" ); assert_display_snapshot!( qe(forall(A, forall(C, forall(B, implies(and(subeq(A, B), subeq(B, C)), subeq(A, C)))))), @"True" ); assert_display_snapshot!( qe(forall(A, forall(C, implies(subne(A, C), exists(B, and(subne(A, B), subne(B, C))))))), @"True" ); assert_display_snapshot!( qe(forall(A, forall(B, or(subeq(A, B), !subeq(A, B))))), @"True" ); assert_display_snapshot!( qe(forall(A, forall(B, or(subeq(A, B), subeq(B, A))))), @"False" ); assert_display_snapshot!( qe(forall(A, forall(B, or(!subeq(A, B), !subeq(B, A))))), @"False" ); assert_display_snapshot!( qe(exists(Z, and(!subeq(A, Z), !subeq(Z, B)))), @"¬('a ⊆ 'static)" ); } #[test] fn is_dnf() { let is_dnf = |f: Formula| f.is_disjunctive_normal_form(); assert!(!is_dnf(and(or(P, Q), or(R, S)))); assert!(is_dnf(or(and(P, Q), and(R, S)))); } #[test] fn has_nested_connective() { let has_nested = |f: Formula| f.has_nested_connective(); assert!(!has_nested(and(or(P, Q), or(R, S)))); assert!(has_nested(or(and(P, Q), or(R, S)))); } mod prop { use super::*; use proptest::prelude::*; const PROPS: &'static str = "PQRSTUVWYZ"; impl Arbitrary for Formula { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with((): Self::Parameters) -> Self::Strategy { let mut props: Vec<_> = (0..PROPS.len()) .map(|i| PROPS.get(i..=i).unwrap()) .map(prop) .collect(); props.push(true.into()); 
props.push(false.into()); let leaf = proptest::sample::select(props).no_shrink(); leaf.prop_recursive(6, 64, 10, |inner| { prop_oneof![ inner.clone().prop_map(|form| not(form)), proptest::collection::vec(inner.clone(), 2..6).prop_map(Formula::Or), proptest::collection::vec(inner.clone(), 2..6).prop_map(Formula::And), ] }) .boxed() } } proptest! { #[test] fn nnf(form in any::<Formula>()) { let nnf = form.negation_normal_form(); prop_assert!(nnf.is_negation_normal_form()); } #[test] fn dnf(form in any::<Formula>()) { let dnf = form.disjunctive_normal_form(); prop_assert!(dnf.is_disjunctive_normal_form()); } #[test] fn trivial(mut form in any::<Formula>()) { form.simplify_all_trivial_connectives(); prop_assert!(!form.has_trivial_connectives()); } } }
use insta::assert_display_snapshot; use crate::{short::*, Formula, Var}; macro_rules! vars { ($($x:ident),*) => { $( const $x: Var = var(stringify!($x).as_bytes()[0] as char); )* } } macro_rules! props { ($($x:ident),*) => { $( const $x: Formula = prop(stringify!($x)); )* } } vars!(A, B, C, D, E, X, Y, Z); props!(P, Q, R, S, T, U, V); #[test] fn nnf() { let nnf = Formula::negation_normal_form; assert_eq!(nnf(!!Q), Q); assert_eq!(nnf(!and(P, Q)), or(!P, !Q)); assert_eq!(nnf(!or(P, Q)), and(!P, !Q)); assert_eq!(nnf(!forall(A, P)), exists(A, !P)); assert_display_snapshot!(nnf(!forall(A, or(P, and(Q, R)))), @"∃'a.(¬P ∧ (¬Q ∨ ¬R))"); } #[test] fn dnf() { let dnf = Formula::disjunctive_normal_form; assert_display_snapshot!(dnf(and(or(P, Q), or(R, S))), @"(P ∧ R) ∨ (P ∧ S) ∨ (Q ∧ R) ∨ (Q ∧ S)"); assert_display_snapshot!(dnf(and(or(and(P, Q), R), S)), @"(P ∧ Q ∧ S) ∨ (R ∧ S)"); assert_display_snapshot!(dnf(and_([or(P, Q), or(R, S), or_([T, U, V])]))); } #[test] fn simp() { let simp = |mut f: Formula| { f.simplify(); f }; let top = || Formula::from(true); let bot = || Formula::fro
#[test] fn qe_atomless() { let _ = Y; let dnf = Formula::disjunctive_normal_form; let qe = |mut f: Formula| { f.eliminate_all_quantifiers(); f.simplify(); f.make_negation_normal_form(); f }; assert_display_snapshot!(qe(forall(B, subeq(B, A))), @"False"); assert_display_snapshot!(qe(forall(A, forall(B, subeq(B, A)))), @"False"); assert_display_snapshot!(qe(exists(B, and(subeq(A, B), subeq(B, C)))), @"'a ⊆ 'c"); assert_display_snapshot!(qe(forall(B, subeq(A, B))), @"'a ⊆ 'static"); assert_display_snapshot!( qe(exists(A, and_([subeq(B, A), subeq(C, A), subeq(A, D), subeq(A, E)]))), @"('b ⊆ 'd) ∧ ('b ⊆ 'e) ∧ ('c ⊆ 'd) ∧ ('c ⊆ 'e)" ); assert_display_snapshot!( qe(forall(A, implies(subeq(A, B), subeq(A, C)))), @"'b ⊆ 'c" ); assert_display_snapshot!( qe(forall(A, implies(and(subeq(A, B), subeq(A, C)), subeq(A, D)))), @"('b ⊆ 'd) ∨ ('c ⊆ 'd)" ); assert_display_snapshot!( qe(forall(A, implies(or(subeq(A, B), subeq(A, C)), subeq(A, D)))), @"('b ⊆ 'd) ∧ ('c ⊆ 'd)" ); assert_display_snapshot!( dnf(qe(forall(A, iff(or(subeq(A, B), subeq(A, C)), subeq(A, D))))), @"(('d ⊆ 'b) ∧ ('b ⊆ 'd) ∧ ('c ⊆ 'd)) ∨ (('d ⊆ 'c) ∧ ('b ⊆ 'd) ∧ ('c ⊆ 'd))" ); assert_display_snapshot!( qe(exists(Z, forall(X, iff(subeq(X, Z), and(subeq(X, A), subeq(X, B)))))), @"('a ⊆ 'b) ∨ ('b ⊆ 'a)" ); assert_display_snapshot!( qe(forall(A, forall(C, forall(B, implies(and(subeq(A, B), subeq(B, C)), subeq(A, C)))))), @"True" ); assert_display_snapshot!( qe(forall(A, forall(C, implies(subne(A, C), exists(B, and(subne(A, B), subne(B, C))))))), @"True" ); assert_display_snapshot!( qe(forall(A, forall(B, or(subeq(A, B), !subeq(A, B))))), @"True" ); assert_display_snapshot!( qe(forall(A, forall(B, or(subeq(A, B), subeq(B, A))))), @"False" ); assert_display_snapshot!( qe(forall(A, forall(B, or(!subeq(A, B), !subeq(B, A))))), @"False" ); assert_display_snapshot!( qe(exists(Z, and(!subeq(A, Z), !subeq(Z, B)))), @"¬('a ⊆ 'static)" ); } #[test] fn is_dnf() { let is_dnf = |f: Formula| f.is_disjunctive_normal_form(); 
assert!(!is_dnf(and(or(P, Q), or(R, S)))); assert!(is_dnf(or(and(P, Q), and(R, S)))); } #[test] fn has_nested_connective() { let has_nested = |f: Formula| f.has_nested_connective(); assert!(!has_nested(and(or(P, Q), or(R, S)))); assert!(has_nested(or(and(P, Q), or(R, S)))); } mod prop { use super::*; use proptest::prelude::*; const PROPS: &'static str = "PQRSTUVWYZ"; impl Arbitrary for Formula { type Parameters = (); type Strategy = BoxedStrategy<Self>; fn arbitrary_with((): Self::Parameters) -> Self::Strategy { let mut props: Vec<_> = (0..PROPS.len()) .map(|i| PROPS.get(i..=i).unwrap()) .map(prop) .collect(); props.push(true.into()); props.push(false.into()); let leaf = proptest::sample::select(props).no_shrink(); leaf.prop_recursive(6, 64, 10, |inner| { prop_oneof![ inner.clone().prop_map(|form| not(form)), proptest::collection::vec(inner.clone(), 2..6).prop_map(Formula::Or), proptest::collection::vec(inner.clone(), 2..6).prop_map(Formula::And), ] }) .boxed() } } proptest! { #[test] fn nnf(form in any::<Formula>()) { let nnf = form.negation_normal_form(); prop_assert!(nnf.is_negation_normal_form()); } #[test] fn dnf(form in any::<Formula>()) { let dnf = form.disjunctive_normal_form(); prop_assert!(dnf.is_disjunctive_normal_form()); } #[test] fn trivial(mut form in any::<Formula>()) { form.simplify_all_trivial_connectives(); prop_assert!(!form.has_trivial_connectives()); } } }
m(false); assert_display_snapshot!(simp(and(top(), top())), @"True"); assert_display_snapshot!(simp(and(top(), bot())), @"False"); assert_display_snapshot!(simp(or(bot(), bot())), @"False"); assert_display_snapshot!(simp(or(top(), bot())), @"True"); }
function_block-function_prefixed
[ { "content": "pub fn exists(var: Var, form: Formula) -> Formula {\n\n Formula::Bind(QuantifierKind::Exists, var, P::new(form))\n\n}\n\n\n\npub const fn subeq(sub: Var, sup: Var) -> Formula {\n\n Formula::SubsetEq { sub, sup }\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 0, "score": 94200.57249840398 }, { "content": "pub fn forall(var: Var, form: Formula) -> Formula {\n\n Formula::Bind(QuantifierKind::ForAll, var, P::new(form))\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 1, "score": 94200.57249840398 }, { "content": "pub fn empty(var: Var) -> Formula {\n\n subeq(var, Var::EMPTY)\n\n}\n\n\n\npub const fn prop(s: &'static str) -> Formula {\n\n Formula::Prop(s)\n\n}\n", "file_path": "src/short.rs", "rank": 5, "score": 91950.62452611043 }, { "content": "/// a ⊂ b, expressed as a ⊆ b ∧ ¬(a ⊆ b ∧ b ⊆ a).\n\npub fn subne(sub: Var, sup: Var) -> Formula {\n\n and(subeq(sub, sup), !and(subeq(sub, sup), subeq(sup, sub)))\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 6, "score": 82832.39799242547 }, { "content": "pub fn and(a: Formula, b: Formula) -> Formula {\n\n Formula::And(vec![a, b])\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 7, "score": 78272.1653569203 }, { "content": "pub fn or(a: Formula, b: Formula) -> Formula {\n\n Formula::Or(vec![a, b])\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 8, "score": 78272.1653569203 }, { "content": "pub fn iff(a: Formula, b: Formula) -> Formula {\n\n and(implies(a.clone(), b.clone()), implies(b, a))\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 9, "score": 76524.90401844 }, { "content": "pub fn not(form: Formula) -> Formula {\n\n if let Formula::Trivial(sat) = form {\n\n return (!sat).into();\n\n }\n\n\n\n Formula::Not(P::new(form))\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 11, "score": 64434.45741498272 }, { "content": "pub fn implies(antecedent: Formula, consequent: Formula) -> Formula {\n\n or(not(antecedent), consequent)\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 12, "score": 
63398.282819376866 }, { "content": "pub fn and_(forms: impl IntoIterator<Item = Formula>) -> Formula {\n\n Formula::And(forms.into_iter().collect())\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 13, "score": 55664.02881301531 }, { "content": "pub fn or_(forms: impl IntoIterator<Item = Formula>) -> Formula {\n\n Formula::Or(forms.into_iter().collect())\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 14, "score": 55664.02881301531 }, { "content": "struct HasVar(Var);\n\n\n\nimpl Visit for HasVar {\n\n type Break = FoundVar;\n\n\n\n fn visit_var(&mut self, var: &Var) -> Flow<Self::Break> {\n\n if *var == self.0 {\n\n Break(FoundVar)\n\n } else {\n\n Continue(())\n\n }\n\n }\n\n}\n", "file_path": "src/form.rs", "rank": 17, "score": 42725.33206946455 }, { "content": "type P<T> = Box<T>;\n", "file_path": "src/lib.rs", "rank": 18, "score": 31804.933710245998 }, { "content": "use std::mem;\n\nuse std::ops::ControlFlow::{Break, Continue};\n\n\n\nuse crate::{short::*, Formula};\n\n\n\nimpl Formula {\n\n pub fn negation_normal_form(mut self) -> Self {\n\n self.make_negation_normal_form();\n\n self\n\n }\n\n\n\n pub fn make_negation_normal_form(&mut self) {\n\n self.make_nnf_(false);\n\n }\n\n\n\n fn make_nnf_(&mut self, invert: bool) {\n\n match self {\n\n Self::Trivial(sat) => {\n\n if invert {\n\n *sat = !*sat;\n", "file_path": "src/nnf.rs", "rank": 19, "score": 23561.212053838935 }, { "content": " }\n\n }\n\n\n\n Self::Prop(..) | Self::SubsetEq { .. 
} => {\n\n if invert {\n\n *self = not(self.clone());\n\n }\n\n }\n\n\n\n Self::Not(form) => {\n\n form.make_nnf_(!invert);\n\n *self = form.take()\n\n }\n\n\n\n Self::And(list) => {\n\n list.iter_mut().for_each(|form| form.make_nnf_(invert));\n\n if invert {\n\n *self = Self::Or(mem::take(list));\n\n }\n\n }\n", "file_path": "src/nnf.rs", "rank": 20, "score": 23558.851047355725 }, { "content": " Self::Or(list) => {\n\n list.iter_mut().for_each(|form| form.make_nnf_(invert));\n\n if invert {\n\n *self = Self::And(mem::take(list));\n\n }\n\n }\n\n\n\n Self::Bind(kind, _, form) => {\n\n if invert {\n\n *kind = kind.dual();\n\n }\n\n\n\n form.make_nnf_(invert)\n\n }\n\n }\n\n }\n\n\n\n /// Returns `true` if this formula is in negation normal form, i.e. if it has only atomic\n\n /// formulas inside a `Not`.\n\n pub fn is_negation_normal_form(&self) -> bool {\n", "file_path": "src/nnf.rs", "rank": 21, "score": 23556.410389221455 }, { "content": " let inside_negation = false;\n\n self.visit_with_stateful(inside_negation, |form, &in_neg| {\n\n if in_neg && !form.is_atomic() {\n\n return Break(());\n\n }\n\n\n\n Continue(form.is_not())\n\n })\n\n .is_continue()\n\n }\n\n}\n", "file_path": "src/nnf.rs", "rank": 22, "score": 23552.621472675488 }, { "content": "#[allow(unused_imports)]\n\nuse std::ops::ControlFlow::{self as Flow, Break, Continue};\n\n\n\nuse crate::{Formula, Var};\n\n\n\nimpl Formula {\n\n /// Rewrites a connective to remove boolean constants.\n\n ///\n\n /// Repeatedly applies the following identities:\n\n ///\n\n /// - ¬⊥ → ⊤\n\n /// - ¬⊤ → ⊥\n\n /// - ⊥ ∨ P → P\n\n /// - ⊤ ∧ P → P\n\n /// - ⊤ ∨ P → ⊤\n\n /// - ⊥ ∧ P → ⊥\n\n ///\n\n /// Then, if only a single formula remains in a join, replace the join with that formula.\n\n pub fn simplify_trivial_connective(&mut self) {\n\n let (l, short_circ) = match self {\n", "file_path": "src/simp.rs", "rank": 23, "score": 23402.354494606672 }, { "content": "use std::fmt;\n\n\n\n/// A free or bound 
variable.\n\n#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Var(pub(crate) u32);\n\n\n\nimpl Var {\n\n /// The empty set, `'static`.\n\n pub const EMPTY: Self = Var(u32::MAX);\n\n}\n\n\n\nimpl fmt::Debug for Var {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n if *self == Var::EMPTY {\n\n return write!(f, \"'static\");\n\n }\n\n\n\n if self.0 <= u32::from(b'z' - b'a') {\n\n write!(f, \"'{}\", char::from(b'a' + self.0 as u8))\n\n } else {\n\n write!(f, \"_{}\", self.0)\n\n }\n\n }\n\n}\n", "file_path": "src/var.rs", "rank": 24, "score": 23399.707178092114 }, { "content": " Formula::Not(box Formula::Trivial(sat)) => {\n\n *self = (!*sat).into();\n\n return;\n\n }\n\n\n\n Formula::Or(l) => (l, true),\n\n Formula::And(l) => (l, false),\n\n\n\n _ => return,\n\n };\n\n\n\n // (⊥ ∨ P → P) ∧ (⊤ ∧ P → P)\n\n l.retain(|f| !matches!(f, Formula::Trivial(sat) if *sat != short_circ));\n\n\n\n // (⊤ ∨ P → ⊤) ∧ (⊥ ∧ P → ⊥)\n\n if l.iter()\n\n .any(|f| matches!(f, Formula::Trivial(sat) if *sat == short_circ))\n\n {\n\n *self = short_circ.into();\n\n return;\n", "file_path": "src/simp.rs", "rank": 25, "score": 23398.86631905513 }, { "content": " /// - ∅ ⊆ x → ⊤\n\n /// - x ⊆ x → ⊤\n\n pub fn simplify_subset_identity(&mut self) {\n\n #[rustfmt::skip]\n\n let new = match self {\n\n Formula::SubsetEq { sub: Var::EMPTY, sup: _ } => true.into(),\n\n Formula::SubsetEq { sub, sup } if sub == sup => true.into(),\n\n\n\n _ => return,\n\n };\n\n\n\n *self = new\n\n }\n\n\n\n pub fn simplify_all_subset_identities(&mut self) {\n\n let _: Flow<()> = self.visit_with_pre_mut(|form| {\n\n form.simplify_subset_identity();\n\n Continue(())\n\n });\n\n }\n", "file_path": "src/simp.rs", "rank": 26, "score": 23398.617349988 }, { "content": " Continue(())\n\n });\n\n }\n\n\n\n /// Returns `true` if this formula contains a literal false or true as part of a connective.\n\n #[cfg(test)]\n\n pub(crate) fn has_trivial_connectives(&mut self) -> bool {\n\n 
self.visit_with_pre(|form| match form {\n\n Formula::Or(l) | Formula::And(l) if l.iter().any(Formula::is_trivial) => Break(()),\n\n Formula::Not(box Formula::Trivial(_)) => Break(()),\n\n\n\n _ => Continue(()),\n\n })\n\n .is_break()\n\n }\n\n\n\n /// Replaces trivial subset relations with their truth values.\n\n ///\n\n /// Applies the following identities:\n\n ///\n", "file_path": "src/simp.rs", "rank": 27, "score": 23398.16817835735 }, { "content": "\n\n pub fn simplify(&mut self) {\n\n let _: Flow<()> = self.visit_with_post_mut(|form| {\n\n // NOTE: Order matters.\n\n form.simplify_subset_identity();\n\n form.simplify_trivial_connective();\n\n\n\n Continue(())\n\n });\n\n }\n\n}\n", "file_path": "src/simp.rs", "rank": 28, "score": 23395.50024915801 }, { "content": " }\n\n\n\n match l.len() {\n\n // If all formulas in the join were trivial but none short-circuited, the `retain`\n\n // above will remove them all. In that case, replace `self` with the appropriate\n\n // truth value.\n\n //\n\n // ⊤ ∧ ⊤ → ⊤\n\n // ⊥ ∨ ⊥ → ⊥\n\n 0 => *self = (!short_circ).into(),\n\n\n\n 1 => *self = l.pop().unwrap(),\n\n _ => {}\n\n }\n\n }\n\n\n\n /// Recursively rewrites a formula to remove any boolean constants inside a connective.\n\n pub fn simplify_all_trivial_connectives(&mut self) {\n\n let _: Flow<()> = self.visit_with_post_mut(|form| {\n\n form.simplify_trivial_connective();\n", "file_path": "src/simp.rs", "rank": 29, "score": 23394.986384230524 }, { "content": " Self::Trivial(_) | Self::Prop(..) | Self::SubsetEq { .. } => {}\n\n\n\n // No need to recurse into a negation. 
We're already in NNF.\n\n Self::Not(form) => debug_assert!(form.is_atomic()),\n\n\n\n Self::Or(list) => {\n\n list.iter_mut().for_each(Formula::make_dnf_);\n\n flatten_connective!(list, Or); // ((a ∨ b) ∨ c) → (a ∨ b ∨ c)\n\n }\n\n\n\n Self::And(conj) => {\n\n conj.iter_mut().for_each(Formula::make_dnf_);\n\n flatten_connective!(conj, And); // ((a ∧ b) ∧ c) → (a ∧ b ∧ c)\n\n\n\n // If we don't have any OR clauses, there's nothing to distribute. We're done.\n\n if !conj.iter().any(Formula::is_or) {\n\n return;\n\n }\n\n\n\n // Separate atomic formulas (and their negations) from ORs. After this, each\n", "file_path": "src/dnf.rs", "rank": 30, "score": 23320.205888336888 }, { "content": "use std::mem;\n\nuse std::ops::ControlFlow::{Break, Continue};\n\n\n\nuse contracts::*;\n\nuse itertools::Itertools;\n\n\n\nuse crate::{Formula, List};\n\n\n\nimpl Formula {\n\n pub fn disjunctive_normal_form(mut self) -> Self {\n\n self.make_disjunctive_normal_form();\n\n self\n\n }\n\n\n\n #[debug_requires(!self.has_quantifiers())]\n\n pub fn make_disjunctive_normal_form(&mut self) {\n\n self.make_negation_normal_form();\n\n self.make_dnf_();\n\n }\n\n\n", "file_path": "src/dnf.rs", "rank": 31, "score": 23319.90163691705 }, { "content": " let new_depth = match form {\n\n | Formula::Prop(_)\n\n | Formula::Trivial(_)\n\n | Formula::SubsetEq { .. }\n\n => return Continue(depth),\n\n\n\n Formula::Bind(..) => return Break(NotDnf),\n\n\n\n Formula::Not(_) => Depth::Not,\n\n Formula::And(_) => Depth::And,\n\n Formula::Or(_) => Depth::Or,\n\n };\n\n\n\n // If we want to allow nested connectives, this could be `<`.\n\n if new_depth <= depth {\n\n return Break(NotDnf);\n\n }\n\n\n\n Continue(new_depth)\n\n })\n\n .is_continue()\n\n }\n\n}\n", "file_path": "src/dnf.rs", "rank": 32, "score": 23318.42769905264 }, { "content": " *self = Self::Or(disj);\n\n }\n\n\n\n Self::Bind(..) 
=> panic!(\"formula is not quantifier-free\"),\n\n }\n\n }\n\n\n\n pub fn is_disjunctive_normal_form(&self) -> bool {\n\n struct NotDnf;\n\n\n\n #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\n pub enum Depth {\n\n Top,\n\n Or,\n\n And,\n\n Not,\n\n }\n\n\n\n self.visit_with_stateful(Depth::Top, |form, &depth| {\n\n #[rustfmt::skip]\n", "file_path": "src/dnf.rs", "rank": 33, "score": 23315.905862198088 }, { "content": " pub fn make_dnf_(&mut self) {\n\n // Flattens one level of nested connectives, e.g. `And([And(...), ...])`\n\n macro_rules! flatten_connective {\n\n ($list:expr, $Kind:ident) => {{\n\n let len = $list.len();\n\n for i in 0..len {\n\n let form = &mut $list[i];\n\n if let Self::$Kind(inner) = form {\n\n let mut inner = mem::take(inner).into_iter();\n\n\n\n if let Some(head) = inner.next() {\n\n *form = head;\n\n $list.extend(inner);\n\n }\n\n }\n\n }\n\n }};\n\n }\n\n\n\n match self {\n", "file_path": "src/dnf.rs", "rank": 34, "score": 23315.682949433954 }, { "content": " // formula in `and_clauses` is an OR. We'll handle these separately.\n\n let atomics: List<_> = conj.drain_filter(|f| f.is_atomic() || f.is_not()).collect();\n\n debug_assert!(conj.iter().all(Formula::is_or));\n\n\n\n let disj: List<_> = conj\n\n .iter()\n\n .map(|or| {\n\n unwrap!(let Self::Or(list) = or);\n\n list.iter().cloned()\n\n })\n\n .multi_cartesian_product()\n\n .map(|mut list| {\n\n flatten_connective!(list, And);\n\n\n\n // Don't forget the atomics from the original conjunction.\n\n list.extend(atomics.iter().cloned());\n\n Formula::And(list)\n\n })\n\n .collect();\n\n\n", "file_path": "src/dnf.rs", "rank": 35, "score": 23314.66935832389 }, { "content": "struct FoundVar;\n", "file_path": "src/form.rs", "rank": 42, "score": 20502.912101111087 }, { "content": " impl<F, R> $Anon<$Cont<R, ()>, F>\n\n where F: FnMut(& $($mt)? 
Formula, &mut ()) -> Flow<R>\n\n {\n\n pub fn post(post: F) -> Self {\n\n Self { pre: $cont, post, state: () }\n\n }\n\n }\n\n\n\n impl<R, A, B, S> $Visit for $Anon<A, B, S>\n\n where A: FnMut(& $($mt)? Formula, &mut S) -> Flow<R>,\n\n B: FnMut(& $($mt)? Formula, &mut S) -> Flow<R>,\n\n {\n\n type Break = R;\n\n\n\n fn visit_formula(&mut self, form: & $($mt)? Formula) -> Flow<Self::Break> {\n\n (self.pre)(form, &mut self.state)?;\n\n self.super_formula(form)?;\n\n (self.post)(form, &mut self.state)\n\n }\n\n }\n\n };\n\n}\n\n\n\nmake_visitor!([]);\n\nmake_visitor!([mut]);\n", "file_path": "src/visit.rs", "rank": 43, "score": 14.665660153300053 }, { "content": " }\n\n\n\n type $Cont<R, S> = fn(& $($mt)? Formula, &mut S) -> Flow<R>;\n\n fn $cont<R, S>(_: & $($mt)? Formula, _: &mut S) -> Flow<R> { Continue(()) }\n\n\n\n /// An anonymous `Formula` visitor.\n\n pub struct $Anon<A, B, S = ()> {\n\n pub pre: A,\n\n pub post: B,\n\n pub state: S,\n\n }\n\n\n\n impl<F, R> $Anon<F, $Cont<R, ()>>\n\n where F: FnMut(& $($mt)? 
Formula, &mut ()) -> Flow<R>\n\n {\n\n pub fn pre(pre: F) -> Self {\n\n Self { pre, post: $cont, state: () }\n\n }\n\n }\n\n\n", "file_path": "src/visit.rs", "rank": 44, "score": 13.094275088004942 }, { "content": "#![feature(drain_filter, box_patterns, let_else, control_flow_enum)]\n\n\n\n#[macro_use]\n\nmod util;\n\n\n\nmod dnf;\n\nmod form;\n\nmod nnf;\n\nmod qe;\n\npub mod short;\n\nmod simp;\n\n#[cfg(test)]\n\nmod tests;\n\nmod var;\n\npub mod visit;\n\n\n\npub use form::{Formula, QuantifierKind};\n\npub use var::Var;\n\n\n", "file_path": "src/lib.rs", "rank": 45, "score": 11.873061052012936 }, { "content": "\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum Formula {\n\n /// An opaque proposition, used for testing.\n\n Prop(&'static str),\n\n\n\n Trivial(bool),\n\n Not(P<Formula>),\n\n And(List<Formula>),\n\n Or(List<Formula>),\n\n\n\n /// An existential or universal quantifier over some variable.\n\n Bind(QuantifierKind, Var, P<Formula>),\n\n\n\n SubsetEq {\n\n sub: Var,\n\n sup: Var,\n\n },\n\n}\n\n\n", "file_path": "src/form.rs", "rank": 46, "score": 11.380347623808778 }, { "content": "//! 
Terse constructors for various `Formula`s.\n\n\n\nuse crate::{Formula, QuantifierKind, Var, P};\n\n\n\npub const fn var(c: char) -> Var {\n\n let c = c.to_ascii_lowercase();\n\n assert!(c.is_ascii_lowercase());\n\n\n\n let x = c as u32 - 'a' as u32;\n\n Var(x)\n\n}\n\n\n", "file_path": "src/short.rs", "rank": 47, "score": 10.884283658455283 }, { "content": " pub fn visit_with_pre<R>(&self, mut f: impl FnMut(&Self) -> Flow<R>) -> Flow<R> {\n\n AnonFormVisitor::pre(|x: &Self, _: &mut ()| f(x)).visit_formula(self)\n\n }\n\n\n\n pub fn visit_with_pre_mut<R>(&mut self, mut f: impl FnMut(&mut Self) -> Flow<R>) -> Flow<R> {\n\n AnonFormVisitorMut::pre(|x: &mut Self, _| f(x)).visit_formula(self)\n\n }\n\n\n\n pub fn visit_with_post<R>(&self, mut f: impl FnMut(&Self) -> Flow<R>) -> Flow<R> {\n\n AnonFormVisitor::post(|x: &Self, _| f(x)).visit_formula(self)\n\n }\n\n\n\n pub fn visit_with_post_mut<R>(&mut self, mut f: impl FnMut(&mut Self) -> Flow<R>) -> Flow<R> {\n\n AnonFormVisitorMut::post(|x: &mut Self, _| f(x)).visit_formula(self)\n\n }\n\n\n\n pub fn visit_with_stateful<S, R>(\n\n &self,\n\n initial_state: S,\n\n mut f: impl FnMut(&Self, &S) -> Flow<R, S>,\n", "file_path": "src/form.rs", "rank": 48, "score": 10.022543770912462 }, { "content": " };\n\n\n\n ($Visit:ident $Anon:ident $Cont:ident $cont:ident [$($mt:tt)?]) => {\n\n pub trait $Visit {\n\n type Break;\n\n\n\n fn visit_formula(&mut self, form: & $($mt)? Formula) -> Flow<Self::Break> {\n\n self.super_formula(form)\n\n }\n\n\n\n fn visit_var(&mut self, _var: & $($mt)? Var) -> Flow<Self::Break> {\n\n Continue(())\n\n }\n\n\n\n fn super_formula(&mut self, form: & $($mt)? 
Formula) -> Flow<Self::Break> {\n\n match form {\n\n | Formula::Trivial(_)\n\n | Formula::Prop(_)\n\n => Continue(()),\n\n\n", "file_path": "src/visit.rs", "rank": 49, "score": 8.398229176321578 }, { "content": " pub fn visit_with_stateful_mut<S, R>(\n\n &mut self,\n\n initial_state: S,\n\n mut f: impl FnMut(&mut Self, &S) -> Flow<R, S>,\n\n ) -> Flow<R, ()> {\n\n let pre = |form: &mut Formula, stack: &mut Vec<S>| {\n\n let new_state: S = f(form, stack.last().unwrap())?;\n\n stack.push(new_state);\n\n Continue(())\n\n };\n\n\n\n let post = |_form: &mut Formula, stack: &mut Vec<S>| {\n\n stack.pop();\n\n Continue(())\n\n };\n\n\n\n AnonFormVisitorMut {\n\n pre,\n\n post,\n\n state: vec![initial_state],\n", "file_path": "src/form.rs", "rank": 50, "score": 7.816794404834856 }, { "content": "//! Quantifier elimination for an atomless Boolean algebra with order constraints.\n\n//!\n\n//! Thanks to Mario Carneiro for formalizing this approach and providing us with a simple QE\n\n//! algorithm.\n\n\n\nuse crate::{short::*, Formula, Var};\n\nuse contracts::*;\n\nuse itertools::Itertools;\n\nuse std::ops::ControlFlow::{self as Flow, Continue};\n\n\n\nimpl Formula {\n\n pub fn eliminate_all_quantifiers(&mut self) {\n\n let _: Flow<()> = self.visit_with_post_mut(|form| {\n\n if form.is_bind() {\n\n form.eliminate_quantifier();\n\n }\n\n\n\n Continue(())\n\n });\n\n }\n", "file_path": "src/qe/atomless.rs", "rank": 51, "score": 7.437104538386899 }, { "content": "\n\n #[requires(self.is_bind())]\n\n fn eliminate_quantifier(&mut self) {\n\n unwrap!(let Formula::Bind(kind, var, box mut form) = self.take());\n\n\n\n debug_assert!(!form.has_quantifiers());\n\n\n\n // ∀a.P ↔ ¬∃a.¬P\n\n // ^\n\n if kind.is_forall() {\n\n form = !form;\n\n }\n\n\n\n form.simplify();\n\n form.make_disjunctive_normal_form();\n\n\n\n match &mut form {\n\n Formula::Or(l) => l.iter_mut().for_each(|f| f.eliminate_existential(var)),\n\n _ => form.eliminate_existential(var),\n\n };\n", "file_path": 
"src/qe/atomless.rs", "rank": 52, "score": 6.88610430170691 }, { "content": "\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl Formula {\n\n pub fn take(&mut self) -> Self {\n\n mem::replace(self, prop(\"DUMMY\"))\n\n }\n\n\n\n #[rustfmt::skip]\n\n pub fn is_atomic(&self) -> bool {\n\n match self {\n\n | Formula::Trivial(_)\n\n | Formula::Prop(_)\n\n | Formula::SubsetEq { .. }\n\n => true,\n\n\n", "file_path": "src/form.rs", "rank": 53, "score": 6.665346659523393 }, { "content": "use std::ops::ControlFlow::{self as Flow, Break, Continue};\n\nuse std::{fmt, mem, ops};\n\n\n\nuse crate::visit::{AnonFormVisitor, AnonFormVisitorMut, Visit, VisitMut};\n\nuse crate::{short::*, List, Var, P};\n\n\n\n/// A quantifier, either \"for all\", or \"there exists\".\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum QuantifierKind {\n\n ForAll,\n\n Exists,\n\n}\n\n\n\nimpl fmt::Display for QuantifierKind {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let s = match self {\n\n Self::ForAll => \"∀\",\n\n Self::Exists => \"∃\",\n\n };\n\n\n", "file_path": "src/form.rs", "rank": 54, "score": 6.654640440885482 }, { "content": "//! Recursive iteration over `Formula`s using the visitor pattern.\n\n\n\n#[allow(unused_imports)]\n\nuse std::ops::ControlFlow::{self as Flow, Break, Continue};\n\n\n\nuse paste::paste;\n\n\n\nuse crate::{Formula, Var};\n\n\n\nmacro_rules! make_visitor {\n\n ([$($mt:tt)?]) => {\n\n paste! 
{\n\n make_visitor!(\n\n [<Visit $($mt:camel)?>]\n\n [<AnonFormVisitor $($mt:camel)?>]\n\n [<Cont $($mt:camel)?>]\n\n [<cont $(_ $mt)?>]\n\n [$($mt)?]\n\n );\n\n }\n", "file_path": "src/visit.rs", "rank": 55, "score": 5.7771393986254145 }, { "content": "\n\n // ∀a.P ↔ ¬∃a.¬P\n\n // ^\n\n if kind.is_forall() {\n\n form = !form\n\n }\n\n\n\n *self = form;\n\n }\n\n\n\n fn eliminate_existential(&mut self, exst: Var) {\n\n // Call the existentially quantified variable `z`.\n\n let z = exst;\n\n\n\n // Mario Carneiro's QE algorithm transforms an existentially quantified statement like this:\n\n //\n\n // ∃z, ⋀ᵢ aᵢ ⊆ z ∧ ⋀ᵢ z ⊆ bᵢ ∧ ⋀ᵢ ¬(cᵢ ⊆ z) ∧ ⋀ᵢ ¬(z ⊆ dᵢ)\n\n //\n\n // into the following:\n\n //\n", "file_path": "src/qe/atomless.rs", "rank": 56, "score": 5.755382830085992 }, { "content": "impl From<bool> for Formula {\n\n fn from(x: bool) -> Self {\n\n Formula::Trivial(x)\n\n }\n\n}\n\n\n\nimpl ops::Not for Formula {\n\n type Output = Self;\n\n\n\n fn not(self) -> Self::Output {\n\n not(self)\n\n }\n\n}\n\n\n\nimpl fmt::Display for Formula {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n fn needs_parens(form: &Formula) -> bool {\n\n match form {\n\n Formula::Trivial(_) | Formula::Prop(..) 
=> false,\n\n Formula::Not(inner) => needs_parens(inner),\n", "file_path": "src/form.rs", "rank": 57, "score": 5.548015440134078 }, { "content": " ) -> Flow<R, ()> {\n\n let pre = |form: &Formula, stack: &mut Vec<S>| {\n\n let new_state: S = f(form, stack.last().unwrap())?;\n\n stack.push(new_state);\n\n Continue(())\n\n };\n\n\n\n let post = |_form: &Formula, stack: &mut Vec<S>| {\n\n stack.pop();\n\n Continue(())\n\n };\n\n\n\n AnonFormVisitor {\n\n pre,\n\n post,\n\n state: vec![initial_state],\n\n }\n\n .visit_formula(self)\n\n }\n\n\n", "file_path": "src/form.rs", "rank": 58, "score": 5.54743084473212 }, { "content": " Formula::SubsetEq { sub, sup } => {\n\n self.visit_var(sub)?;\n\n self.visit_var(sup)\n\n }\n\n\n\n Formula::Not(form) => self.visit_formula(form),\n\n\n\n Formula::Bind(_, var, form) => {\n\n self.visit_var(var)?;\n\n self.visit_formula(form)\n\n }\n\n Formula::And(list) | Formula::Or(list) => {\n\n for form in list {\n\n self.visit_formula(form)?;\n\n }\n\n\n\n Continue(())\n\n }\n\n }\n\n }\n", "file_path": "src/visit.rs", "rank": 59, "score": 5.522059416666103 }, { "content": " }\n\n .visit_formula(self)\n\n }\n\n\n\n /// True if any part of this formula mentions the given variable.\n\n ///\n\n /// This includes `Bind`.\n\n pub fn has_var(&self, var: Var) -> bool {\n\n matches!(HasVar(var).visit_formula(self), Break(FoundVar))\n\n }\n\n\n\n pub fn has_quantifiers(&self) -> bool {\n\n self.visit_with_pre(|form| {\n\n if form.is_bind() {\n\n Break(())\n\n } else {\n\n Continue(())\n\n }\n\n })\n\n .is_break()\n", "file_path": "src/form.rs", "rank": 60, "score": 4.862274681564227 }, { "content": " // ⋀ᵢ,ⱼ aᵢ ⊆ bⱼ ∧ ⋀ᵢ,ⱼ ¬(cᵢ ⊆ aⱼ) ∧ ⋀ᵢ ¬(cᵢ ⊆ ∅) ∧ ⋀ᵢ,ⱼ ¬(bᵢ ⊆ dⱼ)\n\n //\n\n // I'll try to explain this part-by-part, although I won't prove that this is sufficient to\n\n // show that z exists. Only that it is necessary.\n\n //\n\n // The intuition for the first term (a ⊆ b) is straightforward. 
If it does not hold, there\n\n // is no \"room\" for z in between a and b, and the quantifier is unsatisfiable.\n\n //\n\n // We'll prove that the second is necessary by contradiction.\n\n // Assume c ⊆ a. We want ∃z a ⊆ z ∧ ¬(c ⊆ z) to hold. By the transitive property, we can\n\n // substitute a with c in the first inequality, which would give us P ∧ ¬P.\n\n //\n\n // The third term is required because ¬(c ⊆ z) is trivially unsatisfiable if c is the empty set.\n\n //\n\n // For the final term, use the same approach as for the second. Since there is no global\n\n // upper bound, there is no analogue to the third term, ¬(c ⊆ z), involving d.\n\n\n\n let mut literals = match self.take() {\n\n Formula::And(l) => l,\n\n form => vec![form],\n", "file_path": "src/qe/atomless.rs", "rank": 61, "score": 4.612467498224894 }, { "content": " };\n\n\n\n let mut aa = vec![];\n\n let mut bb = vec![];\n\n let mut cc = vec![];\n\n let mut dd = vec![];\n\n\n\n literals.retain(|form| {\n\n // At this stage, any formula involving `z` is a subset relation (or its negation)\n\n // that fits one of the patterns below.\n\n match *form {\n\n // a ⊆ z\n\n Formula::SubsetEq { sub: a, sup } if sup == z => aa.push(a),\n\n // z ⊆ b\n\n Formula::SubsetEq { sub, sup: b } if sub == z => bb.push(b),\n\n // ¬(c ⊆ z)\n\n Formula::Not(box Formula::SubsetEq { sub: c, sup }) if sup == z => {\n\n cc.push(c)\n\n }\n\n // ¬(z ⊆ d)\n", "file_path": "src/qe/atomless.rs", "rank": 62, "score": 4.507963849029839 }, { "content": " _ => true,\n\n }\n\n }\n\n\n\n match self {\n\n Formula::Trivial(true) => write!(f, \"True\"),\n\n Formula::Trivial(false) => write!(f, \"False\"),\n\n Formula::Prop(s) => write!(f, \"{}\", s),\n\n\n\n Formula::SubsetEq { sub, sup } => write!(f, \"{:?} ⊆ {:?}\", sub, sup),\n\n\n\n Formula::Bind(kind, var, form @ box Formula::Bind(..)) => {\n\n write!(f, \"{}{:?}{}\", kind, var, form)\n\n }\n\n Formula::Bind(kind, var, form) => write!(f, \"{}{:?}.({})\", kind, var, form),\n\n\n\n 
Formula::Not(inner) if !needs_parens(&**inner) => write!(f, \"¬{}\", inner),\n\n Formula::Not(form) => write!(f, \"¬({})\", form),\n\n\n\n Formula::And(list) | Formula::Or(list) => {\n", "file_path": "src/form.rs", "rank": 63, "score": 4.083473868881269 }, { "content": " let sep = match self {\n\n Formula::And(_) => '∧',\n\n Formula::Or(_) => '∨',\n\n _ => unreachable!(),\n\n };\n\n\n\n let mut first = true;\n\n for form in list {\n\n if !first {\n\n write!(f, \" {} \", sep)?;\n\n } else {\n\n first = false;\n\n }\n\n\n\n if needs_parens(form) {\n\n write!(f, \"({})\", form)?;\n\n } else {\n\n write!(f, \"{}\", form)?;\n\n }\n\n }\n", "file_path": "src/form.rs", "rank": 64, "score": 4.06522841775579 }, { "content": " Formula::Not(box Formula::SubsetEq { sub, sup: d }) if sub == z => {\n\n dd.push(d)\n\n }\n\n\n\n // Formulas that don't involve `z` can be moved outside the quantifier unchanged.\n\n // Leave them as-is.\n\n _ => {\n\n debug_assert!(!form.has_var(z));\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n });\n\n\n\n macro_rules! 
mk_product {\n\n ($lower:ident, $upper:ident, $func:expr) => {\n\n literals.extend($lower.iter().cartesian_product($upper.iter()).map($func))\n\n };\n\n }\n", "file_path": "src/qe/atomless.rs", "rank": 65, "score": 3.5193899906673574 }, { "content": " | Formula::Not(_)\n\n | Formula::And(_)\n\n | Formula::Or(_)\n\n | Formula::Bind(..)\n\n => false,\n\n }\n\n }\n\n\n\n pub fn is_trivial(&self) -> bool {\n\n matches!(self, Self::Trivial(..))\n\n }\n\n\n\n pub fn is_not(&self) -> bool {\n\n matches!(self, Formula::Not(_))\n\n }\n\n\n\n pub fn is_or(&self) -> bool {\n\n matches!(self, Formula::Or(_))\n\n }\n\n\n", "file_path": "src/form.rs", "rank": 66, "score": 2.7738201346634304 }, { "content": " }\n\n\n\n /// Returns `true` if this contains an `And` immediately within another `And` or an `Or` within\n\n /// another `Or`.\n\n pub fn has_nested_connective(&self) -> bool {\n\n self.visit_with_pre(|form| match form {\n\n Formula::Or(l) if l.iter().any(Formula::is_or) => Break(()),\n\n Formula::And(l) if l.iter().any(Formula::is_and) => Break(()),\n\n _ => Continue(()),\n\n })\n\n .is_break()\n\n }\n\n}\n\n\n", "file_path": "src/form.rs", "rank": 67, "score": 2.5103551156851216 }, { "content": " pub fn is_and(&self) -> bool {\n\n matches!(self, Formula::And(_))\n\n }\n\n\n\n pub fn is_bind(&self) -> bool {\n\n matches!(self, Formula::Bind(..))\n\n }\n\n\n\n pub fn is_subseteq(&self) -> bool {\n\n matches!(self, Formula::SubsetEq { .. 
})\n\n }\n\n\n\n pub fn is_forall(&self) -> bool {\n\n matches!(self, Formula::Bind(QuantifierKind::ForAll, ..))\n\n }\n\n\n\n pub fn is_exists(&self) -> bool {\n\n matches!(self, Formula::Bind(QuantifierKind::Exists, ..))\n\n }\n\n\n", "file_path": "src/form.rs", "rank": 68, "score": 2.5039982471754323 }, { "content": "\n\n // a ⊆ b\n\n mk_product!(aa, bb, |(&a, &b)| subeq(a, b));\n\n\n\n // ¬(c ⊆ a)\n\n mk_product!(cc, aa, |(&c, &a)| !subeq(c, a));\n\n\n\n // ¬(b ⊆ d)\n\n mk_product!(bb, dd, |(&b, &d)| !subeq(b, d));\n\n\n\n // We only need to add the ¬(c ⊆ ∅) constraint if a is empty. Otherwise it follows\n\n // trivially from the second `mk_product`.\n\n if aa.is_empty() {\n\n literals.extend(cc.iter().map(|&c| !empty(c)));\n\n }\n\n\n\n let ret = match literals.len() {\n\n 0 => true.into(),\n\n 1 => literals.pop().unwrap(),\n\n _ => Formula::And(literals),\n\n };\n\n\n\n *self = ret;\n\n }\n\n}\n", "file_path": "src/qe/atomless.rs", "rank": 69, "score": 1.1662583802429776 } ]
Rust
src/util.rs
lovesh/pixel-signature
93102638c2e4e35f4c136fddbcb18c8abc923ebd
use crate::amcl_wrapper::group_elem::GroupElement; use crate::errors::PixelError; use amcl_wrapper::field_elem::FieldElement; use crate::{VerkeyGroup, SignatureGroup}; pub struct GeneratorSet(pub VerkeyGroup, pub Vec<SignatureGroup>); impl GeneratorSet { pub fn new(T: u128, prefix: &str) -> Result<Self, PixelError> { Ok(GeneratorSet( VerkeyGroup::from_msg_hash(prefix.as_bytes()), Self::create_generators(T, prefix)?, )) } pub fn create_generators(T: u128, prefix: &str) -> Result<Vec<SignatureGroup>, PixelError> { let l = calculate_l(T)? as usize; let mut params = Vec::with_capacity(l + 2); for i in 0..(l + 2) { let s: String = prefix.to_string() + &i.to_string(); params.push(SignatureGroup::from_msg_hash(s.as_bytes())); } Ok(params) } } pub fn calculate_l(T: u128) -> Result<u8, PixelError> { if (T < 3) || (T == u128::max_value()) { return Err(PixelError::InvalidMaxTimePeriod { T }); } if !(T + 1).is_power_of_two() { return Err(PixelError::NonPowerOfTwo { T }); } let mut l = 0; let mut t = T; while t != 0 { t = t >> 1; l += 1; } Ok(l) } /* // Note: This is different from paper as of 30/6/19. The formula in paper is incorrect. If node is left child of parent then this node's number is 1 more than parent's node number If node is right child of parent then this node's number is 1 + parent's node number + half of the number of children of the parent. 
A more verbose form of the code would ne if node is left_of(parent) { node_num(node) = 1 + node_num(parent) } else { node_num(node) = 1 + node_num(parent) + (2^ (l - depth(node)) - 2) / 2 node_num(node) = 1 + node_num(parent) + (2^ (l - depth(node) - 1)) } */ pub fn path_to_node_num(path: &[u8], l: u8) -> Result<u128, PixelError> { if (path.len() as u8) >= l { return Err(PixelError::InvalidPath { path: path.to_vec(), l, }); } let mut t = 1u128; for i in 1..(path.len() + 1) { t += 1 + (((1 << (l - i as u8)) as u128 - 1) * (path[i - 1] - 1) as u128) as u128; } Ok(t) } pub fn from_node_num_to_path(t: u128, l: u8) -> Result<Vec<u8>, PixelError> { if t > ((1 << l) - 1) as u128 { return Err(PixelError::InvalidNodeNum { t, l }); } if t == 1 { return Ok(vec![]); } else { let two_l_1 = (1 << (l - 1)) as u128; if t <= two_l_1 { let mut path = vec![1]; path.append(&mut from_node_num_to_path(t - 1, l - 1)?); return Ok(path); } else { let mut path = vec![2]; path.append(&mut from_node_num_to_path(t - two_l_1, l - 1)?); return Ok(path); } } } pub fn node_successor_paths(t: u128, l: u8) -> Result<Vec<Vec<u8>>, PixelError> { if t > ((1 << l) - 1) as u128 { return Err(PixelError::InvalidNodeNum { t, l }); } if t == 1 { return Ok(vec![]); } else { let mut curr_path = vec![]; let mut successors = vec![]; let path = from_node_num_to_path(t, l)?; for p in path { if p == 1 { let mut s = curr_path.clone(); s.push(2); successors.push(s); } curr_path.push(p) } successors.reverse(); return Ok(successors); } } pub fn calculate_path_factor_using_t_l( t: u128, l: u8, gens: &GeneratorSet, ) -> Result<SignatureGroup, PixelError> { let path = from_node_num_to_path(t, l)?; calculate_path_factor(path, gens) } pub fn calculate_path_factor(path: Vec<u8>, gens: &GeneratorSet) -> Result<SignatureGroup, PixelError> { if gens.1.len() < (path.len() + 2) { return Err(PixelError::NotEnoughGenerators { n: path.len() + 2 }); } let mut sigma_1_1 = gens.1[1].clone(); for (i, p) in path.iter().enumerate() { if 
*p == 1 { sigma_1_1 += &gens.1[2 + i] } else { sigma_1_1 += &gens.1[2 + i].double() } } Ok(sigma_1_1) } #[cfg(test)] mod tests { use super::*; use std::collections::HashSet; use std::iter::FromIterator; #[test] fn test_calculate_l() { assert!(calculate_l(u128::max_value()).is_err()); let valid_Ts: HashSet<u128> = HashSet::from_iter(vec![3, 7, 15, 31, 63].iter().cloned()); assert_eq!(calculate_l(3).unwrap(), 2); assert_eq!(calculate_l(7).unwrap(), 3); assert_eq!(calculate_l(15).unwrap(), 4); assert_eq!(calculate_l(31).unwrap(), 5); for i in 1..65 { if !valid_Ts.contains(&i) { assert!(calculate_l(i).is_err()); } } } #[test] fn test_path_to_node_num() { assert!(path_to_node_num(&[1, 2, 1], 3).is_err()); assert!(path_to_node_num(&[1, 2, 1, 1], 3).is_err()); assert!(path_to_node_num(&[1, 1, 2, 1], 4).is_err()); assert!(path_to_node_num(&[2, 1, 2, 1, 1], 4).is_err()); assert_eq!(path_to_node_num(&[], 3).unwrap(), 1); assert_eq!(path_to_node_num(&[1], 3).unwrap(), 2); assert_eq!(path_to_node_num(&[2], 3).unwrap(), 5); assert_eq!(path_to_node_num(&[2, 1], 3).unwrap(), 6); assert_eq!(path_to_node_num(&[2, 2], 3).unwrap(), 7); assert_eq!(path_to_node_num(&[1, 1], 3).unwrap(), 3); assert_eq!(path_to_node_num(&[1, 1, 1], 4).unwrap(), 4); assert_eq!(path_to_node_num(&[1, 1, 2], 4).unwrap(), 5); assert_eq!(path_to_node_num(&[1, 2], 4).unwrap(), 6); assert_eq!(path_to_node_num(&[1, 2, 1], 4).unwrap(), 7); assert_eq!(path_to_node_num(&[1, 2, 2], 4).unwrap(), 8); assert_eq!(path_to_node_num(&[2], 4).unwrap(), 9); } #[test] fn test_from_node_num_to_path() { assert!(from_node_num_to_path(8, 3).is_err()); assert!(from_node_num_to_path(9, 3).is_err()); assert!(from_node_num_to_path(10, 3).is_err()); assert!(from_node_num_to_path(16, 4).is_err()); assert!(from_node_num_to_path(17, 4).is_err()); assert!(from_node_num_to_path(20, 4).is_err()); assert_eq!(from_node_num_to_path(1, 3).unwrap(), Vec::<u8>::new()); assert_eq!(from_node_num_to_path(2, 3).unwrap(), vec![1]); 
assert_eq!(from_node_num_to_path(3, 3).unwrap(), vec![1, 1]); assert_eq!(from_node_num_to_path(4, 3).unwrap(), vec![1, 2]); assert_eq!(from_node_num_to_path(5, 3).unwrap(), vec![2]); assert_eq!(from_node_num_to_path(6, 3).unwrap(), vec![2, 1]); assert_eq!(from_node_num_to_path(7, 3).unwrap(), vec![2, 2]); assert_eq!(from_node_num_to_path(15, 4).unwrap(), vec![2, 2, 2]); assert_eq!(from_node_num_to_path(14, 4).unwrap(), vec![2, 2, 1]); assert_eq!(from_node_num_to_path(13, 4).unwrap(), vec![2, 2]); assert_eq!(from_node_num_to_path(10, 4).unwrap(), vec![2, 1]); assert_eq!(from_node_num_to_path(11, 4).unwrap(), vec![2, 1, 1]); assert_eq!(from_node_num_to_path(12, 4).unwrap(), vec![2, 1, 2]); assert_eq!(from_node_num_to_path(8, 4).unwrap(), vec![1, 2, 2]); } #[test] fn test_node_successors_7() { let T = 7; let l = calculate_l(T).unwrap(); let successors = node_successor_paths(1, l).unwrap(); assert!(successors.is_empty()); let successors = node_successor_paths(2, l).unwrap(); assert_eq!(successors, vec![vec![2]]); let successors = node_successor_paths(3, l).unwrap(); assert_eq!(successors, vec![vec![1, 2], vec![2]]); let successors = node_successor_paths(4, l).unwrap(); assert_eq!(successors, vec![vec![2]]); let successors = node_successor_paths(5, l).unwrap(); assert!(successors.is_empty()); let successors = node_successor_paths(6, l).unwrap(); assert_eq!(successors, vec![vec![2, 2]]); let successors = node_successor_paths(7, l).unwrap(); assert!(successors.is_empty()); } #[test] fn test_node_successors_15() { let T = 15; let l = calculate_l(T).unwrap(); let successors = node_successor_paths(1, l).unwrap(); assert!(successors.is_empty()); let successors = node_successor_paths(2, l).unwrap(); assert_eq!(successors, vec![vec![2]]); let successors = node_successor_paths(3, l).unwrap(); assert_eq!(successors, vec![vec![1, 2], vec![2]]); let successors = node_successor_paths(4, l).unwrap(); assert_eq!(successors, vec![vec![1, 1, 2], vec![1, 2], vec![2]]); let successors = 
node_successor_paths(5, l).unwrap(); assert_eq!(successors, vec![vec![1, 2], vec![2]]); let successors = node_successor_paths(6, l).unwrap(); assert_eq!(successors, vec![vec![2]]); let successors = node_successor_paths(7, l).unwrap(); assert_eq!(successors, vec![vec![1, 2, 2], vec![2]]); let successors = node_successor_paths(9, l).unwrap(); assert!(successors.is_empty()); let successors = node_successor_paths(10, l).unwrap(); assert_eq!(successors, vec![vec![2, 2]]); let successors = node_successor_paths(11, l).unwrap(); assert_eq!(successors, vec![vec![2, 1, 2], vec![2, 2]]); let successors = node_successor_paths(12, l).unwrap(); assert_eq!(successors, vec![vec![2, 2]]); let successors = node_successor_paths(15, l).unwrap(); assert!(successors.is_empty()); } }
use crate::amcl_wrapper::group_elem::GroupElement; use crate::errors::PixelError; use amcl_wrapper::field_elem::FieldElement; use crate::{VerkeyGroup, SignatureGroup}; pub struct GeneratorSet(pub VerkeyGroup, pub Vec<SignatureGroup>); impl GeneratorSet { pub fn new(T: u128, prefix: &str) -> Result<Self, PixelError> { Ok(GeneratorSet( VerkeyGroup::from_msg_hash(prefix.as_bytes()), Self::create_generators(T, prefix)?, )) } pub fn create_generators(T: u128, prefix: &str) -> Result<Vec<SignatureGroup>, PixelError> { let l = calculate_l(T)? as usize; let mut params = Vec::with_capacity(l + 2); for i in 0..(l + 2) { let s: String = prefix.to_string() + &i.to_string(); params.push(SignatureGroup::from_msg_hash(s.as_bytes())); } Ok(params) } } pub fn calculate_l(T: u128) -> Result<u8, PixelError> { if (T < 3) || (T == u128::max_value()) { return Err(PixelError::InvalidMaxTimePeriod { T }); } if !(T + 1).is_power_of_two() { return Err(PixelError::NonPowerOfTwo { T }); } let mut l = 0; let mut t = T; while t != 0 { t = t >> 1; l += 1; } Ok(l) } /* // Note: This is different from paper as of 30/6/19. The formula in paper is incorrect. If node is left child of parent then this node's number is 1 more than parent's node number If node is right child of parent then this node's number is 1 + parent's node number + half of the number of children of the parent. 
A more verbose form of the code would ne if node is left_of(parent) { node_num(node) = 1 + node_num(parent) } else { node_num(node) = 1 + node_num(parent) + (2^ (l - depth(node)) - 2) / 2 node_num(node) = 1 + node_num(parent) + (2^ (l - depth(node) - 1)) } */ pub fn path_to_node_num(path: &[u8], l: u8) -> Result<u128, PixelError> { if (path.len() as u8) >= l { return Err(PixelError::InvalidPath { path: path.to_vec(), l, }); } let mut t = 1u128; for i in 1..(path.len() + 1) { t += 1 + (((1 << (l - i as u8)) as u128 - 1) * (path[i - 1] - 1) as u128) as u128; } Ok(t) } pub fn from_node_num_to_path(t: u128, l: u8) -> Result<Vec<u8>, PixelError> { if t > ((1 << l) - 1) as u128 { return Err(PixelError::InvalidNodeNum { t, l }); } if t == 1 { return Ok(vec![]); } else { let two_l_1 = (1 << (l - 1)) as u128; if t <= two_l_1 { let mut path = vec![1]; path.append(&mut from_node_num_to_path(t - 1, l - 1)?); return Ok(path); } else { let mut path = vec![2]; path.append(&mut from_node_num_to_path(t - two_l_1, l - 1)?); return Ok(path); } } } pub fn node_successor_paths(t: u128, l: u8) -> Result<Vec<Vec<u8>>, PixelError> { if t > ((1 << l) - 1) as u128 { return Err(PixelError::InvalidNodeNum { t, l }); } if t == 1 { return Ok(vec![]); } else { let mut curr_path = vec![]; let mut successors = vec![]; let path = from_node_num_to_path(t, l)?; for p in path { if p == 1 { let mut s = curr_path.clone(); s.push(2); successors.push(s); } curr_path.push(p) } successors.reverse(); return Ok(successors); } } pub fn calculate_path_factor_using_t_l( t: u128, l: u8, gens: &GeneratorSet, ) -> Result<SignatureGroup, PixelError> { let path = from_node_num_to_path(t, l)?; calculate_path_factor(path, gens) }
#[cfg(test)] mod tests { use super::*; use std::collections::HashSet; use std::iter::FromIterator; #[test] fn test_calculate_l() { assert!(calculate_l(u128::max_value()).is_err()); let valid_Ts: HashSet<u128> = HashSet::from_iter(vec![3, 7, 15, 31, 63].iter().cloned()); assert_eq!(calculate_l(3).unwrap(), 2); assert_eq!(calculate_l(7).unwrap(), 3); assert_eq!(calculate_l(15).unwrap(), 4); assert_eq!(calculate_l(31).unwrap(), 5); for i in 1..65 { if !valid_Ts.contains(&i) { assert!(calculate_l(i).is_err()); } } } #[test] fn test_path_to_node_num() { assert!(path_to_node_num(&[1, 2, 1], 3).is_err()); assert!(path_to_node_num(&[1, 2, 1, 1], 3).is_err()); assert!(path_to_node_num(&[1, 1, 2, 1], 4).is_err()); assert!(path_to_node_num(&[2, 1, 2, 1, 1], 4).is_err()); assert_eq!(path_to_node_num(&[], 3).unwrap(), 1); assert_eq!(path_to_node_num(&[1], 3).unwrap(), 2); assert_eq!(path_to_node_num(&[2], 3).unwrap(), 5); assert_eq!(path_to_node_num(&[2, 1], 3).unwrap(), 6); assert_eq!(path_to_node_num(&[2, 2], 3).unwrap(), 7); assert_eq!(path_to_node_num(&[1, 1], 3).unwrap(), 3); assert_eq!(path_to_node_num(&[1, 1, 1], 4).unwrap(), 4); assert_eq!(path_to_node_num(&[1, 1, 2], 4).unwrap(), 5); assert_eq!(path_to_node_num(&[1, 2], 4).unwrap(), 6); assert_eq!(path_to_node_num(&[1, 2, 1], 4).unwrap(), 7); assert_eq!(path_to_node_num(&[1, 2, 2], 4).unwrap(), 8); assert_eq!(path_to_node_num(&[2], 4).unwrap(), 9); } #[test] fn test_from_node_num_to_path() { assert!(from_node_num_to_path(8, 3).is_err()); assert!(from_node_num_to_path(9, 3).is_err()); assert!(from_node_num_to_path(10, 3).is_err()); assert!(from_node_num_to_path(16, 4).is_err()); assert!(from_node_num_to_path(17, 4).is_err()); assert!(from_node_num_to_path(20, 4).is_err()); assert_eq!(from_node_num_to_path(1, 3).unwrap(), Vec::<u8>::new()); assert_eq!(from_node_num_to_path(2, 3).unwrap(), vec![1]); assert_eq!(from_node_num_to_path(3, 3).unwrap(), vec![1, 1]); assert_eq!(from_node_num_to_path(4, 3).unwrap(), vec![1, 2]); 
assert_eq!(from_node_num_to_path(5, 3).unwrap(), vec![2]); assert_eq!(from_node_num_to_path(6, 3).unwrap(), vec![2, 1]); assert_eq!(from_node_num_to_path(7, 3).unwrap(), vec![2, 2]); assert_eq!(from_node_num_to_path(15, 4).unwrap(), vec![2, 2, 2]); assert_eq!(from_node_num_to_path(14, 4).unwrap(), vec![2, 2, 1]); assert_eq!(from_node_num_to_path(13, 4).unwrap(), vec![2, 2]); assert_eq!(from_node_num_to_path(10, 4).unwrap(), vec![2, 1]); assert_eq!(from_node_num_to_path(11, 4).unwrap(), vec![2, 1, 1]); assert_eq!(from_node_num_to_path(12, 4).unwrap(), vec![2, 1, 2]); assert_eq!(from_node_num_to_path(8, 4).unwrap(), vec![1, 2, 2]); } #[test] fn test_node_successors_7() { let T = 7; let l = calculate_l(T).unwrap(); let successors = node_successor_paths(1, l).unwrap(); assert!(successors.is_empty()); let successors = node_successor_paths(2, l).unwrap(); assert_eq!(successors, vec![vec![2]]); let successors = node_successor_paths(3, l).unwrap(); assert_eq!(successors, vec![vec![1, 2], vec![2]]); let successors = node_successor_paths(4, l).unwrap(); assert_eq!(successors, vec![vec![2]]); let successors = node_successor_paths(5, l).unwrap(); assert!(successors.is_empty()); let successors = node_successor_paths(6, l).unwrap(); assert_eq!(successors, vec![vec![2, 2]]); let successors = node_successor_paths(7, l).unwrap(); assert!(successors.is_empty()); } #[test] fn test_node_successors_15() { let T = 15; let l = calculate_l(T).unwrap(); let successors = node_successor_paths(1, l).unwrap(); assert!(successors.is_empty()); let successors = node_successor_paths(2, l).unwrap(); assert_eq!(successors, vec![vec![2]]); let successors = node_successor_paths(3, l).unwrap(); assert_eq!(successors, vec![vec![1, 2], vec![2]]); let successors = node_successor_paths(4, l).unwrap(); assert_eq!(successors, vec![vec![1, 1, 2], vec![1, 2], vec![2]]); let successors = node_successor_paths(5, l).unwrap(); assert_eq!(successors, vec![vec![1, 2], vec![2]]); let successors = 
node_successor_paths(6, l).unwrap(); assert_eq!(successors, vec![vec![2]]); let successors = node_successor_paths(7, l).unwrap(); assert_eq!(successors, vec![vec![1, 2, 2], vec![2]]); let successors = node_successor_paths(9, l).unwrap(); assert!(successors.is_empty()); let successors = node_successor_paths(10, l).unwrap(); assert_eq!(successors, vec![vec![2, 2]]); let successors = node_successor_paths(11, l).unwrap(); assert_eq!(successors, vec![vec![2, 1, 2], vec![2, 2]]); let successors = node_successor_paths(12, l).unwrap(); assert_eq!(successors, vec![vec![2, 2]]); let successors = node_successor_paths(15, l).unwrap(); assert!(successors.is_empty()); } }
pub fn calculate_path_factor(path: Vec<u8>, gens: &GeneratorSet) -> Result<SignatureGroup, PixelError> { if gens.1.len() < (path.len() + 2) { return Err(PixelError::NotEnoughGenerators { n: path.len() + 2 }); } let mut sigma_1_1 = gens.1[1].clone(); for (i, p) in path.iter().enumerate() { if *p == 1 { sigma_1_1 += &gens.1[2 + i] } else { sigma_1_1 += &gens.1[2 + i].double() } } Ok(sigma_1_1) }
function_block-full_function
[ { "content": "#[cfg(feature = \"VerkeyG1\")]\n\npub fn ate_multi_pairing(elems: Vec<(&SignatureGroup, &VerkeyGroup)>) -> GT {\n\n GT::ate_multi_pairing(\n\n elems\n\n .into_iter()\n\n .map(|(s, v)| (v, s))\n\n .collect::<Vec<(&VerkeyGroup, &SignatureGroup)>>(),\n\n )\n\n}\n\n\n\n#[macro_use]\n\nextern crate failure;\n\n\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\npub mod errors;\n\npub mod keys;\n\npub mod signature;\n\npub mod util;\n\npub mod threshold_sig;\n\n\n\n// TODO: Add a high level object that orchestrates key update and signing. Like if the signing has to\n\n// be done for t=x and current time in SigkeyManager is y<x, it should update time to t=x.\n", "file_path": "src/lib.rs", "rank": 6, "score": 73591.60415043446 }, { "content": "#[cfg(feature = \"VerkeyG1\")]\n\npub fn ate_2_pairing(\n\n g1: &SignatureGroup,\n\n g2: &VerkeyGroup,\n\n h1: &SignatureGroup,\n\n h2: &VerkeyGroup,\n\n) -> GT {\n\n GT::ate_2_pairing(g2, g1, h2, h1)\n\n}\n", "file_path": "src/lib.rs", "rank": 7, "score": 66402.45267201433 }, { "content": "#[cfg(test)]\n\npub fn setup<'a, R: RngCore + CryptoRng>(\n\n T: u128,\n\n prefix: &str,\n\n rng: &mut R,\n\n db: &'a mut dyn SigKeyDb,\n\n) -> Result<(GeneratorSet, Verkey, SigkeyManager, ProofOfPossession), PixelError> {\n\n let generators = GeneratorSet::new(T, prefix)?;\n\n let (keypair, sigkeys) = Keypair::new(T, &generators, rng, db)?;\n\n Ok((generators, keypair.ver_key, sigkeys, keypair.pop))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use rand::rngs::ThreadRng;\n\n // For benchmarking\n\n use std::time::{Duration, Instant};\n\n\n\n fn fast_forward_and_check<R: RngCore + CryptoRng>(\n\n set: &mut SigkeyManager,\n", "file_path": "src/keys.rs", "rank": 8, "score": 46961.61027449732 }, { "content": "/// USING TRUSTED THIRD PARTY ONLY FOR DEMONSTRATION, IN PRACTICE A DECENTRALIZED KEY GENERATION\n\n/// PROTOCOL WILL BE USED.\n\n/// Keygen done by trusted party using Shamir secret 
sharing. Creates signing and verification\n\n/// keys for each signer. The trusted party will know every signer's secret keys and the\n\n/// aggregate secret keys and can create signatures.\n\n/// Outputs 2 items, first is the shared secret and should be destroyed.\n\n/// The second contains the keys, 1 item corresponding to each signer.\n\npub fn trusted_party_SSS_keygen<R: RngCore + CryptoRng>(\n\n threshold: usize,\n\n total: usize,\n\n rng: &mut R,\n\n gen: &VerkeyGroup,\n\n gens: &[SignatureGroup],\n\n) -> Result<(FieldElement, Vec<Signer>), PixelError> {\n\n let (secret_x, x_shares) = get_shared_secret(threshold, total);\n\n Ok((secret_x, keygen_from_shares(total, x_shares, rng, gen, gens)?))\n\n}\n\n\n\npub struct ThresholdScheme {}\n\n\n\nimpl ThresholdScheme {\n\n /// Combine at least `threshold` number of signatures to create a threshold signature\n\n pub fn aggregate_sigs(threshold: usize, sigs: Vec<(usize, Signature)>) -> Signature {\n\n assert!(sigs.len() >= threshold);\n\n\n\n let mut sigma_1_bases = SignatureGroupVec::with_capacity(threshold);\n\n let mut sigma_1_exps = FieldElementVector::with_capacity(threshold);\n", "file_path": "src/threshold_sig.rs", "rank": 9, "score": 43196.27200093311 }, { "content": "/// Key-value database interface that needs to be implemented for storing signing keys.\n\n/// Signing key are db values whereas db keys are the time period for which the signing key needs to be used.\n\npub trait SigKeyDb {\n\n fn insert_key(&mut self, t: u128, sig_key: Sigkey);\n\n\n\n /// Removes key from database and zeroes it out\n\n fn remove_key(&mut self, t: u128);\n\n\n\n fn has_key(&self, t: u128) -> bool;\n\n\n\n fn get_key(&self, t: u128) -> Result<&Sigkey, PixelError>;\n\n\n\n /// Returns indices (time periods) for all present keys\n\n fn get_key_indices(&self) -> HashSet<u128>;\n\n}\n\n\n\n/// An in-memory database for storing signing keys. Uses hashmap. 
Should only be used for testing.\n\npub struct InMemorySigKeyDb {\n\n keys: HashMap<u128, Sigkey>,\n\n}\n\n\n\nimpl SigKeyDb for InMemorySigKeyDb {\n", "file_path": "src/keys.rs", "rank": 10, "score": 31984.220448933193 }, { "content": "/// Takes shares for secret and generate signing and verification keys\n\nfn keygen_from_shares<R: RngCore + CryptoRng>(\n\n num_signers: usize,\n\n mut master_secret_shares: HashMap<usize, FieldElement>,\n\n rng: &mut R,\n\n gen: &VerkeyGroup,\n\n gens: &[SignatureGroup],\n\n) -> Result<Vec<Signer>, PixelError> {\n\n let mut signers = vec![];\n\n\n\n for i in 0..num_signers {\n\n let id = i + 1;\n\n let x_i = master_secret_shares.remove(&id).unwrap();\n\n let master_secret = MasterSecret {value: x_i};\n\n let verkey = Verkey::from_master_secret(&master_secret, gen);\n\n let pop = Keypair::gen_pop(&verkey, &master_secret);\n\n let sigkey_initial = Sigkey::initial_secret_key(\n\n gen,\n\n gens,\n\n &master_secret,\n\n rng,\n", "file_path": "src/threshold_sig.rs", "rank": 11, "score": 24686.82032355164 }, { "content": " // Create signing keys for left and right child\n\n let c = sk.0.clone();\n\n let d = sk.1[0].clone();\n\n\n\n // key for left child\n\n let mut sk_left_prime_prime = vec![&d + &sk.1[1]];\n\n for i in 2..sk.1.len() {\n\n sk_left_prime_prime.push(sk.1[i].clone());\n\n }\n\n\n\n // key for right child\n\n let mut path_right = path.clone();\n\n path_right.push(2);\n\n let path_right_len = path_right.len();\n\n let node_num_right = path_to_node_num(&path_right, self.l)?;\n\n\n\n let r = FieldElement::random_using_rng(rng);\n\n // d * e_j^2\n\n let mut sk_right_prime_prime = vec![&d + (sk.1[1].double())];\n\n // h_0 * h_1^path[0] * h_2^path[1] * ... 
h_k^path[-1]\n", "file_path": "src/keys.rs", "rank": 13, "score": 24.322555047919415 }, { "content": " let nodes_to_remove = all_key_node_nums.difference(&node_num_to_keep);\n\n let mut removed = vec![];\n\n for n in nodes_to_remove {\n\n db.remove_key(*n);\n\n removed.push(n.clone())\n\n }\n\n self.t = t;\n\n Ok(removed)\n\n }\n\n\n\n /// Derive signing key denoted by path `key_path` using its predecessor node's signing key `pred_sk`\n\n fn derive_key<R: RngCore + CryptoRng>(\n\n key_path: &[u8],\n\n pred_sk: &Sigkey,\n\n pred_sk_path_len: usize,\n\n l: u8,\n\n gens: &GeneratorSet,\n\n rng: &mut R,\n\n ) -> Result<Sigkey, PixelError> {\n\n let key_path_len = key_path.len();\n", "file_path": "src/keys.rs", "rank": 16, "score": 20.908167506572095 }, { "content": " NonPowerOfTwo { T: u128 },\n\n #[fail(display = \"Invalid path={:?} for l={}\", path, l)]\n\n InvalidPath { path: Vec<u8>, l: u8 },\n\n #[fail(display = \"Invalid node number={} for l={}\", t, l)]\n\n InvalidNodeNum { t: u128, l: u8 },\n\n #[fail(display = \"Provide at least {} generators\", n)]\n\n NotEnoughGenerators { n: usize },\n\n #[fail(display = \"Sigkey for time t={} not found\", t)]\n\n SigkeyNotFound { t: u128 },\n\n #[fail(\n\n display = \"Cannot update key to previous time={}, current time={}\",\n\n old_t, current_t\n\n )]\n\n SigkeyUpdateBackward { old_t: u128, current_t: u128 },\n\n #[fail(display = \"Sigkey alrady updated to desired time={}\", t)]\n\n SigkeyAlreadyUpdated { t: u128 },\n\n}\n", "file_path": "src/errors.rs", "rank": 18, "score": 18.113010848047445 }, { "content": " msg: &[u8],\n\n t: u128,\n\n l: u8,\n\n gens: &GeneratorSet,\n\n sig_key: &Sigkey,\n\n rng: &mut R,\n\n ) -> Result<Self, PixelError> {\n\n if gens.1.len() < (l as usize + 2) {\n\n return Err(PixelError::NotEnoughGenerators { n: l as usize + 2 });\n\n }\n\n\n\n let r = FieldElement::random_using_rng(rng);\n\n Self::gen_sig(msg, t, l, gens, sig_key, r)\n\n }\n\n\n\n /// Creates new deterministic signature. 
Signature for same message and secret key will be equal\n\n pub fn new_deterministic(\n\n msg: &[u8],\n\n t: u128,\n\n l: u8,\n", "file_path": "src/signature.rs", "rank": 19, "score": 17.783482266611845 }, { "content": " .iter()\n\n .filter(|p| {\n\n let n = path_to_node_num(p, self.l).unwrap();\n\n !Self::has_key(n, db)\n\n })\n\n .collect();\n\n\n\n match Self::get_key(t, db) {\n\n Ok(_) => (), // Key and thus all needed successors already present\n\n Err(_) => {\n\n // Key absent. Calculate the highest predecessor path and key to derive necessary children.\n\n let pred_sk_path: Vec<u8> = if Self::has_key(1, db) {\n\n vec![]\n\n } else {\n\n let mut cur_path = vec![];\n\n for p in &t_path {\n\n cur_path.push(*p);\n\n if Self::has_key(path_to_node_num(&cur_path, self.l)?, db) {\n\n break;\n\n }\n", "file_path": "src/keys.rs", "rank": 20, "score": 17.666370017708502 }, { "content": " set.fast_forward_update(2u128, &gens, &mut rng, &mut db).unwrap();\n\n let sk_left = SigkeyManager::get_key(2u128, &db).unwrap();\n\n assert_eq!(sk_left.1.len() as u8, l);\n\n let sk_right = SigkeyManager::get_key(5u128, &db).unwrap();\n\n assert_eq!(sk_right.1.len() as u8, l);\n\n assert_eq!(set.t, 2);\n\n assert!(!SigkeyManager::has_key(1u128, &db));\n\n\n\n // t=3\n\n set.fast_forward_update(3u128, &gens, &mut rng, &mut db).unwrap();\n\n let sk_left = SigkeyManager::get_key(3u128, &db).unwrap();\n\n assert_eq!(sk_left.1.len() as u8, l - 1);\n\n let sk_right = SigkeyManager::get_key(4u128, &db).unwrap();\n\n assert_eq!(sk_right.1.len() as u8, l - 1);\n\n assert_eq!(set.t, 3);\n\n assert!(!SigkeyManager::has_key(2u128, &db));\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n\n\n // t=4\n\n set.fast_forward_update(4u128, &gens, &mut rng, &mut db).unwrap();\n", "file_path": "src/keys.rs", "rank": 21, "score": 17.48894213357228 }, { "content": " let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) = setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut 
db).unwrap();\n\n\n\n // t=2\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n let sk_left = SigkeyManager::get_key(2u128, &db).unwrap();\n\n assert_eq!(sk_left.1.len() as u8, l);\n\n let sk_right = SigkeyManager::get_key(5u128, &db).unwrap();\n\n assert_eq!(sk_right.1.len() as u8, l);\n\n assert_eq!(set.t, 2);\n\n assert!(!SigkeyManager::has_key(1u128, &db));\n\n\n\n // t=3\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n let sk_left = SigkeyManager::get_key(3u128, &db).unwrap();\n\n assert_eq!(sk_left.1.len() as u8, l - 1);\n\n let sk_right = SigkeyManager::get_key(4u128, &db).unwrap();\n\n assert_eq!(sk_right.1.len() as u8, l - 1);\n\n assert_eq!(set.t, 3);\n\n assert!(!SigkeyManager::has_key(2u128, &db));\n", "file_path": "src/keys.rs", "rank": 22, "score": 17.29778181329042 }, { "content": " db.get_key(self.t)\n\n }\n\n\n\n /// Update time by 1\n\n pub fn simple_update<R: RngCore + CryptoRng>(\n\n &mut self,\n\n gens: &GeneratorSet,\n\n rng: &mut R,\n\n db: &mut dyn SigKeyDb\n\n ) -> Result<u128, PixelError> {\n\n let path = from_node_num_to_path(self.t, self.l)?;\n\n let path_len = path.len();\n\n let sk = self.get_current_key(db)?;\n\n // sk.1.len() + path_len == l+1\n\n debug_assert_eq!(self.l as usize + 1, sk.1.len() + path_len);\n\n\n\n // Index of key that will be removed\n\n let removed_key_idx: u128;\n\n\n\n if path_len < (self.l as usize - 1) {\n", "file_path": "src/keys.rs", "rank": 23, "score": 17.07824319887032 }, { "content": " set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n let sk_left = SigkeyManager::get_key(2u128, &db).unwrap();\n\n assert_eq!(sk_left.1.len() as u8, l);\n\n let sk_right = SigkeyManager::get_key(9u128, &db).unwrap();\n\n assert_eq!(sk_right.1.len() as u8, l);\n\n assert_eq!(set.t, 2);\n\n assert!(!SigkeyManager::has_key(1u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n\n\n\n // t=3\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n let sk_left = 
SigkeyManager::get_key(3u128, &db).unwrap();\n\n assert_eq!(sk_left.1.len() as u8, l - 1);\n\n let sk_right = SigkeyManager::get_key(6u128, &db).unwrap();\n\n assert_eq!(sk_right.1.len() as u8, l - 1);\n\n assert_eq!(set.t, 3);\n\n assert!(!SigkeyManager::has_key(2u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n\n\n\n // t=4\n", "file_path": "src/keys.rs", "rank": 24, "score": 16.995234386876 }, { "content": " let path_factor = calculate_path_factor(path_right, &gens)?;\n\n // d * e_j^2 * (h_0 * h_1^path[0] * h_2^path[1] * ... h_k^path[-1])^r\n\n sk_right_prime_prime[0] += (&path_factor * &r);\n\n\n\n for i in 2..sk.1.len() {\n\n let e = &sk.1[i] + (&gens.1[path_right_len + i] * &r);\n\n sk_right_prime_prime.push(e);\n\n }\n\n\n\n // Update the set with keys for both children and remove key corresponding to current time period\n\n db\n\n .insert_key(self.t + 1, Sigkey(c.clone(), sk_left_prime_prime));\n\n db.insert_key(\n\n node_num_right,\n\n Sigkey(&c + (&gens.0 * &r), sk_right_prime_prime),\n\n );\n\n removed_key_idx = self.t.clone();\n\n self.t = self.t + 1;\n\n } else {\n\n // Current node is at leaf, so remove current leaf. Already have rest of the keys.\n", "file_path": "src/keys.rs", "rank": 25, "score": 16.547200216795268 }, { "content": "![Binary tree with 7 nodes](./binary_tree_7.png) \n\n\n\nAbove is an example to support 7 time periods so `T` = 7. Each node is given a number denoted by *`t`* in italic font. The **`t`** in bold corresponds to the path from root to node \n\nwhere a 1 is appended to the path if node is on left of parent otherwise a 2 is appended. In beginning, key for `t` = 1 (root) is generated. When time passes \n\nto `t` = 2, key for node `t` = 2 is generated using the key for t=1 (root). Now key for t=1 needs to be removed. But it cannot be removed as only \n\nit can generate the key for node t=5 as t=5 has only 1 parent which is t=1. 
So key for t=5 is generated and then node for t=1 is removed.\n\nIn code, the t=2 and t=5 are called successors of node for t=1. \n\nWhen t=3, key for nodes t=3 and t=4 are generated and node for t=2 is removed. And so on.\n\n\n\n![Binary tree with 15 nodes](./binary_tree_15.png)\n\n\n\nAnother example to support 15 time periods, so `T` = 15. Each node is given a number corresponding to the time period. In beginning key for node 1 (root) is generated.\n\nThen when t=2, keys for node 2 and 9 are generated and node 1's key is removed. When t=3, key for node 2 is removed but only after generating keys for node 3 and 6. \n\nWhen t=4, keys for node 4 and 5 are generated since their parent node 3 needs to be removed. And so on. \n\nIn fast forward case, i.e. signer wants to advance to a time period not immediately next. Say the signer has signing key for t=1. He now wants to advance to t=3. He will derive the keys for \n\nnodes 3, 6 and 9 (no need to derive key for node 2) and then remove key for node 1. 
\n\n\n", "file_path": "Readme.md", "rank": 27, "score": 16.163478213547435 }, { "content": "pub struct ProofOfPossession {\n\n pub value: SignatureGroup,\n\n}\n\n\n\n/// Keypair consisting of a master secret, the corresponding verkey and the proof of possession\n\n/// Type GPrime denotes group for public key and type G denotes group for proof of possession.\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct Keypair {\n\n pub ver_key: Verkey,\n\n pub pop: ProofOfPossession,\n\n}\n\n\n\nconst PrefixPoP: &[u8] = b\"PoP\";\n\n\n\nimpl<'a> Keypair {\n\n pub fn new<R: RngCore + CryptoRng>(\n\n T: u128,\n\n generators: &GeneratorSet,\n\n rng: &mut R,\n\n db: &'a mut dyn SigKeyDb,\n", "file_path": "src/keys.rs", "rank": 29, "score": 15.985968485359278 }, { "content": " t: u128,\n\n l: u8,\n\n gens: &GeneratorSet,\n\n ) -> Result<bool, PixelError> {\n\n let h = &gens.1[0];\n\n let g2 = &gens.0;\n\n let y = verkey;\n\n let m = Self::hash_message(msg);\n\n let mut sigma_1_1 = calculate_path_factor_using_t_l(t, l, gens)?;\n\n sigma_1_1 += &gens.1[l as usize + 1] * m;\n\n\n\n // Check that e(sigma_1, g2) == e(h, y) * e(sigma_1_1, sigma_2)\n\n // This is equivalent to checking e(h, y) * e(sigma_1_1, sigma_2) * e(sigma_1, g2)^-1 == 1\n\n // Which comes out to be e(h, y) * e(sigma_1_1, sigma_2) * e(sigma_1, -g2) == 1 which can put in a multi-pairing.\n\n // -g2 can be precomputed if performance is critical\n\n // Similarly it might be better to precompute e(h, y) and do a 2-pairing than a 3-pairing\n\n let e = ate_multi_pairing(vec![\n\n (&sigma_1, &g2.negation()),\n\n (h, y),\n\n (&sigma_1_1, sigma_2),\n", "file_path": "src/signature.rs", "rank": 30, "score": 15.66094849804043 }, { "content": " return Err(PixelError::SigkeyUpdateBackward {\n\n old_t: t,\n\n current_t: self.t,\n\n });\n\n }\n\n if t == self.t {\n\n return Err(PixelError::SigkeyAlreadyUpdated { t });\n\n }\n\n\n\n if (t - self.t) == 1 {\n\n // Simple update is more efficient\n\n let removed = 
self.simple_update(gens, rng, db)?;\n\n return Ok(vec![removed]);\n\n }\n\n\n\n // Find key for t and all of t's successors\n\n let t_path = from_node_num_to_path(t, self.l)?;\n\n let successor_paths = node_successor_paths(t, self.l)?;\n\n // The set might already have keys for some successors, filter them out.\n\n let successors_to_update_paths: Vec<_> = successor_paths\n", "file_path": "src/keys.rs", "rank": 31, "score": 15.46789127522016 }, { "content": "# Pixel: Forward secure Multi-signatures\n\n\n\n1. Based on the paper [Pixel: Multi-signatures for Consensus](https://eprint.iacr.org/2019/514) \n\n1. Using [MIRACL's AMCL library](https://github.com/miracl/amcl).\n\n1. Using BLS12-381 curve.\n\n1. The groups (G1 or G2) between signature and verification key can be swapped by using compile time feature. \n\n1. Provides the simple key update (key for next time period) and fast forward key update (key for arbitrary time in future) mechanism.\n\n1. Provides the threshold signature mechanism. This is not mentioned in the paper but the idea is same as BLS signatures.\n\n\n\n## Overview\n\nForward security is achieved by dividing time into periods and each time period has an associated signing key. \n\nOnly signing keys of current time period and any necessary future time periods are kept. \n\nSigning keys are organized as nodes (both internal and leaves) of a full binary tree with the height of the tree being logarithmic to the maximum time period. \n\nSo for supporting `T` time periods, a tree of depth `d` is created since the total number of nodes in this tree will be 2<sup>`d+1`</sup> - 1. In the paper as well as in code, `d+1` is denoted by `l`.\n\nThe tree is then traversed in pre-order (root then left then right) manner and nodes are assigned numbers corresponding to time periods. 
In the beginning, signing key is generated for the root but as time passes, \n\nsigning key for children is generated by using a parent (immediate or grandparent) and keys for nodes earlier than the current time are removed.\n\n\n", "file_path": "Readme.md", "rank": 32, "score": 15.383853789827603 }, { "content": "\n\n // Hash(msg) -> FieldElement\n\n let m = Self::hash_message(msg);\n\n\n\n let sigma_2 = &c + (&gens.0 * &r);\n\n\n\n // e_l\n\n let e_l = sig_key.1[sig_key.1.len() - 1].clone();\n\n let pf = calculate_path_factor_using_t_l(t, l, gens)?;\n\n\n\n // sigma_1 = d + (e_l * &m) + (pf + (gens.1[l as usize + 1] * m))*r\n\n let mut sigma_1 = d;\n\n let mut points = SignatureGroupVec::with_capacity(3);\n\n let mut scalars = FieldElementVector::with_capacity(3);\n\n\n\n // (e_l * &m)\n\n points.push(e_l);\n\n scalars.push(m.clone());\n\n\n\n // gens.1[l as usize + 1] * (m * r)\n", "file_path": "src/signature.rs", "rank": 33, "score": 15.372471121471097 }, { "content": "\n\n pub fn verify_aggregated(\n\n &self,\n\n msg: &[u8],\n\n t: u128,\n\n l: u8,\n\n ver_keys: Vec<&Verkey>,\n\n gens: &GeneratorSet,\n\n ) -> Result<bool, PixelError> {\n\n let avk = Verkey::aggregate(ver_keys);\n\n self.verify(msg, t, l, gens, &avk)\n\n }\n\n\n\n /// Hash message in the field before signing or verification\n\n fn hash_message(message: &[u8]) -> FieldElement {\n\n // Fixme: This is not accurate and might affect the security proof but should work in practice\n\n FieldElement::from_msg_hash(message)\n\n }\n\n\n\n /// Generate random number for signature using message time period and signing key for that time period.\n", "file_path": "src/signature.rs", "rank": 34, "score": 14.973870294734317 }, { "content": " }\n\n cur_path\n\n };\n\n let pred_node_num = path_to_node_num(&pred_sk_path, self.l)?;\n\n let pred_sk = { Self::get_key(pred_node_num, db)? 
};\n\n let pred_sk_path_len = pred_sk_path.len();\n\n\n\n let keys = {\n\n let mut keys = vec![];\n\n // Calculate key for time t\n\n let sk_t =\n\n Self::derive_key(&t_path, pred_sk, pred_sk_path_len, self.l, gens, rng)?;\n\n keys.push((t, sk_t));\n\n\n\n for path in &successors_to_update_paths {\n\n let n = path_to_node_num(*path, self.l)?;\n\n keys.push((\n\n n,\n\n Self::derive_key(&path, pred_sk, pred_sk_path_len, self.l, gens, rng)?,\n\n ));\n", "file_path": "src/keys.rs", "rank": 35, "score": 14.764258657689055 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct Sigkey(pub VerkeyGroup, pub Vec<SignatureGroup>);\n\n\n\nimpl Sigkey {\n\n /// Create secret key for the beginning, i.e. t=1\n\n pub fn initial_secret_key<R: RngCore + CryptoRng>(\n\n gen: &VerkeyGroup,\n\n gens: &[SignatureGroup],\n\n master_secret: &MasterSecret,\n\n rng: &mut R,\n\n ) -> Result<Self, PixelError> {\n\n if gens.len() < 3 {\n\n return Err(PixelError::NotEnoughGenerators { n: 3 });\n\n }\n\n let r = FieldElement::random_using_rng(rng);\n\n // g^r\n\n let sk_prime = gen * &r;\n\n let mut sk_prime_prime = vec![];\n\n // h^x\n\n let h_x = &gens[0] * &master_secret.value;\n", "file_path": "src/keys.rs", "rank": 36, "score": 14.762865445641681 }, { "content": " }\n\n keys\n\n };\n\n\n\n for (i, k) in keys {\n\n db.insert_key(i, k);\n\n }\n\n }\n\n };\n\n\n\n // Remove all nodes except successors and the node for time t.\n\n let all_key_node_nums: HashSet<_> = db.get_key_indices();\n\n // Keep successors\n\n let mut node_num_to_keep: HashSet<u128> = successor_paths\n\n .iter()\n\n .map(|p| path_to_node_num(p, self.l).unwrap())\n\n .collect();\n\n // Keep the node for time being forwarded to\n\n node_num_to_keep.insert(t);\n\n // Remove all others\n", "file_path": "src/keys.rs", "rank": 37, "score": 14.287876475901166 }, { "content": "use rand::{CryptoRng, RngCore};\n\n\n\n\n\nuse amcl_wrapper::field_elem::{FieldElement, FieldElementVector};\n\nuse 
amcl_wrapper::group_elem::{GroupElement, GroupElementVector};\n\n\n\nuse crate::errors::PixelError;\n\nuse crate::keys::{Sigkey, Verkey};\n\nuse crate::util::{calculate_path_factor_using_t_l, from_node_num_to_path, GeneratorSet};\n\nuse crate::{SignatureGroupVec, SignatureGroup, VerkeyGroup, ate_multi_pairing};\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\npub struct Signature {\n\n pub sigma_1: SignatureGroup,\n\n pub sigma_2: VerkeyGroup,\n\n}\n\n\n\nimpl Signature {\n\n /// Creates new in-deterministic signature\n\n pub fn new<R: RngCore + CryptoRng>(\n", "file_path": "src/signature.rs", "rank": 38, "score": 14.12017995018429 }, { "content": " removed_key_idx = self.t.clone();\n\n self.t = self.t + 1;\n\n }\n\n db.remove_key(removed_key_idx);\n\n Ok(removed_key_idx)\n\n }\n\n\n\n /// Update time to given `t`\n\n pub fn fast_forward_update<R: RngCore + CryptoRng>(\n\n &mut self,\n\n t: u128,\n\n gens: &GeneratorSet,\n\n rng: &mut R,\n\n db: &mut dyn SigKeyDb\n\n ) -> Result<Vec<u128>, PixelError> {\n\n if t > ((1 << self.l) - 1) as u128 {\n\n return Err(PixelError::InvalidNodeNum { t, l: self.l });\n\n }\n\n\n\n if t < self.t {\n", "file_path": "src/keys.rs", "rank": 39, "score": 13.894601418808076 }, { "content": "}\n\n\n\nimpl MasterSecret {\n\n pub fn new<R: RngCore + CryptoRng>(rng: &mut R) -> Self {\n\n Self {\n\n value: FieldElement::random_using_rng(rng),\n\n }\n\n }\n\n\n\n pub fn from_bytes(sk_bytes: &[u8]) -> Result<Self, SerzDeserzError> {\n\n FieldElement::from_bytes(sk_bytes).map(|x| Self { value: x })\n\n }\n\n\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n self.value.to_bytes()\n\n }\n\n}\n\n\n\n// The public key can be in group G1 or G2.\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n", "file_path": "src/keys.rs", "rank": 40, "score": 13.884587159808023 }, { "content": "use rand::{CryptoRng, RngCore};\n\n\n\nuse amcl_wrapper::errors::SerzDeserzError;\n\nuse amcl_wrapper::field_elem::FieldElement;\n\nuse 
amcl_wrapper::group_elem::{GroupElement, GroupElementVector};\n\n\n\nuse super::errors::PixelError;\n\nuse crate::util::{\n\n calculate_l, calculate_path_factor, from_node_num_to_path, node_successor_paths,\n\n path_to_node_num, GeneratorSet,\n\n};\n\n\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::mem;\n\nuse crate::{VerkeyGroup, SignatureGroup, ate_2_pairing};\n\n\n\n/// MasterSecret will be cleared on drop as FieldElement is cleared on drop\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct MasterSecret {\n\n pub value: FieldElement,\n", "file_path": "src/keys.rs", "rank": 41, "score": 13.789802722582905 }, { "content": " }\n\n }\n\n\n\n pub fn verify(\n\n &self,\n\n msg: &[u8],\n\n t: u128,\n\n l: u8,\n\n gens: &GeneratorSet,\n\n verkey: &Verkey,\n\n ) -> Result<bool, PixelError> {\n\n if gens.1.len() < (l as usize + 2) {\n\n return Err(PixelError::NotEnoughGenerators { n: l as usize + 2 });\n\n }\n\n\n\n if self.is_identity() || verkey.is_identity() || !self.has_correct_oder() {\n\n return Ok(false);\n\n }\n\n Self::verify_naked(&self.sigma_1, &self.sigma_2, &verkey.value, msg, t, l, gens)\n\n }\n", "file_path": "src/signature.rs", "rank": 42, "score": 13.69574656079628 }, { "content": " keys_to_aggr: Vec<(usize, &Verkey)>,\n\n gen: &VerkeyGroup,\n\n ) {\n\n let threshold_vk = ThresholdScheme::aggregate_vk(threshold, keys_to_aggr);\n\n\n\n let expected_vk = gen * &secret_x;\n\n assert_eq!(expected_vk, threshold_vk.value);\n\n }\n\n\n\n fn check_signing_on_random_msgs(threshold: usize, signers: &[Signer], mut sigkey_dbs: Vec<InMemorySigKeyDb>, mut sk_managers: Vec<SigkeyManager>, T: u128, l: u8, gens: &GeneratorSet) {\n\n let mut rng = rand::thread_rng();\n\n for t in 1..=T {\n\n let msg = FieldElement::random().to_bytes();\n\n let mut sigs = vec![];\n\n for i in 0..threshold {\n\n let sk = sk_managers[i].get_current_key(&sigkey_dbs[i]).unwrap();\n\n let sig = Signature::new(&msg, t, l, &gens, sk, &mut rng).unwrap();\n\n 
assert!(sig.verify(&msg, t, l, gens, &signers[i].verkey).unwrap());\n\n sigs.push((signers[i].id, sig));\n\n }\n", "file_path": "src/threshold_sig.rs", "rank": 43, "score": 13.676216075606082 }, { "content": " let r = FieldElement::random_using_rng(rng);\n\n\n\n let c = pred_sk.0.clone();\n\n let mut d = pred_sk.1[0].clone();\n\n for i in pred_sk_path_len..key_path_len {\n\n if key_path[i] == 1 {\n\n d += &pred_sk.1[i - pred_sk_path_len + 1];\n\n } else {\n\n d += &pred_sk.1[i - pred_sk_path_len + 1].double();\n\n }\n\n }\n\n let path_factor = calculate_path_factor(key_path.to_vec(), &gens)?;\n\n d += (&path_factor * &r);\n\n\n\n let sk_t_prime = c + (&gens.0 * &r);\n\n let mut sk_t_prime_prime = vec![];\n\n sk_t_prime_prime.push(d);\n\n\n\n let pred_sk_len = pred_sk.1.len();\n\n let gen_len = gens.1.len();\n", "file_path": "src/keys.rs", "rank": 44, "score": 13.496914088797102 }, { "content": "\n\n let start = Instant::now();\n\n set.fast_forward_update(1048575, &gens, &mut rng, &mut db).unwrap();\n\n println!(\n\n \"For l={}, time to update key from t=65535 to t=1048575 is {:?}\",\n\n l,\n\n start.elapsed()\n\n );\n\n }\n\n }\n\n // TODO: More tests with random values using node_successors function.\n\n}\n", "file_path": "src/keys.rs", "rank": 46, "score": 12.928209311675204 }, { "content": "impl SigkeyManager {\n\n pub fn new(T: u128, l: u8, sigkey: Sigkey, db: &mut dyn SigKeyDb) -> Result<Self, PixelError> {\n\n let t = 1;\n\n db.insert_key(t.clone(), sigkey);\n\n Ok(Self { l, T, t})\n\n }\n\n\n\n pub fn load(T: u128, l: u8, t: u128) -> Result<Self, PixelError> {\n\n Ok(Self { l, T, t})\n\n }\n\n\n\n pub fn has_key(t: u128, db: &dyn SigKeyDb) -> bool {\n\n db.has_key(t)\n\n }\n\n\n\n pub fn get_key<'a>(t: u128, db: &'a dyn SigKeyDb) -> Result<&'a Sigkey, PixelError> {\n\n db.get_key(t)\n\n }\n\n\n\n pub fn get_current_key<'a>(&self, db: &'a dyn SigKeyDb) -> Result<&'a Sigkey, PixelError> {\n", "file_path": "src/keys.rs", "rank": 47, "score": 12.406550361926872 
}, { "content": " gens: &GeneratorSet,\n\n sig_key: &Sigkey,\n\n ) -> Result<Self, PixelError> {\n\n if gens.1.len() < (l as usize + 2) {\n\n return Err(PixelError::NotEnoughGenerators { n: l as usize + 2 });\n\n }\n\n let r = Self::gen_sig_rand(msg, t, sig_key);\n\n Self::gen_sig(msg, t, l, gens, sig_key, r)\n\n }\n\n\n\n pub fn aggregate(sigs: Vec<&Self>) -> Self {\n\n let mut asig_1 = SignatureGroup::identity();\n\n let mut asig_2 = VerkeyGroup::identity();\n\n for s in sigs {\n\n asig_1 += &s.sigma_1;\n\n asig_2 += &s.sigma_2;\n\n }\n\n Self {\n\n sigma_1: asig_1,\n\n sigma_2: asig_2,\n", "file_path": "src/signature.rs", "rank": 48, "score": 12.158539120773073 }, { "content": " value: Self::msg_for_pop(vk) * &x.value,\n\n }\n\n }\n\n\n\n /// Verify proof of possession\n\n pub fn verify_pop(pop: &ProofOfPossession, vk: &Verkey, gen: &VerkeyGroup) -> bool {\n\n // check e(pop, gen) == e(hash(PoP||vk), vk) which is same as e(hash(PoP||vk), vk) * e(pop, gen)^-1 == 1\n\n // e(hash(PoP||vk), vk) * e(pop, gen)^-1 = e(hash(PoP||vk), vk) * e(pop, gen^-1)\n\n ate_2_pairing(&Self::msg_for_pop(vk), &vk.value, &pop.value, &(-gen)).is_one()\n\n }\n\n\n\n fn msg_for_pop(vk: &Verkey) -> SignatureGroup {\n\n let mut s = PrefixPoP.to_vec();\n\n s.extend_from_slice(&vk.to_bytes());\n\n SignatureGroup::from_msg_hash(&s)\n\n }\n\n}\n\n\n\n/// Secret key sk can be seen as (sk', sk'') where sk'' is itself a vector with initial (and max) length l+1\n\n/// Sigkey will be cleared on drop as both G1 and G2 elements are cleared on drop\n", "file_path": "src/keys.rs", "rank": 49, "score": 12.045868298928426 }, { "content": " fn gen_sig_rand(message: &[u8], t: u128, sig_key: &Sigkey) -> FieldElement {\n\n let mut bytes = message.to_vec();\n\n bytes.extend_from_slice(&sig_key.0.to_bytes());\n\n for i in &sig_key.1 {\n\n bytes.extend_from_slice(&i.to_bytes());\n\n }\n\n bytes.extend_from_slice(&t.to_le_bytes());\n\n FieldElement::from_msg_hash(&bytes)\n\n }\n\n\n\n fn gen_sig(\n\n msg: 
&[u8],\n\n t: u128,\n\n l: u8,\n\n gens: &GeneratorSet,\n\n sig_key: &Sigkey,\n\n r: FieldElement,\n\n ) -> Result<Self, PixelError> {\n\n let c = sig_key.0.clone();\n\n let d = sig_key.1[0].clone();\n", "file_path": "src/signature.rs", "rank": 50, "score": 12.02600740046904 }, { "content": "pub struct Verkey {\n\n pub value: VerkeyGroup,\n\n}\n\n\n\nimpl Verkey {\n\n pub fn from_master_secret(master_secret: &MasterSecret, generator: &VerkeyGroup) -> Self {\n\n Self {\n\n value: generator * &master_secret.value,\n\n }\n\n }\n\n\n\n pub fn aggregate(ver_keys: Vec<&Self>) -> Self {\n\n let mut avk= VerkeyGroup::identity();\n\n for vk in ver_keys {\n\n avk += &vk.value;\n\n }\n\n Self { value: avk }\n\n }\n\n\n\n pub fn from_bytes(vk_bytes: &[u8]) -> Result<Verkey, SerzDeserzError> {\n", "file_path": "src/keys.rs", "rank": 51, "score": 11.518398951382613 }, { "content": " t: u128,\n\n vk: &Verkey,\n\n l: u8,\n\n gens: &GeneratorSet,\n\n mut rng: &mut R,\n\n db: &dyn SigKeyDb\n\n ) {\n\n let sk = SigkeyManager::get_key(t, db).unwrap();\n\n let msg = \"Hello\".as_bytes();\n\n let sig = Signature::new(msg, t, l, &gens, &sk, &mut rng).unwrap();\n\n assert!(sig.verify(msg, t, l, &gens, &vk).unwrap());\n\n }\n\n\n\n fn fast_forward_sig_and_verify<R: RngCore + CryptoRng>(\n\n set: &mut SigkeyManager,\n\n t: u128,\n\n vk: &Verkey,\n\n l: u8,\n\n gens: &GeneratorSet,\n\n mut rng: &mut R,\n", "file_path": "src/signature.rs", "rank": 52, "score": 11.23248882682603 }, { "content": " }\n\n\n\n /// Create a verification key to verify a threshold signature. 
Such a key can be created\n\n /// once and persisted to be used for any threshold signature\n\n pub fn aggregate_vk(threshold: usize, keys: Vec<(usize, &Verkey)>) -> Verkey {\n\n assert!(keys.len() >= threshold);\n\n\n\n let mut vk_bases = VerkeyGroupVec::with_capacity(threshold);\n\n let mut vk_exps = FieldElementVector::with_capacity(threshold);\n\n\n\n let signer_ids = keys\n\n .iter()\n\n .take(threshold)\n\n .map(|(i, _)| *i)\n\n .collect::<HashSet<usize>>();\n\n for (id, vk) in keys.into_iter().take(threshold) {\n\n let l = Polynomial::lagrange_basis_at_0(signer_ids.clone(), id);\n\n vk_bases.push(vk.value.clone());\n\n vk_exps.push(l.clone());\n\n }\n", "file_path": "src/threshold_sig.rs", "rank": 53, "score": 11.109577372770218 }, { "content": "use rand::{CryptoRng, RngCore};\n\n\n\nuse crate::keys::{Verkey, Sigkey, MasterSecret, ProofOfPossession, Keypair};\n\nuse amcl_wrapper::field_elem::{FieldElement, FieldElementVector};\n\nuse amcl_wrapper::group_elem::{GroupElement, GroupElementVector};\n\nuse secret_sharing::polynomial::Polynomial;\n\nuse secret_sharing::shamir_secret_sharing::get_shared_secret;\n\nuse crate::{VerkeyGroup, SignatureGroup, VerkeyGroupVec, SignatureGroupVec};\n\nuse std::collections::{HashMap, HashSet};\n\nuse std::mem;\n\nuse crate::errors::PixelError;\n\nuse crate::signature::Signature;\n\n\n\npub struct Signer {\n\n pub id: usize,\n\n pub sigkey_initial: Sigkey,\n\n pub verkey: Verkey,\n\n pub pop: ProofOfPossession,\n\n}\n\n\n\n/// Takes shares for secret and generate signing and verification keys\n", "file_path": "src/threshold_sig.rs", "rank": 54, "score": 10.822199864147475 }, { "content": " assert!(sig4.verify(msg, t2, l, &gens, &vk).unwrap());\n\n assert!(sig4_deterministic.verify(msg, t2, l, &gens, &vk).unwrap());\n\n\n\n assert_eq!(sig3_deterministic, sig4_deterministic);\n\n assert_ne!(sig3, sig4);\n\n\n\n // deterministic signatures for different secret keys should differ\n\n assert_ne!(sig1_deterministic, 
sig3_deterministic);\n\n }\n\n\n\n #[test]\n\n fn test_sig_verify_post_simple_update_by_7() {\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let l = calculate_l(T).unwrap();\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, vk, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n // t=2\n", "file_path": "src/signature.rs", "rank": 55, "score": 10.716003361439618 }, { "content": " // h_0^r\n\n let h0_r = &gens[1] * &r;\n\n // h^x * h_0^r\n\n sk_prime_prime.push(h_x + &h0_r);\n\n for i in 2..gens.len() {\n\n // h_i^r\n\n sk_prime_prime.push(&gens[i] * &r);\n\n }\n\n Ok(Self(sk_prime, sk_prime_prime))\n\n }\n\n}\n\n\n\n/// `T` denotes the maximum time period supported and `t` denotes the current time period.\n\n/// #[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct SigkeyManager {\n\n l: u8,\n\n T: u128,\n\n t: u128,\n\n}\n\n\n", "file_path": "src/keys.rs", "rank": 56, "score": 10.296593875069627 }, { "content": " t: u128,\n\n gens: &GeneratorSet,\n\n mut rng: &mut R,\n\n db: &mut dyn SigKeyDb\n\n ) {\n\n set.fast_forward_update(t, &gens, &mut rng, db).unwrap();\n\n assert_eq!(set.t, t);\n\n for i in 1..t {\n\n assert!(!SigkeyManager::has_key(i as u128, db));\n\n }\n\n assert!(SigkeyManager::has_key(t, db));\n\n }\n\n\n\n #[test]\n\n fn test_proof_of_possession() {\n\n let mut rng = rand::thread_rng();\n\n let T1 = 7;\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, verkey, _, PoP) =\n\n setup::<ThreadRng>(T1, \"test_pixel\", &mut rng, &mut db).unwrap();\n", "file_path": "src/keys.rs", "rank": 57, "score": 10.281125948836658 }, { "content": "\n\n fn get_key_indices(&self) -> HashSet<u128> {\n\n self.keys.keys().map(|k| *k).collect()\n\n }\n\n}\n\n\n\nimpl InMemorySigKeyDb {\n\n pub fn new() -> Self {\n\n let keys = HashMap::<u128, Sigkey>::new();\n\n Self { keys }\n\n }\n\n}\n\n\n\n/// Create master secret, verkey, PoP, SigkeyManager for t = 1, a set with only 1 key and proof of 
possession.\n", "file_path": "src/keys.rs", "rank": 58, "score": 10.196114187416288 }, { "content": " for i in (key_path_len + 1)..(l as usize + 1) {\n\n let j = l as usize - i + 1;\n\n let a = &pred_sk.1[pred_sk_len - j];\n\n let b = &(&gens.1[gen_len - j] * &r);\n\n let e = a + b;\n\n sk_t_prime_prime.push(e);\n\n }\n\n\n\n Ok(Sigkey(sk_t_prime, sk_t_prime_prime))\n\n }\n\n}\n\n\n\n/// Key-value database interface that needs to be implemented for storing signing keys.\n\n/// Signing key are db values whereas db keys are the time period for which the signing key needs to be used.\n", "file_path": "src/keys.rs", "rank": 59, "score": 9.996392130766578 }, { "content": " create_sig_and_verify::<ThreadRng>(&set, 9u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=3\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 3u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 6u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 9u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=4\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 4u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 5u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 6u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 9u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=5\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 5u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 6u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 9u128, &vk, l, &gens, &mut rng, &db);\n", "file_path": "src/signature.rs", "rank": 60, "score": 9.934697466994765 }, { "content": " set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 
2u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 5u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=3\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 3u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 4u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=4\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 4u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 5u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=5\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 5u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=6\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n", "file_path": "src/signature.rs", "rank": 61, "score": 9.827881130070963 }, { "content": "\n\n // t=6\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 6u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 9u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=7\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 7u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 8u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 9u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=8\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n\n\n // t=9\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 9u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=10\n", "file_path": "src/signature.rs", "rank": 62, "score": 9.79593220369669 }, { "content": " assert!(sig2.verify(msg, t1, l, &gens, &vk).unwrap());\n\n assert!(sig2_deterministic.verify(msg, t1, l, &gens, &vk).unwrap());\n\n\n\n // Deterministic sigs for same message and 
secret key should be equal\n\n assert_eq!(sig1_deterministic, sig2_deterministic);\n\n // In-deterministic sigs for same message and secret key should be different\n\n assert_ne!(sig1, sig2);\n\n\n\n // In-deterministic and deterministic signatures for t=2, doing the same checks as above\n\n let t2 = 2u128;\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n let sk2 = SigkeyManager::get_key(t2, &db).unwrap();\n\n\n\n let sig3 = Signature::new(msg, t2, l, &gens, &sk2, &mut rng).unwrap();\n\n let sig3_deterministic = Signature::new_deterministic(msg, t2, l, &gens, &sk2).unwrap();\n\n assert!(sig3.verify(msg, t2, l, &gens, &vk).unwrap());\n\n assert!(sig3_deterministic.verify(msg, t2, l, &gens, &vk).unwrap());\n\n\n\n let sig4 = Signature::new(msg, t2, l, &gens, &sk2, &mut rng).unwrap();\n\n let sig4_deterministic = Signature::new_deterministic(msg, t2, l, &gens, &sk2).unwrap();\n", "file_path": "src/signature.rs", "rank": 63, "score": 9.599985611205916 }, { "content": " set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 10u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 13u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=11\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 11u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 12u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 13u128, &vk, l, &gens, &mut rng, &db);\n\n }\n\n\n\n #[test]\n\n fn test_sig_verify_post_fast_forward_update_7() {\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let l = calculate_l(T).unwrap();\n\n let mut t = 1u128;\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n", "file_path": "src/signature.rs", "rank": 64, "score": 9.554965580561419 }, { "content": " fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n }\n\n\n\n {\n\n let mut db = 
InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 4;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n\n\n\n t = 10;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(13u128, &db));\n\n\n\n t = 13;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n", "file_path": "src/keys.rs", "rank": 65, "score": 9.418443368331818 }, { "content": "\n\n // t=7\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 7);\n\n assert!(!SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(8u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n\n\n\n // t=8\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n\n\n // t=9\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n\n\n // t=10\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 10);\n\n assert!(!SigkeyManager::has_key(9u128, &db));\n\n assert!(SigkeyManager::has_key(13u128, &db));\n\n\n", "file_path": "src/keys.rs", "rank": 66, "score": 9.418413605477916 }, { "content": " assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(7u128, &db));\n\n }\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 3;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(4u128, &db));\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n\n\n t = 5;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n\n\n t = 7;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n }\n", "file_path": "src/keys.rs", "rank": 67, 
"score": 9.380889506631357 }, { "content": "\n\n let T = 15;\n\n let mut t;\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 3;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n\n\n\n t = 5;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n\n\n\n t = 9;\n", "file_path": "src/keys.rs", "rank": 68, "score": 9.26613190046056 }, { "content": "\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let mut t = 1u128;\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 2;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n\n\n t = 4;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n\n\n t = 6;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n", "file_path": "src/keys.rs", "rank": 69, "score": 9.252987768980129 }, { "content": " create_sig_and_verify::<ThreadRng>(&set, 6u128, &vk, l, &gens, &mut rng, &db);\n\n create_sig_and_verify::<ThreadRng>(&set, 7u128, &vk, l, &gens, &mut rng, &db);\n\n\n\n // t=7\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 7u128, &vk, l, &gens, &mut rng, &db);\n\n }\n\n\n\n #[test]\n\n fn test_sig_verify_post_simple_update_by_15() {\n\n let mut rng = rand::thread_rng();\n\n let T = 15;\n\n let l = calculate_l(T).unwrap();\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, vk, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n // 
t=2\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n create_sig_and_verify::<ThreadRng>(&set, 2u128, &vk, l, &gens, &mut rng, &db);\n", "file_path": "src/signature.rs", "rank": 70, "score": 9.215278885152008 }, { "content": " VerkeyGroup::from_bytes(vk_bytes).map(|value| Verkey { value })\n\n }\n\n\n\n pub fn to_bytes(&self) -> Vec<u8> {\n\n self.value.to_bytes()\n\n }\n\n\n\n pub fn is_identity(&self) -> bool {\n\n if self.value.is_identity() {\n\n println!(\"Verkey point at infinity\");\n\n return true;\n\n }\n\n return false;\n\n }\n\n}\n\n\n\n/// Proof of Possession of signing key. It is a signature on the verification key and can be\n\n/// group in G1 or G2. But it is in different group than Verkey.\n\n/// If Verkey is in G2 then proof of possession is in G1 and vice versa.\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n", "file_path": "src/keys.rs", "rank": 71, "score": 9.20768514153154 }, { "content": " assert_eq!(set.t, 4);\n\n assert!(!SigkeyManager::has_key(3u128, &db));\n\n assert!(SigkeyManager::has_key(4u128, &db));\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n\n\n // t=5\n\n set.fast_forward_update(5u128, &gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 5);\n\n assert!(!SigkeyManager::has_key(4u128, &db));\n\n\n\n // t=6\n\n set.fast_forward_update(6u128, &gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 6);\n\n assert!(!SigkeyManager::has_key(5u128, &db));\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(7u128, &db));\n\n\n\n // t=7\n\n set.fast_forward_update(7u128, &gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 7);\n", "file_path": "src/keys.rs", "rank": 72, "score": 9.128309775147207 }, { "content": " let asig = Signature::aggregate(vec![&sig1, &sig2]);\n\n assert!(asig\n\n .verify_aggregated(msg, t, l, vec![&vk1, &vk2], &gens)\n\n .unwrap());\n\n }\n\n\n\n {\n\n t = 3;\n\n sigkey_set1.fast_forward_update(t, &gens, &mut rng, &mut db1).unwrap();\n\n 
sigkey_set2.fast_forward_update(t, &gens, &mut rng, &mut db2).unwrap();\n\n\n\n let msg = \"Hello\".as_bytes();\n\n let sk1 = SigkeyManager::get_key(t, &db1).unwrap();\n\n let sig1 = Signature::new(msg, t, l, &gens, &sk1, &mut rng).unwrap();\n\n let sk2 = SigkeyManager::get_key(t, &db2).unwrap();\n\n let sig2 = Signature::new(msg, t, l, &gens, &sk2, &mut rng).unwrap();\n\n\n\n let asig = Signature::aggregate(vec![&sig1, &sig2]);\n\n assert!(asig\n\n .verify_aggregated(msg, t, l, vec![&vk1, &vk2], &gens)\n", "file_path": "src/signature.rs", "rank": 73, "score": 9.07230214920077 }, { "content": " assert!(SigkeyManager::has_key(5u128, &db));\n\n\n\n // t=4\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 4);\n\n assert!(!SigkeyManager::has_key(3u128, &db));\n\n assert!(SigkeyManager::has_key(4u128, &db));\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n\n\n // t=5\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 5);\n\n assert!(!SigkeyManager::has_key(4u128, &db));\n\n\n\n // t=6\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 6);\n\n assert!(!SigkeyManager::has_key(5u128, &db));\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(7u128, &db));\n", "file_path": "src/keys.rs", "rank": 74, "score": 9.055461276301504 }, { "content": " assert!(SigkeyManager::has_key(4u128, &db));\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n }\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 4;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(4u128, &db));\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n }\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n", "file_path": 
"src/keys.rs", "rank": 75, "score": 9.04019178140641 }, { "content": "\n\n let threshold_sig = ThresholdScheme::aggregate_sigs(threshold, sigs);\n\n\n\n let threshold_vk = ThresholdScheme::aggregate_vk(\n\n threshold,\n\n signers\n\n .iter()\n\n .map(|s| (s.id, &s.verkey))\n\n .collect::<Vec<(usize, &Verkey)>>(),\n\n );\n\n\n\n assert!(threshold_sig.verify(&msg, t, l, gens, &threshold_vk).unwrap());\n\n for i in 0..threshold {\n\n sk_managers[i].simple_update(&gens, &mut rng, &mut sigkey_dbs[i]).unwrap();\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_verkey_aggregation_shamir_secret_sharing_keygen() {\n", "file_path": "src/threshold_sig.rs", "rank": 76, "score": 8.953545557564242 }, { "content": " set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 4);\n\n assert!(!SigkeyManager::has_key(3u128, &db));\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n\n\n\n // t=5\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 5);\n\n assert!(!SigkeyManager::has_key(4u128, &db));\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n\n\n\n // t=6\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 6);\n\n assert!(!SigkeyManager::has_key(5u128, &db));\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(9u128, &db));\n", "file_path": "src/keys.rs", "rank": 77, "score": 8.940295427102859 }, { "content": " t = 5;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n }\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 6;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(6u128, &db));\n\n 
assert!(SigkeyManager::has_key(7u128, &db));\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_fast_forward_key_update_repeat_7() {\n\n // Create key and then fast forward update repeatedly\n", "file_path": "src/keys.rs", "rank": 78, "score": 8.823178724608882 }, { "content": " assert!(!SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(7u128, &db));\n\n }\n\n\n\n #[test]\n\n fn test_fast_forward_key_update_7() {\n\n // Create key and then fast forward update\n\n\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let l = calculate_l(T).unwrap();\n\n\n\n let mut t = 1u128;\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 3;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n", "file_path": "src/keys.rs", "rank": 79, "score": 8.593149669396084 }, { "content": " .unwrap());\n\n }\n\n\n\n {\n\n t = 5;\n\n sigkey_set1.fast_forward_update(t, &gens, &mut rng, &mut db1).unwrap();\n\n sigkey_set2.fast_forward_update(t, &gens, &mut rng, &mut db2).unwrap();\n\n\n\n let msg = \"Hello\".as_bytes();\n\n let sk1 = SigkeyManager::get_key(t, &db1).unwrap();\n\n let sig1 = Signature::new(msg, t, l, &gens, &sk1, &mut rng).unwrap();\n\n let sk2 = SigkeyManager::get_key(t, &db2).unwrap();\n\n let sig2 = Signature::new(msg, t, l, &gens, &sk2, &mut rng).unwrap();\n\n\n\n let asig = Signature::aggregate(vec![&sig1, &sig2]);\n\n assert!(asig\n\n .verify_aggregated(msg, t, l, vec![&vk1, &vk2], &gens)\n\n .unwrap());\n\n }\n\n }\n", "file_path": "src/signature.rs", "rank": 80, "score": 8.554552763269857 }, { "content": " assert!(Keypair::verify_pop(&PoP, &verkey, &gens.0))\n\n }\n\n\n\n #[test]\n\n fn test_setup_with_less_number_of_genertors() {\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let generators = GeneratorSet::new(T, \"test_pixel\").unwrap();\n\n let mut db = InMemorySigKeyDb::new();\n\n assert!(Keypair::new(3, &generators, 
&mut rng, &mut db).is_ok());\n\n assert!(Keypair::new(7, &generators, &mut rng, &mut db).is_ok());\n\n assert!(Keypair::new(8, &generators, &mut rng, &mut db).is_err());\n\n assert!(Keypair::new(9, &generators, &mut rng, &mut db).is_err());\n\n }\n\n\n\n #[test]\n\n fn test_setup() {\n\n let mut rng = rand::thread_rng();\n\n let T1 = 7;\n\n let l1 = calculate_l(T1).unwrap();\n", "file_path": "src/keys.rs", "rank": 82, "score": 8.1986374966914 }, { "content": " // t=11\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 11);\n\n assert!(!SigkeyManager::has_key(10u128, &db));\n\n assert!(SigkeyManager::has_key(12u128, &db));\n\n assert!(SigkeyManager::has_key(13u128, &db));\n\n }\n\n\n\n #[test]\n\n fn test_fast_forward_key_update_through_simple_key_update_7() {\n\n // Create key and then fast forward update time\n\n\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let l = calculate_l(T).unwrap();\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) = setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 1);\n\n\n\n // t=2\n", "file_path": "src/keys.rs", "rank": 83, "score": 8.087029362000234 }, { "content": "\n\n // t=7\n\n set.simple_update(&gens, &mut rng, &mut db).unwrap();\n\n assert_eq!(set.t, 7);\n\n assert!(!SigkeyManager::has_key(6u128, &db));\n\n assert!(SigkeyManager::has_key(7u128, &db));\n\n }\n\n\n\n #[test]\n\n fn test_simple_key_update_15() {\n\n // Create key and then update time by 1 repeatedly\n\n\n\n let mut rng = rand::thread_rng();\n\n\n\n let T = 15;\n\n let l = calculate_l(T).unwrap();\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) = setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n // t=2\n", "file_path": "src/keys.rs", "rank": 84, "score": 8.078222933476258 }, { "content": " secret_x: FieldElement,\n\n signers: &[Signer],\n\n gen: &VerkeyGroup,\n\n ) {\n\n let threshold_vk = 
ThresholdScheme::aggregate_vk(\n\n threshold,\n\n signers\n\n .iter()\n\n .take(threshold)\n\n .map(|s| (s.id, &s.verkey))\n\n .collect::<Vec<(usize, &Verkey)>>(),\n\n );\n\n\n\n let expected_vk = gen * &secret_x;\n\n assert_eq!(expected_vk, threshold_vk.value);\n\n }\n\n\n\n fn check_threshold_key_gen_gaps_in_ids(\n\n threshold: usize,\n\n secret_x: FieldElement,\n", "file_path": "src/threshold_sig.rs", "rank": 85, "score": 7.938737205244195 }, { "content": "\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, _, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 4;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n assert!(SigkeyManager::has_key(5u128, &db));\n\n\n\n t = 7;\n\n fast_forward_and_check(&mut set, t, &gens, &mut rng, &mut db);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_fast_forward_key_update_15() {\n\n // Create key and then fast forward update\n\n\n\n let mut rng = rand::thread_rng();\n", "file_path": "src/keys.rs", "rank": 86, "score": 7.937974752972237 }, { "content": " fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_sig_verify_post_fast_forward_update_repeat_7() {\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let l = calculate_l(T).unwrap();\n\n let mut t = 1u128;\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, vk, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 2;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n\n t = 4;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n", "file_path": "src/signature.rs", "rank": 87, "score": 7.906234613828481 }, { "content": " }\n\n\n\n #[test]\n\n fn test_sig_verify_post_fast_forward_update_repeat_65535() {\n\n let mut rng = rand::thread_rng();\n\n let T = 65535;\n\n let l = calculate_l(T).unwrap();\n\n let mut t = 1u128;\n\n\n\n 
{\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, vk, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 4;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n\n t = 15;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n", "file_path": "src/signature.rs", "rank": 88, "score": 7.886292662267669 }, { "content": " t = 6;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n }\n\n\n\n #[test]\n\n fn test_sig_verify_post_fast_forward_update_repeat_15() {\n\n let mut rng = rand::thread_rng();\n\n let T = 15;\n\n let l = calculate_l(T).unwrap();\n\n let mut t = 1u128;\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, vk, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n\n\n t = 3;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n\n t = 8;\n", "file_path": "src/signature.rs", "rank": 89, "score": 7.886292662267669 }, { "content": " }\n\n if !self.sigma_2.has_correct_order() {\n\n println!(\"Signature point in G2 has incorrect order\");\n\n return false;\n\n }\n\n return true;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::keys::{setup, InMemorySigKeyDb, Keypair, SigkeyManager, SigKeyDb};\n\n use crate::util::calculate_l;\n\n use rand::rngs::ThreadRng;\n\n // For benchmarking\n\n use std::time::{Duration, Instant};\n\n\n\n pub fn create_sig_and_verify<R: RngCore + CryptoRng>(\n\n set: &SigkeyManager,\n", "file_path": "src/signature.rs", "rank": 91, "score": 7.732414747787349 }, { "content": " let mut db1 = InMemorySigKeyDb::new();\n\n let (_, _, set1, _) = setup::<ThreadRng>(T1, \"test_pixel\", &mut rng, &mut db1).unwrap();\n\n let sk1 = SigkeyManager::get_key(1u128, &db1).unwrap();\n\n assert_eq!(sk1.1.len() as u8, l1 + 1);\n\n\n\n let T2 = 15;\n\n let l2 = calculate_l(T2).unwrap();\n\n let mut db2 = 
InMemorySigKeyDb::new();\n\n let (_, _, set2, _) = setup::<ThreadRng>(T2, \"test_pixel\", &mut rng, &mut db2).unwrap();\n\n let sk2 = SigkeyManager::get_key(1u128, &db2).unwrap();\n\n assert_eq!(sk2.1.len() as u8, l2 + 1);\n\n }\n\n\n\n #[test]\n\n fn test_simple_key_update_7() {\n\n // Create key and then update time by 1 repeatedly\n\n\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let l = calculate_l(T).unwrap();\n", "file_path": "src/keys.rs", "rank": 92, "score": 7.72506825462442 }, { "content": " db: &mut dyn SigKeyDb\n\n ) {\n\n set.fast_forward_update(t, &gens, &mut rng, db).unwrap();\n\n create_sig_and_verify(&set, t, &vk, l, &gens, &mut rng, db);\n\n }\n\n\n\n #[test]\n\n fn test_sig_verify_initial() {\n\n let mut rng = rand::thread_rng();\n\n let T = 7;\n\n let l = calculate_l(T).unwrap();\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, vk, set, _) = setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n let t = 1u128;\n\n create_sig_and_verify::<ThreadRng>(&set, t, &vk, l, &gens, &mut rng, &db);\n\n }\n\n\n\n #[test]\n\n fn test_sig_deterministic() {\n\n let mut rng = rand::thread_rng();\n", "file_path": "src/signature.rs", "rank": 93, "score": 7.6333105022209224 }, { "content": "/*error_chain! 
{\n\n errors {\n\n MoreThanSupported(T: u128) {\n\n description(\"Time more than the maximum supported value\")\n\n display(\"T should be less than max value for u128 but was : {}\", T)\n\n }\n\n }\n\n}*/\n\n\n\nuse failure::Error;\n\n\n\n#[derive(Debug, Fail)]\n\npub enum PixelError {\n\n #[fail(\n\n display = \"T should be >= 3 and less than max value for u128 but was : {}\",\n\n T\n\n )]\n\n InvalidMaxTimePeriod { T: u128 },\n\n //#[fail(display = \"T+1 should be power of 2 but T was : {}\", T+1)]\n\n #[fail(display = \"T+1 should be power of 2\")]\n", "file_path": "src/errors.rs", "rank": 94, "score": 7.586729613810039 }, { "content": " let mut t = 1u128;\n\n\n\n let mut db1 = InMemorySigKeyDb::new();\n\n let (gens, vk1, mut sigkey_set1, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db1).unwrap();\n\n\n\n let mut db2 = InMemorySigKeyDb::new();\n\n let (keypair2, mut sigkey_set2) = Keypair::new(T, &gens, &mut rng, &mut db2).unwrap();\n\n let vk2 = keypair2.ver_key;\n\n\n\n create_sig_and_verify::<ThreadRng>(&sigkey_set1, t, &vk1, l, &gens, &mut rng, &db1);\n\n create_sig_and_verify::<ThreadRng>(&sigkey_set2, t, &vk2, l, &gens, &mut rng, &db2);\n\n\n\n {\n\n let msg = \"Hello\".as_bytes();\n\n let sk1 = SigkeyManager::get_key(t, &db1).unwrap();\n\n let sig1 = Signature::new(msg, t, l, &gens, &sk1, &mut rng).unwrap();\n\n let sk2 = SigkeyManager::get_key(t, &db2).unwrap();\n\n let sig2 = Signature::new(msg, t, l, &gens, &sk2, &mut rng).unwrap();\n\n\n", "file_path": "src/signature.rs", "rank": 95, "score": 7.562808342576361 }, { "content": "\n\n // threshold verkey = vk_1^l_1 * vk_2^l_2 * ... 
vk_i^l_i for i in threshold\n\n\n\n Verkey {\n\n value: vk_bases.multi_scalar_mul_var_time(vk_exps.as_ref()).unwrap(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use rand::rngs::ThreadRng;\n\n use crate::util::{GeneratorSet, calculate_l};\n\n use crate::signature::Signature;\n\n use crate::keys::{InMemorySigKeyDb, SigkeyManager};\n\n\n\n fn check_threshold_key_gen(\n\n threshold: usize,\n", "file_path": "src/threshold_sig.rs", "rank": 96, "score": 7.477082699180644 }, { "content": "#![allow(non_snake_case)]\n\n\n\nextern crate rand;\n\n#[macro_use]\n\nextern crate amcl_wrapper;\n\n\n\nuse amcl_wrapper::extension_field_gt::GT;\n\n\n\n#[cfg(all(feature = \"VerkeyG1\", feature = \"VerkeyG2\"))]\n\ncompile_error!(\"features `VerkeyG1` and `VerkeyG2` are mutually exclusive\");\n\n\n\n// For feature VerkeyG2, verification key is in G2 and all but one element of signature are in G1\n\n#[cfg(feature = \"VerkeyG2\")]\n\npub type SignatureGroup = amcl_wrapper::group_elem_g1::G1;\n\n#[cfg(feature = \"VerkeyG2\")]\n\npub type SignatureGroupVec = amcl_wrapper::group_elem_g1::G1Vector;\n\n#[cfg(feature = \"VerkeyG2\")]\n\npub type VerkeyGroup = amcl_wrapper::group_elem_g2::G2;\n\n#[cfg(feature = \"VerkeyG2\")]\n\npub type VerkeyGroupVec = amcl_wrapper::group_elem_g2::G2Vector;\n\n#[cfg(feature = \"VerkeyG2\")]\n", "file_path": "src/lib.rs", "rank": 97, "score": 7.394058138919298 }, { "content": " let T = 7;\n\n let l = calculate_l(T).unwrap();\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, vk, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n\n let t1 = 1u128;\n\n let msg = \"Hello\".as_bytes();\n\n let sk1 = SigkeyManager::get_key(t1, &db).unwrap();\n\n\n\n // In-deterministic and deterministic signatures for t=1\n\n\n\n let sig1 = Signature::new(msg, t1, l, &gens, &sk1, &mut rng).unwrap();\n\n let sig1_deterministic = Signature::new_deterministic(msg, t1, l, &gens, &sk1).unwrap();\n\n // 
In-deterministic sigs should verify\n\n assert!(sig1.verify(msg, t1, l, &gens, &vk).unwrap());\n\n assert!(sig1_deterministic.verify(msg, t1, l, &gens, &vk).unwrap());\n\n\n\n let sig2 = Signature::new(msg, t1, l, &gens, &sk1, &mut rng).unwrap();\n\n let sig2_deterministic = Signature::new_deterministic(msg, t1, l, &gens, &sk1).unwrap();\n\n // Deterministic sigs should verify\n", "file_path": "src/signature.rs", "rank": 98, "score": 7.249471995829227 }, { "content": " t = 16;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n\n t = 32;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n\n t = 1024;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n\n t = 4095;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n\n\n t = 65535;\n\n fast_forward_sig_and_verify(&mut set, t, &vk, l, &gens, &mut rng, &mut db);\n\n }\n\n\n\n {\n\n let mut db = InMemorySigKeyDb::new();\n\n let (gens, vk, mut set, _) =\n\n setup::<ThreadRng>(T, \"test_pixel\", &mut rng, &mut db).unwrap();\n", "file_path": "src/signature.rs", "rank": 99, "score": 6.614410362572178 } ]
Rust
src/distance_.rs
huonw/hamming
4b528d75cfa2e102b6edc6c60b629cd986215437
fn naive(x: &[u8], y: &[u8]) -> u64 { assert_eq!(x.len(), y.len()); x.iter().zip(y).fold(0, |a, (b, c)| a + (*b ^ *c).count_ones() as u64) } #[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Clone)] pub struct DistanceError { _x: () } pub fn distance_fast(x: &[u8], y: &[u8]) -> Result<u64, DistanceError> { assert_eq!(x.len(), y.len()); const M1: u64 = 0x5555555555555555; const M2: u64 = 0x3333333333333333; const M4: u64 = 0x0F0F0F0F0F0F0F0F; const M8: u64 = 0x00FF00FF00FF00FF; type T30 = [u64; 30]; let (head1, thirty1, tail1) = unsafe { ::util::align_to::<_, T30>(x) }; let (head2, thirty2, tail2) = unsafe { ::util::align_to::<_, T30>(y) }; if head1.len() != head2.len() { return Err(DistanceError { _x: () }); } debug_assert_eq!(thirty1.len(), thirty2.len()); let mut count = naive(head1, head2) + naive(tail1, tail2); for (array1, array2) in thirty1.iter().zip(thirty2) { let mut acc = 0; for j_ in 0..10 { let j = j_ * 3; let mut count1 = array1[j] ^ array2[j]; let mut count2 = array1[j + 1] ^ array2[j + 1]; let mut half1 = array1[j + 2] ^ array2[j + 2]; let mut half2 = half1; half1 &= M1; half2 = (half2 >> 1) & M1; count1 -= (count1 >> 1) & M1; count2 -= (count2 >> 1) & M1; count1 += half1; count2 += half2; count1 = (count1 & M2) + ((count1 >> 2) & M2); count1 += (count2 & M2) + ((count2 >> 2) & M2); acc += (count1 & M4) + ((count1 >> 4) & M4); } acc = (acc & M8) + ((acc >> 8) & M8); acc = acc + (acc >> 16); acc = acc + (acc >> 32); count += acc & 0xFFFF; } Ok(count) } pub fn distance(x: &[u8], y: &[u8]) -> u64 { distance_fast(x, y) .ok() .unwrap_or_else(|| naive(x, y)) } #[cfg(test)] mod tests { use quickcheck as qc; use rand; #[test] fn naive_smoke() { let tests: &[(&[u8], &[u8], u64)] = &[ (&[], &[], 0), (&[0], &[0], 0), (&[0], &[0xFF], 8), (&[0b10101010], &[0b01010101], 8), (&[0b11111010], &[0b11110101], 4), (&[0; 10], &[0; 10], 0), (&[0xFF; 10], &[0x0F; 10], 4 * 10), (&[0x3B; 10000], &[0x3B; 10000], 0), (&[0x77; 10000], &[0x3B; 10000], 3 * 10000), ]; for &(x, 
y, expected) in tests { assert_eq!(super::naive(x, y), expected); } } #[test] fn distance_fast_qc() { fn prop(v: Vec<u8>, w: Vec<u8>, misalign: u8) -> qc::TestResult { let l = ::std::cmp::min(v.len(), w.len()); if l < misalign as usize { return qc::TestResult::discard() } let x = &v[misalign as usize..l]; let y = &w[misalign as usize..l]; qc::TestResult::from_bool(super::distance_fast(x, y).unwrap() == super::naive(x, y)) } qc::QuickCheck::new() .gen(qc::StdGen::new(rand::thread_rng(), 10_000)) .quickcheck(prop as fn(Vec<u8>,Vec<u8>,u8) -> qc::TestResult) } #[test] fn distance_fast_smoke_huge() { let v = vec![0b1001_1101; 10234567]; let w = vec![0b1111_1111; v.len()]; assert_eq!(super::distance_fast(&v, &v).unwrap(), 0); assert_eq!(super::distance_fast(&v, &w).unwrap(), 3 * w.len() as u64); } #[test] fn distance_smoke() { let v = vec![0; 10000]; let w = vec![0xFF; v.len()]; for len_ in 0..99 { let len = len_ * 10; for i in 0..8 { for j in 0..8 { assert_eq!(super::distance(&v[i..i+len], &w[j..j+len]), len as u64 * 8) } } } } }
fn naive(x: &[u8], y: &[u8]) -> u64 { assert_eq!(x.len(), y.len()); x.iter().zip(y).fold(0, |a, (b, c)| a + (*b ^ *c).count_ones() as u64) } #[derive(Debug, PartialEq, Eq, Ord, PartialOrd, Hash, Clone)] pub struct DistanceError { _x: () } pub fn distance_fast(x: &[u8], y: &[u8]) -> Result<u64, DistanceError> { assert_eq!(x.len(), y.len()); const M1: u64 = 0x5555555555555555; const M2: u64 = 0x3333333333333333; const M4: u64 = 0x0F0F0F0F0F0F0F0F; const M8: u64 = 0x00FF00FF00FF00FF; type T30 = [u64; 30]; let (head1, thirty1, tail1) = unsafe { ::util::align_to::<_, T30>(x) }; let (head2, thirty2, tail2) = unsafe { ::util::align_to::<_, T30>(y) }; if head1.len() != head2.len() { return Err(DistanceError { _x: () }); } debug_assert_eq!(thirty1.len(), thirty2.len()); let mut count = naive(head1, head2) + naive(tail1, tail2); for (array1, array2) in thirty1.iter().zip(thirty2) { let mut acc = 0; for j_ in 0..10 { let j = j_ * 3; let mut count1 = array1[j] ^ array2[j]; let mut count2 = array1[j + 1] ^ array2[j + 1]; let mut half1 = array1[j + 2] ^ array2[j + 2]; let mut half2 = half1; half1 &= M1; half2 = (half2 >> 1) & M1; count1 -= (count1 >> 1) & M1; count2 -= (count2 >> 1) & M1; count1 += half1; count2 += half2; count1 = (count1 & M2) + ((count1 >> 2) & M2); count1 += (count2 & M2) + ((count2 >> 2) & M2); acc += (count1 & M4) + ((count1 >> 4) & M4); } acc = (acc & M8) + ((acc >> 8) & M8); acc = acc + (acc >> 16); acc = acc + (acc >> 32); count += acc & 0xFFFF; } Ok(count) } pub fn distance(x: &[u8], y: &[u8]) -> u64 { distance_fast(x, y) .ok() .unwrap_or_else(|| naive(x, y)) } #[cfg(test)] mod tests { use quickcheck as qc; use rand; #[test] fn naive_smoke() { let tests: &[(&[u8], &[u8], u64)] = &[ (&[], &[], 0), (&[0], &[0], 0), (&[0], &[0xFF], 8), (&[0b10101010], &[0b01010101], 8), (&[0b11111010], &[0b11110101], 4), (&[0; 10], &[0; 10], 0), (&[0xFF; 10], &[0x0F; 10], 4 * 10), (&[0x3B; 10000], &[0x3B; 10000], 0), (&[0x77; 10000], &[0x3B; 10000], 3 * 10000), ]; for &(x, 
y, expected) in tests { assert_eq!(super::naive(x, y), expected); } } #[test] fn distance_fast_qc() { fn prop(v: Vec<u8>, w: Vec<u8>, misalign: u8) -> qc::TestResult {
qc::QuickCheck::new() .gen(qc::StdGen::new(rand::thread_rng(), 10_000)) .quickcheck(prop as fn(Vec<u8>,Vec<u8>,u8) -> qc::TestResult) } #[test] fn distance_fast_smoke_huge() { let v = vec![0b1001_1101; 10234567]; let w = vec![0b1111_1111; v.len()]; assert_eq!(super::distance_fast(&v, &v).unwrap(), 0); assert_eq!(super::distance_fast(&v, &w).unwrap(), 3 * w.len() as u64); } #[test] fn distance_smoke() { let v = vec![0; 10000]; let w = vec![0xFF; v.len()]; for len_ in 0..99 { let len = len_ * 10; for i in 0..8 { for j in 0..8 { assert_eq!(super::distance(&v[i..i+len], &w[j..j+len]), len as u64 * 8) } } } } }
let l = ::std::cmp::min(v.len(), w.len()); if l < misalign as usize { return qc::TestResult::discard() } let x = &v[misalign as usize..l]; let y = &w[misalign as usize..l]; qc::TestResult::from_bool(super::distance_fast(x, y).unwrap() == super::naive(x, y)) }
function_block-function_prefix_line
[ { "content": "/// Computes the [Hamming\n\n/// weight](https://en.wikipedia.org/wiki/Hamming_weight) of `x`, that\n\n/// is, the population count, or number of 1.\n\n///\n\n/// This is a highly optimised version of the following naive version:\n\n///\n\n/// ```rust\n\n/// fn naive(x: &[u8]) -> u64 {\n\n/// x.iter().fold(0, |a, b| a + b.count_ones() as u64)\n\n/// }\n\n/// ```\n\n///\n\n/// This uses Lauradoux Cédric's [tree-merging\n\n/// approach](http://web.archive.org/web/20120411185540/http://perso.citi.insa-lyon.fr/claurado/hamming.html)\n\n/// (as implemented by Kim Walisch in\n\n/// [primesieve](http://primesieve.org/)) and achieves on the order of\n\n/// 1-2 cycles per byte.\n\n///\n\n/// # Performance Comparison\n\n///\n\n/// | length | `naive` (ns) | `weight` (ns) | `naive`/`weight` |\n\n/// |--:|--:|--:|--:|\n\n/// | 1 | 5 | 16 | 0.31 |\n\n/// | 10 | 29 | 51 | 0.56 |\n\n/// | 100 | 284 | 392 | 0.72 |\n\n/// | 1,000 | 2,780 | 340 | 8.2 |\n\n/// | 10,000 | 27,700 | 2,300 | 12 |\n\n/// | 100,000 | 276,000 | 17,900 | 15 |\n\n/// | 1,000,000 | 2,770,000 | 172,000 | 16 |\n\n///\n\n/// # Example\n\n///\n\n/// ```rust\n\n/// assert_eq!(hamming::weight(&[1, 0xFF, 1, 0xFF]), 1 + 8 + 1 + 8);\n\n/// ```\n\npub fn weight(x: &[u8]) -> u64 {\n\n const M1: u64 = 0x5555555555555555;\n\n const M2: u64 = 0x3333333333333333;\n\n const M4: u64 = 0x0F0F0F0F0F0F0F0F;\n\n const M8: u64 = 0x00FF00FF00FF00FF;\n\n\n\n type T30 = [u64; 30];\n\n let (head, thirty, tail) = unsafe {\n\n ::util::align_to::<_, T30>(x)\n\n };\n\n\n\n let mut count = naive(head) + naive(tail);\n\n for array in thirty {\n\n let mut acc = 0;\n\n for j_ in 0..10 {\n\n let j = j_ * 3;\n\n let mut count1 = array[j];\n\n let mut count2 = array[j + 1];\n\n let mut half1 = array[j + 2];\n\n let mut half2 = half1;\n", "file_path": "src/weight_.rs", "rank": 2, "score": 98515.96648770975 }, { "content": "fn distance_bench<F: 'static + FnMut(&[u8], &[u8]) -> u64>(mut f: F) -> impl FnMut(&mut Bencher, &usize) {\n\n 
move |b, n| {\n\n let data = vec![0xFF; *n];\n\n b.iter(|| {\n\n let d1 = criterion::black_box(&data);\n\n let d2 = criterion::black_box(&data);\n\n f(d1, d2)\n\n })\n\n }\n\n}\n\n\n\n\n\n\n\ncreate_benchmarks! {\n\n fn weight(SIZES) {\n\n \"naive\" => weight_bench(naive_weight),\n\n \"weight\" => weight_bench(hamming::weight),\n\n }\n\n fn distance(SIZES) {\n\n \"naive\" => distance_bench(naive_distance),\n\n \"distance\" => distance_bench(hamming::distance),\n\n }\n\n}\n\n\n\ncriterion_group!(benches, weight, distance);\n\ncriterion_main!(benches);\n", "file_path": "benches/benches.rs", "rank": 3, "score": 87593.36665001817 }, { "content": "fn weight_bench<F: 'static + FnMut(&[u8]) -> u64>(mut f: F) -> impl FnMut(&mut Bencher, &usize) {\n\n move |b, n| {\n\n let data = vec![0xFF; *n];\n\n b.iter(|| f(criterion::black_box(&data)))\n\n }\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 4, "score": 82719.08067549561 }, { "content": "fn naive_distance(x: &[u8], y: &[u8]) -> u64 {\n\n assert_eq!(x.len(), y.len());\n\n x.iter().zip(y).fold(0, |a, (b, c)| a + (*b ^ *c).count_ones() as u64)\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 6, "score": 74405.82691411051 }, { "content": "fn naive(x: &[u8]) -> u64 {\n\n x.iter().fold(0, |a, b| a + b.count_ones() as u64)\n\n}\n", "file_path": "src/weight_.rs", "rank": 7, "score": 74180.90714830093 }, { "content": "fn naive_weight(x: &[u8]) -> u64 {\n\n x.iter().fold(0, |a, b| a + b.count_ones() as u64)\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 8, "score": 72080.91154018656 }, { "content": " half1 &= M1;\n\n half2 = (half2 >> 1) & M1;\n\n count1 -= (count1 >> 1) & M1;\n\n count2 -= (count2 >> 1) & M1;\n\n count1 += half1;\n\n count2 += half2;\n\n count1 = (count1 & M2) + ((count1 >> 2) & M2);\n\n count1 += (count2 & M2) + ((count2 >> 2) & M2);\n\n acc += (count1 & M4) + ((count1 >> 4) & M4);\n\n }\n\n acc = (acc & M8) + ((acc >> 8) & M8);\n\n acc = acc + (acc >> 16);\n\n acc = acc + (acc >> 32);\n\n 
count += acc & 0xFFFF;\n\n }\n\n count\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/weight_.rs", "rank": 10, "score": 23.776032440341286 }, { "content": " use quickcheck as qc;\n\n use rand;\n\n #[test]\n\n fn naive_smoke() {\n\n let tests = [(&[0u8] as &[u8], 0),\n\n (&[1], 1),\n\n (&[0xFF], 8),\n\n (&[0xFF; 10], 8 * 10),\n\n (&[1; 1000], 1000)];\n\n for &(v, expected) in &tests {\n\n assert_eq!(super::naive(v), expected);\n\n }\n\n }\n\n #[test]\n\n fn weight_qc() {\n\n fn prop(v: Vec<u8>, misalign: u8) -> qc::TestResult {\n\n let misalign = misalign as usize % 16;\n\n if misalign > v.len() {\n\n return qc::TestResult::discard();\n\n }\n", "file_path": "src/weight_.rs", "rank": 12, "score": 16.032101796908652 }, { "content": " let data = &v[misalign..];\n\n qc::TestResult::from_bool(super::weight(data) == super::naive(data))\n\n }\n\n qc::QuickCheck::new()\n\n .gen(qc::StdGen::new(rand::thread_rng(), 10_000))\n\n .quickcheck(prop as fn(Vec<u8>,u8) -> qc::TestResult)\n\n }\n\n #[test]\n\n fn weight_huge() {\n\n let v = vec![0b1001_1101; 10234567];\n\n assert_eq!(super::weight(&v),\n\n v[0].count_ones() as u64 * v.len() as u64);\n\n }\n\n}\n", "file_path": "src/weight_.rs", "rank": 13, "score": 13.185801136725754 }, { "content": "#![cfg_attr(not(test), no_std)]\n\n\n\n#[cfg(test)] extern crate core;\n\n#[cfg(test)] extern crate quickcheck;\n\n#[cfg(test)] extern crate rand;\n\n\n\nmod weight_;\n\npub use weight_::weight;\n\n\n\nmod distance_;\n\npub use distance_::{distance, distance_fast};\n\n\n\nmod util;\n", "file_path": "src/lib.rs", "rank": 15, "score": 10.633337903350208 }, { "content": " align_to_test(0, 10, &[], &[0x03020100, 0x07060504], &[8, 9]);\n\n align_to_test(0, 5, &[], &[0x03020100], &[4]);\n\n\n\n align_to_test(1, 8, &[1, 2, 3], &[0x07060504], &[]);\n\n align_to_test(3, 9, &[3], &[0x07060504], &[8]);\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n #[cfg_attr(not(debug_assertions), ignore)]\n\n fn align_to_smaller() {\n\n let _ = 
unsafe { align_to::<u64, u8>(&[]) };\n\n }\n\n\n\n #[test]\n\n #[should_panic]\n\n #[cfg_attr(not(debug_assertions), ignore)]\n\n fn align_to_nondivisible() {\n\n let _ = unsafe { align_to::<[u8; 2], [u8; 3]>(&[]) };\n\n }\n\n}\n", "file_path": "src/util.rs", "rank": 17, "score": 7.116247781971742 }, { "content": " fn align_to_test(from: usize, to: usize,\n\n true_head: &[u8], true_le_middle: &[u32], true_tail: &[u8]) {\n\n let true_middle =\n\n true_le_middle.iter()\n\n .map(|x| u32::from_le(*x))\n\n .collect::<Vec<_>>();\n\n\n\n let array_and_tuple = (0u64, [0u8, 1, 2, 3, 4, 5, 6, 7, 8, 9]);\n\n let array = &array_and_tuple.1;\n\n // the array should be aligned appropriately\n\n assert!((array.as_ptr() as usize) % 4 == 0);\n\n\n\n let (head, middle, tail) = unsafe { align_to::<_, u32>(&array[from..to]) };\n\n assert_eq!(head, true_head);\n\n assert_eq!(middle, true_middle.as_slice());\n\n assert_eq!(tail, true_tail);\n\n }\n\n\n\n #[test]\n\n fn align_to_empty() {\n", "file_path": "src/util.rs", "rank": 18, "score": 6.655514801733119 }, { "content": " // can't fit a single U in\n\n if mem::size_of_val(x) < size + byte_distance {\n\n return (x, &[], &[])\n\n }\n\n\n\n let (head, middle) = x.split_at(byte_distance / orig_size);\n\n\n\n assert!(middle.as_ptr() as usize % alignment == 0);\n\n let cast_middle =\n\n slice::from_raw_parts(middle.as_ptr() as *const U,\n\n middle.len() / size_ratio);\n\n let tail = &middle[cast_middle.len() * size_ratio..];\n\n\n\n (head, cast_middle, tail)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/util.rs", "rank": 19, "score": 3.7562532373155992 }, { "content": "use core::{slice, mem};\n\n\n\n/// Reinterpret as much of `x` as a slice of (correctly aligned) `U`s\n\n/// as possible. 
(Same as `slice::align_to` but available in earlier\n\n/// compilers.)\n\n#[inline(never)] // critical for autovectorization in `weight`.\n\npub unsafe fn align_to<T, U>(x: &[T]) -> (&[T], &[U], &[T]) {\n\n let orig_size = mem::size_of::<T>();\n\n let size = mem::size_of::<U>();\n\n\n\n debug_assert!(orig_size < size && size % orig_size == 0);\n\n let size_ratio = size / orig_size;\n\n\n\n let alignment = mem::align_of::<U>();\n\n\n\n let ptr = x.as_ptr() as usize;\n\n // round up to the nearest multiple\n\n let aligned = (ptr + alignment - 1) / alignment * alignment;\n\n let byte_distance = aligned - ptr;\n\n\n", "file_path": "src/util.rs", "rank": 20, "score": 2.975946469242847 }, { "content": "//! A crate to count ones and xor bytes, fast (aka popcount, hamming\n\n//! weight and hamming distance).\n\n//!\n\n//! # Installation\n\n//!\n\n//! Add this to your `Cargo.toml`:\n\n//!\n\n//! ```toml\n\n//! [dependencies]\n\n//! hamming = \"0.1\"\n\n//! ```\n\n//!\n\n//! # Examples\n\n//!\n\n//! ```rust\n\n//! assert_eq!(hamming::weight(&[1, 0xFF, 1, 0xFF]), 1 + 8 + 1 + 8);\n\n//! assert_eq!(hamming::distance(&[1, 0xFF], &[0xFF, 1]), 7 + 7);\n\n//! ```\n\n\n\n#![deny(warnings)]\n", "file_path": "src/lib.rs", "rank": 21, "score": 2.9483109690124456 }, { "content": "#[macro_use]\n\nextern crate criterion;\n\nextern crate hamming;\n\n\n\nuse criterion::{Criterion, Bencher, ParameterizedBenchmark, PlotConfiguration, AxisScale};\n\n\n\nconst SIZES: [usize; 7] = [1, 10, 100, 1000, 10_000, 100_000, 1_000_000];\n\n\n\nmacro_rules! 
create_benchmarks {\n\n ($(\n\n fn $group_id: ident($input: expr) {\n\n $first_name: expr => $first_func: expr,\n\n $($rest_name: expr => $rest_func: expr,)*\n\n }\n\n )*) => {\n\n $(\n\n fn $group_id(c: &mut Criterion) {\n\n let input = $input;\n\n\n\n let plot_config =\n", "file_path": "benches/benches.rs", "rank": 22, "score": 2.8071720126314 }, { "content": " align_to_test(0, 0, &[], &[], &[]);\n\n align_to_test(1, 1, &[], &[], &[]);\n\n align_to_test(2, 2, &[], &[], &[]);\n\n align_to_test(3, 3, &[], &[], &[]);\n\n }\n\n\n\n #[test]\n\n fn align_to_short() {\n\n align_to_test(0, 1, &[0], &[], &[]);\n\n align_to_test(1, 2, &[1], &[], &[]);\n\n align_to_test(2, 3, &[2], &[], &[]);\n\n align_to_test(3, 4, &[3], &[], &[]);\n\n\n\n align_to_test(0, 2, &[0, 1], &[], &[]);\n\n align_to_test(1, 3, &[1, 2], &[], &[]);\n\n align_to_test(2, 4, &[2, 3], &[], &[]);\n\n align_to_test(3, 5, &[3, 4], &[], &[]);\n\n\n\n align_to_test(0, 3, &[0, 1, 2], &[], &[]);\n\n align_to_test(1, 4, &[1, 2, 3], &[], &[]);\n", "file_path": "src/util.rs", "rank": 23, "score": 2.3317116434731147 }, { "content": " align_to_test(2, 5, &[2, 3, 4], &[], &[]);\n\n align_to_test(3, 6, &[3, 4, 5], &[], &[]);\n\n }\n\n\n\n #[test]\n\n fn align_to_exact() {\n\n align_to_test(0, 4, &[], &[0x03020100], &[]);\n\n align_to_test(0, 8, &[], &[0x03020100, 0x07060504], &[]);\n\n }\n\n\n\n #[test]\n\n fn align_to_offset() {\n\n align_to_test(1, 5, &[1, 2, 3, 4], &[], &[]);\n\n align_to_test(2, 6, &[2, 3, 4, 5], &[], &[]);\n\n align_to_test(3, 7, &[3, 4, 5, 6], &[], &[]);\n\n align_to_test(1, 7, &[1, 2, 3, 4, 5, 6], &[], &[]);\n\n }\n\n\n\n #[test]\n\n fn align_to_overlap() {\n", "file_path": "src/util.rs", "rank": 24, "score": 2.2925739214908325 }, { "content": " PlotConfiguration::default()\n\n .summary_scale(AxisScale::Logarithmic);\n\n let bench = ParameterizedBenchmark::new(\n\n $first_name, $first_func, input.into_iter().cloned())\n\n $( .with_function($rest_name, $rest_func) )*\n\n 
.plot_config(plot_config);\n\n c.bench(stringify!($group_id), bench);\n\n }\n\n )*\n\n }\n\n}\n\n\n", "file_path": "benches/benches.rs", "rank": 25, "score": 1.5118399091760306 } ]
Rust
indy-credx/src/error.rs
animo/indy-shared-rs
063f7de369da0c3032225283773edf816a3eac21
use std::error::Error as StdError; use std::fmt::{self, Display, Formatter}; use std::result::Result as StdResult; use crate::ursa::errors::{UrsaCryptoError, UrsaCryptoErrorKind}; pub type Result<T> = std::result::Result<T, Error>; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ErrorKind { Input, IOError, InvalidState, Unexpected, CredentialRevoked, InvalidUserRevocId, ProofRejected, RevocationRegistryFull, } impl ErrorKind { pub fn as_str(&self) -> &'static str { match self { Self::Input => "Input error", Self::IOError => "IO error", Self::InvalidState => "Invalid state", Self::Unexpected => "Unexpected error", Self::CredentialRevoked => "Credential revoked", Self::InvalidUserRevocId => "Invalid revocation accumulator index", Self::ProofRejected => "Proof rejected", Self::RevocationRegistryFull => "Revocation registry full", } } } impl Display for ErrorKind { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } #[derive(Debug)] pub struct Error { kind: ErrorKind, pub cause: Option<Box<dyn StdError + Send + Sync + 'static>>, pub message: Option<String>, } impl Error { pub fn from_msg<T: Into<String>>(kind: ErrorKind, msg: T) -> Self { Self { kind, cause: None, message: Some(msg.into()), } } pub fn from_opt_msg<T: Into<String>>(kind: ErrorKind, msg: Option<T>) -> Self { Self { kind, cause: None, message: msg.map(Into::into), } } pub fn kind(&self) -> ErrorKind { self.kind } pub fn with_cause<T: Into<Box<dyn StdError + Send + Sync>>>(mut self, err: T) -> Self { self.cause = Some(err.into()); self } } impl fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match (self.kind, &self.message) { (ErrorKind::Input, None) => write!(f, "{}", self.kind), (ErrorKind::Input, Some(msg)) => f.write_str(msg), (kind, None) => write!(f, "{}", kind), (kind, Some(msg)) => write!(f, "{}: {}", kind, msg), }?; if let Some(ref source) = self.cause { write!(f, " [{}]", source)?; } Ok(()) } } impl StdError for 
Error { fn source(&self) -> Option<&(dyn StdError + 'static)> { self.cause .as_ref() .map(|err| unsafe { std::mem::transmute(&**err) }) } } impl PartialEq for Error { fn eq(&self, other: &Self) -> bool { self.kind == other.kind && self.message == other.message } } impl From<ErrorKind> for Error { fn from(kind: ErrorKind) -> Self { Self { kind, cause: None, message: None, } } } impl From<indy_utils::ValidationError> for Error { fn from(err: indy_utils::ValidationError) -> Self { Error::from_opt_msg(ErrorKind::Input, err.context) } } impl From<std::io::Error> for Error { fn from(err: std::io::Error) -> Self { Error::from(ErrorKind::IOError).with_cause(err) } } impl From<serde_json::Error> for Error { fn from(err: serde_json::Error) -> Self { Error::from(ErrorKind::Input).with_cause(err) } } impl From<UrsaCryptoError> for Error { fn from(err: UrsaCryptoError) -> Self { let message = err.to_string(); let kind = match err.kind() { UrsaCryptoErrorKind::InvalidState => ErrorKind::InvalidState, UrsaCryptoErrorKind::InvalidStructure => ErrorKind::Input, UrsaCryptoErrorKind::IOError => ErrorKind::IOError, UrsaCryptoErrorKind::InvalidRevocationAccumulatorIndex => ErrorKind::InvalidUserRevocId, UrsaCryptoErrorKind::RevocationAccumulatorIsFull => ErrorKind::RevocationRegistryFull, UrsaCryptoErrorKind::ProofRejected => ErrorKind::ProofRejected, UrsaCryptoErrorKind::CredentialRevoked => ErrorKind::CredentialRevoked, UrsaCryptoErrorKind::InvalidParam(_) => ErrorKind::Input, }; Error::from_msg(kind, message) } } impl<M> From<(ErrorKind, M)> for Error where M: fmt::Display + Send + Sync + 'static, { fn from((kind, msg): (ErrorKind, M)) -> Error { Error::from_msg(kind, msg.to_string()) } } macro_rules! 
err_msg { () => { $crate::error::Error::from($crate::error::ErrorKind::Input) }; ($kind:ident) => { $crate::error::Error::from($crate::error::ErrorKind::$kind) }; ($kind:ident, $($args:tt)+) => { $crate::error::Error::from_msg($crate::error::ErrorKind::$kind, format!($($args)+)) }; ($($args:tt)+) => { $crate::error::Error::from_msg($crate::error::ErrorKind::Input, format!($($args)+)) }; } macro_rules! err_map { ($($params:tt)*) => { |err| err_msg!($($params)*).with_cause(err) }; } pub trait ResultExt<T, E> { fn map_err_string(self) -> StdResult<T, String>; fn map_input_err<F, M>(self, mapfn: F) -> Result<T> where F: FnOnce() -> M, M: fmt::Display + Send + Sync + 'static; fn with_err_msg<M>(self, kind: ErrorKind, msg: M) -> Result<T> where M: fmt::Display + Send + Sync + 'static; fn with_input_err<M>(self, msg: M) -> Result<T> where M: fmt::Display + Send + Sync + 'static; } impl<T, E> ResultExt<T, E> for StdResult<T, E> where E: std::error::Error + Send + Sync + 'static, { fn map_err_string(self) -> StdResult<T, String> { self.map_err(|err| err.to_string()) } fn map_input_err<F, M>(self, mapfn: F) -> Result<T> where F: FnOnce() -> M, M: fmt::Display + Send + Sync + 'static, { self.map_err(|err| Error::from_msg(ErrorKind::Input, mapfn().to_string()).with_cause(err)) } fn with_err_msg<M>(self, kind: ErrorKind, msg: M) -> Result<T> where M: fmt::Display + Send + Sync + 'static, { self.map_err(|err| Error::from_msg(kind, msg.to_string()).with_cause(err)) } #[inline] fn with_input_err<M>(self, msg: M) -> Result<T> where M: fmt::Display + Send + Sync + 'static, { self.map_err(|err| Error::from_msg(ErrorKind::Input, msg.to_string()).with_cause(err)) } }
use std::error::Error as StdError; use std::fmt::{self, Display, Formatter}; use std::result::Result as StdResult; use crate::ursa::errors::{UrsaCryptoError, UrsaCryptoErrorKind}; pub type Result<T> = std::result::Result<T, Error>; #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ErrorKind { Input, IOError, InvalidState, Unexpected, CredentialRevoked, InvalidUserRevocId, ProofRejected, RevocationRegistryFull, } impl ErrorKind { pub fn as_str(&self) -> &'static str {
} } impl Display for ErrorKind { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } #[derive(Debug)] pub struct Error { kind: ErrorKind, pub cause: Option<Box<dyn StdError + Send + Sync + 'static>>, pub message: Option<String>, } impl Error { pub fn from_msg<T: Into<String>>(kind: ErrorKind, msg: T) -> Self { Self { kind, cause: None, message: Some(msg.into()), } } pub fn from_opt_msg<T: Into<String>>(kind: ErrorKind, msg: Option<T>) -> Self { Self { kind, cause: None, message: msg.map(Into::into), } } pub fn kind(&self) -> ErrorKind { self.kind } pub fn with_cause<T: Into<Box<dyn StdError + Send + Sync>>>(mut self, err: T) -> Self { self.cause = Some(err.into()); self } } impl fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match (self.kind, &self.message) { (ErrorKind::Input, None) => write!(f, "{}", self.kind), (ErrorKind::Input, Some(msg)) => f.write_str(msg), (kind, None) => write!(f, "{}", kind), (kind, Some(msg)) => write!(f, "{}: {}", kind, msg), }?; if let Some(ref source) = self.cause { write!(f, " [{}]", source)?; } Ok(()) } } impl StdError for Error { fn source(&self) -> Option<&(dyn StdError + 'static)> { self.cause .as_ref() .map(|err| unsafe { std::mem::transmute(&**err) }) } } impl PartialEq for Error { fn eq(&self, other: &Self) -> bool { self.kind == other.kind && self.message == other.message } } impl From<ErrorKind> for Error { fn from(kind: ErrorKind) -> Self { Self { kind, cause: None, message: None, } } } impl From<indy_utils::ValidationError> for Error { fn from(err: indy_utils::ValidationError) -> Self { Error::from_opt_msg(ErrorKind::Input, err.context) } } impl From<std::io::Error> for Error { fn from(err: std::io::Error) -> Self { Error::from(ErrorKind::IOError).with_cause(err) } } impl From<serde_json::Error> for Error { fn from(err: serde_json::Error) -> Self { Error::from(ErrorKind::Input).with_cause(err) } } impl From<UrsaCryptoError> for Error { fn 
from(err: UrsaCryptoError) -> Self { let message = err.to_string(); let kind = match err.kind() { UrsaCryptoErrorKind::InvalidState => ErrorKind::InvalidState, UrsaCryptoErrorKind::InvalidStructure => ErrorKind::Input, UrsaCryptoErrorKind::IOError => ErrorKind::IOError, UrsaCryptoErrorKind::InvalidRevocationAccumulatorIndex => ErrorKind::InvalidUserRevocId, UrsaCryptoErrorKind::RevocationAccumulatorIsFull => ErrorKind::RevocationRegistryFull, UrsaCryptoErrorKind::ProofRejected => ErrorKind::ProofRejected, UrsaCryptoErrorKind::CredentialRevoked => ErrorKind::CredentialRevoked, UrsaCryptoErrorKind::InvalidParam(_) => ErrorKind::Input, }; Error::from_msg(kind, message) } } impl<M> From<(ErrorKind, M)> for Error where M: fmt::Display + Send + Sync + 'static, { fn from((kind, msg): (ErrorKind, M)) -> Error { Error::from_msg(kind, msg.to_string()) } } macro_rules! err_msg { () => { $crate::error::Error::from($crate::error::ErrorKind::Input) }; ($kind:ident) => { $crate::error::Error::from($crate::error::ErrorKind::$kind) }; ($kind:ident, $($args:tt)+) => { $crate::error::Error::from_msg($crate::error::ErrorKind::$kind, format!($($args)+)) }; ($($args:tt)+) => { $crate::error::Error::from_msg($crate::error::ErrorKind::Input, format!($($args)+)) }; } macro_rules! 
err_map { ($($params:tt)*) => { |err| err_msg!($($params)*).with_cause(err) }; } pub trait ResultExt<T, E> { fn map_err_string(self) -> StdResult<T, String>; fn map_input_err<F, M>(self, mapfn: F) -> Result<T> where F: FnOnce() -> M, M: fmt::Display + Send + Sync + 'static; fn with_err_msg<M>(self, kind: ErrorKind, msg: M) -> Result<T> where M: fmt::Display + Send + Sync + 'static; fn with_input_err<M>(self, msg: M) -> Result<T> where M: fmt::Display + Send + Sync + 'static; } impl<T, E> ResultExt<T, E> for StdResult<T, E> where E: std::error::Error + Send + Sync + 'static, { fn map_err_string(self) -> StdResult<T, String> { self.map_err(|err| err.to_string()) } fn map_input_err<F, M>(self, mapfn: F) -> Result<T> where F: FnOnce() -> M, M: fmt::Display + Send + Sync + 'static, { self.map_err(|err| Error::from_msg(ErrorKind::Input, mapfn().to_string()).with_cause(err)) } fn with_err_msg<M>(self, kind: ErrorKind, msg: M) -> Result<T> where M: fmt::Display + Send + Sync + 'static, { self.map_err(|err| Error::from_msg(kind, msg.to_string()).with_cause(err)) } #[inline] fn with_input_err<M>(self, msg: M) -> Result<T> where M: fmt::Display + Send + Sync + 'static, { self.map_err(|err| Error::from_msg(ErrorKind::Input, msg.to_string()).with_cause(err)) } }
match self { Self::Input => "Input error", Self::IOError => "IO error", Self::InvalidState => "Invalid state", Self::Unexpected => "Unexpected error", Self::CredentialRevoked => "Credential revoked", Self::InvalidUserRevocId => "Invalid revocation accumulator index", Self::ProofRejected => "Proof rejected", Self::RevocationRegistryFull => "Revocation registry full", }
if_condition
[ { "content": "pub fn set_last_error(error: Option<Error>) -> ErrorCode {\n\n trace!(\"credx_set_last_error\");\n\n let code = match error.as_ref() {\n\n Some(err) => err.kind().into(),\n\n None => ErrorCode::Success,\n\n };\n\n *LAST_ERROR.write().unwrap() = error;\n\n code\n\n}\n", "file_path": "indy-credx/src/ffi/error.rs", "rank": 0, "score": 179869.1671671981 }, { "content": "/// Build an encoded verkey\n\npub fn build_full_verkey(dest: &str, key: &str) -> Result<EncodedVerKey, ConversionError> {\n\n EncodedVerKey::from_str_qualified(key, Some(dest), None, None)\n\n}\n\n\n\n/// A raw signing key used for generating transaction signatures\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]\n\npub struct PrivateKey {\n\n pub key: Vec<u8>,\n\n pub alg: KeyType,\n\n}\n\n\n\nimpl PrivateKey {\n\n pub fn new<K: AsRef<[u8]>>(key: K, alg: Option<KeyType>) -> Self {\n\n Self {\n\n key: key.as_ref().to_vec(),\n\n alg: alg.unwrap_or_default(),\n\n }\n\n }\n\n\n\n #[cfg(feature = \"ed25519\")]\n", "file_path": "indy-utils/src/keys/mod.rs", "rank": 1, "score": 176138.91663548688 }, { "content": "/// Split a qualifiable identifier into its method and value components\n\npub fn split<'a>(prefix: &str, val: &'a str) -> (Option<&'a str>, &'a str) {\n\n match REGEX.captures(&val) {\n\n None => (None, val),\n\n Some(caps) => {\n\n if caps.get(1).map(|m| m.as_str()) == Some(prefix) {\n\n (\n\n Some(caps.get(2).unwrap().as_str()),\n\n caps.get(3).unwrap().as_str(),\n\n )\n\n } else {\n\n (None, val)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "indy-utils/src/qualifiable.rs", "rank": 2, "score": 168430.52678350147 }, { "content": "/// Combine a prefix, method, and value into a qualified identifier\n\npub fn combine(prefix: &str, method: Option<&str>, entity: &str) -> String {\n\n match method {\n\n Some(method) => format!(\"{}:{}:{}\", prefix, method, entity),\n\n _ => entity.to_owned(),\n\n }\n\n}\n\n\n", "file_path": "indy-utils/src/qualifiable.rs", "rank": 3, 
"score": 165608.68842825148 }, { "content": "type DynError = Box<dyn StdError + Send + Sync + 'static>;\n\n\n\nmacro_rules! define_error {\n\n ($name:tt, $short:expr, $doc:tt) => {\n\n #[derive(Debug, Error)]\n\n #[doc=$doc]\n\n pub struct $name {\n\n pub context: Option<String>,\n\n pub source: Option<DynError>,\n\n }\n\n\n\n impl $name {\n\n pub fn from_msg<T: Into<String>>(msg: T) -> Self {\n\n Self::from(msg.into())\n\n }\n\n\n\n pub fn from_err<E>(err: E) -> Self\n\n where\n\n E: StdError + Send + Sync + 'static,\n\n {\n", "file_path": "indy-utils/src/error.rs", "rank": 4, "score": 164157.0876108795 }, { "content": "/// Check if an identifier is qualified by a prefix and method\n\npub fn is_fully_qualified(entity: &str) -> bool {\n\n REGEX.captures(entity).is_some()\n\n}\n\n\n", "file_path": "indy-utils/src/qualifiable.rs", "rank": 5, "score": 162062.4030119038 }, { "content": "pub fn get_current_error_json() -> String {\n\n if let Some(err) = Option::take(&mut *LAST_ERROR.write().unwrap()) {\n\n let message = err.to_string();\n\n let code = ErrorCode::from(err.kind()) as usize;\n\n serde_json::json!({\"code\": code, \"message\": message}).to_string()\n\n } else {\n\n r#\"{\"code\":0,\"message\":null}\"#.to_owned()\n\n }\n\n}\n\n\n", "file_path": "indy-credx/src/ffi/error.rs", "rank": 6, "score": 160213.09492041435 }, { "content": "pub fn attr_common_view(attr: &str) -> String {\n\n attr.replace(\" \", \"\").to_lowercase()\n\n}\n\n\n", "file_path": "indy-credx/src/services/helpers.rs", "rank": 7, "score": 156651.6592564422 }, { "content": "pub fn catch_error<F>(f: F) -> ErrorCode\n\nwhere\n\n F: FnOnce() -> Result<()> + UnwindSafe,\n\n{\n\n match catch_unwind(f) {\n\n Ok(Ok(_)) => ErrorCode::Success,\n\n Ok(Err(err)) => {\n\n // lib error\n\n set_last_error(Some(err))\n\n }\n\n Err(_) => {\n\n // panic error\n\n let err = err_msg!(Unexpected, \"Panic during execution\");\n\n set_last_error(Some(err))\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"indy-credx/src/ffi/error.rs", "rank": 8, "score": 152343.21353852103 }, { "content": "pub fn tmp_file_path(file_name: &str) -> PathBuf {\n\n let mut path = tmp_path();\n\n path.push(file_name);\n\n path\n\n}\n\n\n", "file_path": "indy-test-utils/src/environment.rs", "rank": 9, "score": 151753.96675234748 }, { "content": "pub fn encode_credential_attribute(raw_value: &str) -> Result<String> {\n\n if let Ok(val) = raw_value.parse::<i32>() {\n\n Ok(val.to_string())\n\n } else {\n\n let digest = SHA256::digest(raw_value.as_bytes());\n\n #[cfg(target_endian = \"big\")]\n\n let digest = {\n\n let mut d = digest;\n\n d.reverse();\n\n d\n\n };\n\n Ok(BigNumber::from_bytes(&digest)?.to_dec()?)\n\n }\n\n}\n\n\n", "file_path": "indy-credx/src/services/helpers.rs", "rank": 10, "score": 147176.4783360569 }, { "content": "fn _convert_value_to_unqualified(tag_name: &str, tag_value: &str) -> String {\n\n match tag_name {\n\n \"issuer_did\" | \"schema_issuer_did\" => DidValue(tag_value.to_string()).to_unqualified().0,\n\n \"schema_id\" => SchemaId(tag_value.to_string()).to_unqualified().0,\n\n \"cred_def_id\" => {\n\n CredentialDefinitionId(tag_value.to_string())\n\n .to_unqualified()\n\n .0\n\n }\n\n \"rev_reg_id\" => {\n\n RevocationRegistryId(tag_value.to_string())\n\n .to_unqualified()\n\n .0\n\n }\n\n _ => tag_value.to_string(),\n\n }\n\n}\n\n\n", "file_path": "indy-data-types/src/anoncreds/pres_request.rs", "rank": 11, "score": 143809.62628862474 }, { "content": "pub trait TailsReaderImpl: std::fmt::Debug + Send {\n\n fn hash(&mut self) -> Result<Vec<u8>>;\n\n fn read(&mut self, size: usize, offset: usize) -> Result<Vec<u8>>;\n\n}\n\n\n\nimpl RevocationTailsAccessor for TailsReader {\n\n fn access_tail(\n\n &self,\n\n tail_id: u32,\n\n accessor: &mut dyn FnMut(&Tail),\n\n ) -> std::result::Result<(), UrsaCryptoError> {\n\n trace!(\"access_tail >>> tail_id: {:?}\", tail_id);\n\n\n\n let tail_bytes = self\n\n .inner\n\n .borrow_mut()\n\n .read(\n\n TAIL_SIZE,\n\n TAIL_SIZE * 
tail_id as usize + TAILS_BLOB_TAG_SZ as usize,\n\n )\n", "file_path": "indy-credx/src/services/tails.rs", "rank": 12, "score": 134633.637768302 }, { "content": "#[cfg(feature = \"ed25519\")]\n\npub fn generate_did(\n\n seed: Option<&[u8]>,\n\n version: Option<usize>,\n\n) -> Result<(ShortDidValue, PrivateKey, VerKey), crate::ConversionError> {\n\n let sk = match seed {\n\n Some(seed) => PrivateKey::from_seed(seed)?,\n\n None => PrivateKey::generate(Some(KeyType::ED25519))?,\n\n };\n\n\n\n let pk = sk.public_key()?;\n\n let did = match version {\n\n Some(1) | None => Ok(base58::encode(&pk.as_ref()[..16])),\n\n Some(2) => {\n\n let mut hasher = Sha256::new();\n\n Digest::update(&mut hasher, &pk.as_ref());\n\n let hash = hasher.finalize();\n\n Ok(base58::encode(&hash[..16]))\n\n }\n\n _ => Err(\"Version must be one of 1,2\"),\n\n }?;\n", "file_path": "indy-utils/src/did.rs", "rank": 13, "score": 131799.46990433478 }, { "content": "/// Generate the normalized form of a ledger transaction request for signing\n\npub fn serialize_signature(v: &SJsonValue) -> Result<String, ValidationError> {\n\n let _type = v[\"operation\"][\"type\"].clone();\n\n _serialize_signature(v, true, _type.as_str())\n\n}\n\n\n", "file_path": "indy-utils/src/txn_signature.rs", "rank": 14, "score": 125452.14645731552 }, { "content": "pub fn create_credential(\n\n cred_def: &CredentialDefinition,\n\n cred_def_private: &CredentialDefinitionPrivate,\n\n cred_offer: &CredentialOffer,\n\n cred_request: &CredentialRequest,\n\n cred_values: CredentialValues,\n\n revocation_config: Option<CredentialRevocationConfig>,\n\n) -> Result<(\n\n Credential,\n\n Option<RevocationRegistry>,\n\n Option<RevocationRegistryDelta>,\n\n)> {\n\n trace!(\"create_credential >>> cred_def: {:?}, cred_def_private: {:?}, cred_offer.nonce: {:?}, cred_request: {:?},\\\n\n cred_values: {:?}, revocation_config: {:?}\",\n\n cred_def, secret!(&cred_def_private), &cred_offer.nonce, &cred_request, secret!(&cred_values), 
revocation_config,\n\n );\n\n\n\n let cred_public_key = match cred_def {\n\n CredentialDefinition::CredentialDefinitionV1(cd) => {\n\n cd.get_public_key().map_err(err_map!(\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 15, "score": 124903.27987418149 }, { "content": "pub fn create_schema(\n\n origin_did: &DidValue,\n\n schema_name: &str,\n\n schema_version: &str,\n\n attr_names: AttributeNames,\n\n seq_no: Option<u32>,\n\n) -> Result<Schema> {\n\n trace!(\"create_schema >>> origin_did: {:?}, schema_name: {:?}, schema_version: {:?}, attr_names: {:?}\",\n\n origin_did, schema_name, schema_version, attr_names);\n\n\n\n origin_did.validate()?;\n\n let schema_id = SchemaId::new(&origin_did, schema_name, schema_version);\n\n let schema = SchemaV1 {\n\n id: schema_id,\n\n name: schema_name.to_string(),\n\n version: schema_version.to_string(),\n\n attr_names,\n\n seq_no,\n\n };\n\n Ok(Schema::SchemaV1(schema))\n\n}\n\n\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 16, "score": 124903.27987418149 }, { "content": "#[allow(dead_code)]\n\npub fn recover_credential(\n\n rev_reg_def: &RevocationRegistryDefinition,\n\n rev_reg: &RevocationRegistry,\n\n cred_rev_idx: u32,\n\n tails_reader: &TailsReader,\n\n) -> Result<(RevocationRegistry, RevocationRegistryDelta)> {\n\n trace!(\n\n \"recover >>> rev_reg_def: {:?}, rev_reg: {:?}, cred_rev_idx: {:?}\",\n\n rev_reg_def,\n\n rev_reg,\n\n secret!(&cred_rev_idx)\n\n );\n\n\n\n let max_cred_num = match rev_reg_def {\n\n RevocationRegistryDefinition::RevocationRegistryDefinitionV1(v1) => v1.value.max_cred_num,\n\n };\n\n let mut rev_reg = match rev_reg {\n\n RevocationRegistry::RevocationRegistryV1(v1) => v1.value.clone(),\n\n };\n\n let rev_reg_delta =\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 17, "score": 124903.27987418149 }, { "content": "pub fn create_presentation(\n\n pres_req: &PresentationRequest,\n\n credentials: PresentCredentials,\n\n self_attested: Option<HashMap<String, 
String>>,\n\n master_secret: &MasterSecret,\n\n schemas: &HashMap<SchemaId, &Schema>,\n\n cred_defs: &HashMap<CredentialDefinitionId, &CredentialDefinition>,\n\n) -> Result<Presentation> {\n\n trace!(\"create_proof >>> credentials: {:?}, pres_req: {:?}, credentials: {:?}, self_attested: {:?}, master_secret: {:?}, schemas: {:?}, cred_defs: {:?}\",\n\n credentials, pres_req, credentials, &self_attested, secret!(&master_secret), schemas, cred_defs);\n\n\n\n if credentials.is_empty()\n\n && self_attested\n\n .as_ref()\n\n .map(HashMap::is_empty)\n\n .unwrap_or(true)\n\n {\n\n return Err(err_msg!(\n\n \"No credential mapping or self-attested attributes presented\"\n\n ));\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 18, "score": 124903.27987418149 }, { "content": "pub fn process_credential(\n\n credential: &mut Credential,\n\n cred_request_metadata: &CredentialRequestMetadata,\n\n master_secret: &MasterSecret,\n\n cred_def: &CredentialDefinition,\n\n rev_reg_def: Option<&RevocationRegistryDefinition>,\n\n) -> Result<()> {\n\n trace!(\"process_credential >>> credential: {:?}, cred_request_metadata: {:?}, master_secret: {:?}, cred_def: {:?}, rev_reg_def: {:?}\",\n\n credential, cred_request_metadata, secret!(&master_secret), cred_def, rev_reg_def);\n\n\n\n let cred_def = match cred_def {\n\n CredentialDefinition::CredentialDefinitionV1(cd) => cd,\n\n };\n\n let credential_pub_key = CredentialPublicKey::build_from_parts(\n\n &cred_def.value.primary,\n\n cred_def.value.revocation.as_ref(),\n\n )?;\n\n let credential_values =\n\n build_credential_values(&credential.values.0, Some(&master_secret.value))?;\n\n let rev_pub_key = match rev_reg_def {\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 19, "score": 124903.27987418149 }, { "content": "pub fn revoke_credential(\n\n rev_reg_def: &RevocationRegistryDefinition,\n\n rev_reg: &RevocationRegistry,\n\n cred_rev_idx: u32,\n\n tails_reader: &TailsReader,\n\n) -> Result<(RevocationRegistry, 
RevocationRegistryDelta)> {\n\n trace!(\n\n \"revoke >>> rev_reg_def: {:?}, rev_reg: {:?}, cred_rev_idx: {:?}\",\n\n rev_reg_def,\n\n rev_reg,\n\n secret!(&cred_rev_idx)\n\n );\n\n\n\n let max_cred_num = match rev_reg_def {\n\n RevocationRegistryDefinition::RevocationRegistryDefinitionV1(v1) => v1.value.max_cred_num,\n\n };\n\n let mut rev_reg = match rev_reg {\n\n RevocationRegistry::RevocationRegistryV1(v1) => v1.value.clone(),\n\n };\n\n let rev_reg_delta =\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 20, "score": 124903.27987418149 }, { "content": "pub fn verify_presentation(\n\n presentation: &Presentation,\n\n pres_req: &PresentationRequest,\n\n schemas: &HashMap<SchemaId, &Schema>,\n\n cred_defs: &HashMap<CredentialDefinitionId, &CredentialDefinition>,\n\n rev_reg_defs: Option<&HashMap<RevocationRegistryId, &RevocationRegistryDefinition>>,\n\n rev_regs: Option<&HashMap<RevocationRegistryId, HashMap<u64, &RevocationRegistry>>>,\n\n) -> Result<bool> {\n\n trace!(\"verify >>> presentation: {:?}, pres_req: {:?}, schemas: {:?}, cred_defs: {:?}, rev_reg_defs: {:?} rev_regs: {:?}\",\n\n presentation, pres_req, schemas, cred_defs, rev_reg_defs, rev_regs);\n\n\n\n let pres_req = pres_req.value();\n\n let received_revealed_attrs: HashMap<String, Identifier> =\n\n received_revealed_attrs(&presentation)?;\n\n let received_unrevealed_attrs: HashMap<String, Identifier> =\n\n received_unrevealed_attrs(&presentation)?;\n\n let received_predicates: HashMap<String, Identifier> = received_predicates(&presentation)?;\n\n let received_self_attested_attrs: HashSet<String> = received_self_attested_attrs(&presentation);\n\n\n\n compare_attr_from_proof_and_request(\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 21, "score": 124903.27987418149 }, { "content": "pub fn create_credential_definition(\n\n origin_did: &DidValue,\n\n schema: &Schema,\n\n tag: &str,\n\n signature_type: SignatureType,\n\n config: CredentialDefinitionConfig,\n\n) -> 
Result<(\n\n CredentialDefinition,\n\n CredentialDefinitionPrivate,\n\n CredentialKeyCorrectnessProof,\n\n)> {\n\n trace!(\n\n \"create_credential_definition >>> schema: {:?}, config: {:?}\",\n\n schema,\n\n config\n\n );\n\n\n\n let schema = match schema {\n\n Schema::SchemaV1(s) => s,\n\n };\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 22, "score": 122851.82603435559 }, { "content": "pub fn update_revocation_registry(\n\n rev_reg_def: &RevocationRegistryDefinition,\n\n rev_reg: &RevocationRegistry,\n\n issued: BTreeSet<u32>,\n\n revoked: BTreeSet<u32>,\n\n tails_reader: &TailsReader,\n\n) -> Result<(RevocationRegistry, RevocationRegistryDelta)> {\n\n let rev_reg_def = match rev_reg_def {\n\n RevocationRegistryDefinition::RevocationRegistryDefinitionV1(v1) => v1,\n\n };\n\n let mut rev_reg = match rev_reg {\n\n RevocationRegistry::RevocationRegistryV1(v1) => v1.value.clone(),\n\n };\n\n let max_cred_num = rev_reg_def.value.max_cred_num;\n\n let delta = CryptoIssuer::update_revocation_registry(\n\n &mut rev_reg,\n\n max_cred_num,\n\n issued,\n\n revoked,\n\n tails_reader,\n\n )?;\n\n Ok((\n\n RevocationRegistry::RevocationRegistryV1(RevocationRegistryV1 { value: rev_reg }),\n\n RevocationRegistryDelta::RevocationRegistryDeltaV1(RevocationRegistryDeltaV1 {\n\n value: delta,\n\n }),\n\n ))\n\n}\n\n\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 23, "score": 122851.82603435559 }, { "content": "pub fn create_credential_offer(\n\n schema_id: &SchemaId,\n\n cred_def: &CredentialDefinition,\n\n correctness_proof: &CredentialKeyCorrectnessProof,\n\n) -> Result<CredentialOffer> {\n\n trace!(\"create_credential_offer >>> cred_def: {:?}\", cred_def);\n\n\n\n let nonce = Nonce::new().map_err(err_map!(Unexpected, \"Error creating nonce\"))?;\n\n\n\n let cred_def = match cred_def {\n\n CredentialDefinition::CredentialDefinitionV1(c) => c,\n\n };\n\n\n\n let key_correctness_proof = correctness_proof\n\n .try_clone()\n\n 
.map_err(err_map!(Unexpected))?;\n\n let credential_offer = CredentialOffer {\n\n schema_id: schema_id.clone(),\n\n cred_def_id: cred_def.id.clone(),\n\n key_correctness_proof: key_correctness_proof.value,\n\n nonce,\n\n method_name: None,\n\n };\n\n\n\n trace!(\"create_credential_offer <<< result: {:?}\", credential_offer);\n\n Ok(credential_offer)\n\n}\n\n\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 24, "score": 122851.82603435559 }, { "content": "pub fn create_credential_request(\n\n prover_did: &DidValue,\n\n cred_def: &CredentialDefinition,\n\n master_secret: &MasterSecret,\n\n master_secret_id: &str,\n\n credential_offer: &CredentialOffer,\n\n) -> Result<(CredentialRequest, CredentialRequestMetadata)> {\n\n trace!(\n\n \"create_credential_request >>> cred_def: {:?}, master_secret: {:?}, credential_offer: {:?}\",\n\n cred_def,\n\n secret!(&master_secret),\n\n credential_offer\n\n );\n\n\n\n let cred_def = match cred_def {\n\n CredentialDefinition::CredentialDefinitionV1(cd) => cd,\n\n };\n\n let credential_pub_key = CredentialPublicKey::build_from_parts(\n\n &cred_def.value.primary,\n\n cred_def.value.revocation.as_ref(),\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 25, "score": 122851.82603435559 }, { "content": "pub fn build_credential_values(\n\n credential_values: &HashMap<String, AttributeValues>,\n\n master_secret: Option<&CryptoMasterSecret>,\n\n) -> Result<CryptoCredentialValues> {\n\n trace!(\n\n \"build_credential_values >>> credential_values: {:?}\",\n\n credential_values\n\n );\n\n\n\n let mut credential_values_builder = issuer::Issuer::new_credential_values_builder()?;\n\n for (attr, values) in credential_values {\n\n credential_values_builder.add_dec_known(&attr_common_view(attr), &values.encoded)?;\n\n }\n\n if let Some(ms) = master_secret {\n\n credential_values_builder.add_value_hidden(\"master_secret\", &ms.value()?)?;\n\n }\n\n\n\n let res = credential_values_builder.finalize()?;\n\n\n\n 
trace!(\"build_credential_values <<< res: {:?}\", res);\n\n\n\n Ok(res)\n\n}\n\n\n", "file_path": "indy-credx/src/services/helpers.rs", "rank": 26, "score": 122851.82603435559 }, { "content": "fn precess_filed(filed: &str, filter_value: &str, tag_value: &str) -> Result<()> {\n\n if filter_value == tag_value {\n\n Ok(())\n\n } else {\n\n Err(err_msg!(\n\n ProofRejected,\n\n \"\\\"{}\\\" values are different: expected: \\\"{}\\\", actual: \\\"{}\\\"\",\n\n filed,\n\n tag_value,\n\n filter_value,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 27, "score": 122141.67245287611 }, { "content": "pub fn make_credential_definition_id(\n\n origin_did: &DidValue,\n\n schema_id: &SchemaId,\n\n schema_seq_no: Option<u32>,\n\n tag: &str,\n\n signature_type: SignatureType,\n\n) -> Result<CredentialDefinitionId> {\n\n let schema_id = match (origin_did.get_method(), schema_id.get_method()) {\n\n (None, Some(_)) => {\n\n return Err(err_msg!(\n\n \"Cannot use an unqualified Origin DID with fully qualified Schema ID\",\n\n ));\n\n }\n\n (method, _) => schema_id.default_method(method),\n\n };\n\n let schema_infix_id = schema_seq_no\n\n .map(|n| SchemaId(n.to_string()))\n\n .unwrap_or(schema_id.clone());\n\n\n\n Ok(CredentialDefinitionId::new(\n\n origin_did,\n\n &schema_infix_id,\n\n &signature_type.to_str(),\n\n tag,\n\n ))\n\n}\n\n\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 28, "score": 120907.82085109231 }, { "content": "pub fn build_sub_proof_request(\n\n attrs_for_credential: &[AttributeInfo],\n\n predicates_for_credential: &[PredicateInfo],\n\n) -> Result<SubProofRequest> {\n\n trace!(\n\n \"build_sub_proof_request >>> attrs_for_credential: {:?}, predicates_for_credential: {:?}\",\n\n attrs_for_credential,\n\n predicates_for_credential\n\n );\n\n\n\n let mut sub_proof_request_builder = verifier::Verifier::new_sub_proof_request_builder()?;\n\n\n\n for attr in attrs_for_credential {\n\n let names = if let Some(name) = 
&attr.name {\n\n vec![name.clone()]\n\n } else if let Some(names) = &attr.names {\n\n names.to_owned()\n\n } else {\n\n error!(\n\n r#\"Attr for credential restriction should contain \"name\" or \"names\" param. Current attr: {:?}\"#,\n", "file_path": "indy-credx/src/services/helpers.rs", "rank": 29, "score": 120907.82085109231 }, { "content": "pub fn merge_revocation_registry_deltas(\n\n rev_reg_delta: &RevocationRegistryDelta,\n\n other_delta: &RevocationRegistryDelta,\n\n) -> Result<RevocationRegistryDelta> {\n\n match (rev_reg_delta, other_delta) {\n\n (\n\n RevocationRegistryDelta::RevocationRegistryDeltaV1(v1),\n\n RevocationRegistryDelta::RevocationRegistryDeltaV1(other),\n\n ) => {\n\n let mut result = v1.clone();\n\n result.value.merge(&other.value)?;\n\n Ok(RevocationRegistryDelta::RevocationRegistryDeltaV1(result))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 30, "score": 120907.82085109231 }, { "content": "pub fn get_non_revoc_interval(\n\n global_interval: &Option<NonRevocedInterval>,\n\n local_interval: &Option<NonRevocedInterval>,\n\n) -> Option<NonRevocedInterval> {\n\n trace!(\n\n \"get_non_revoc_interval >>> global_interval: {:?}, local_interval: {:?}\",\n\n global_interval,\n\n local_interval\n\n );\n\n\n\n let interval = local_interval\n\n .clone()\n\n .or_else(|| global_interval.clone().or(None));\n\n\n\n trace!(\"get_non_revoc_interval <<< interval: {:?}\", interval);\n\n\n\n interval\n\n}\n\n\n", "file_path": "indy-credx/src/services/helpers.rs", "rank": 31, "score": 120907.82085109231 }, { "content": "pub fn make_revocation_registry_id(\n\n origin_did: &DidValue,\n\n cred_def: &CredentialDefinition,\n\n tag: &str,\n\n rev_reg_type: RegistryType,\n\n) -> Result<RevocationRegistryId> {\n\n let cred_def = match cred_def {\n\n CredentialDefinition::CredentialDefinitionV1(c) => c,\n\n };\n\n\n\n let origin_did = match (origin_did.get_method(), 
cred_def.id.get_method()) {\n\n (None, Some(_)) => {\n\n return Err(err_msg!(\"Cannot use an unqualified Origin DID with a fully qualified Credential Definition ID\"));\n\n }\n\n (Some(_), None) => {\n\n return Err(err_msg!(\"Cannot use a fully qualified Origin DID with an unqualified Credential Definition ID\"));\n\n }\n\n _ => origin_did,\n\n };\n\n\n\n Ok(RevocationRegistryId::new(\n\n &origin_did,\n\n &cred_def.id,\n\n &rev_reg_type.to_str(),\n\n tag,\n\n ))\n\n}\n\n\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 32, "score": 120907.82085109231 }, { "content": "pub fn create_or_update_revocation_state(\n\n tails_reader: TailsReader,\n\n revoc_reg_def: &RevocationRegistryDefinition,\n\n rev_reg_delta: &RevocationRegistryDelta,\n\n rev_reg_idx: u32,\n\n timestamp: u64,\n\n rev_state: Option<&CredentialRevocationState>,\n\n) -> Result<CredentialRevocationState> {\n\n trace!(\n\n \"create_or_update_revocation_state >>> , tails_reader: {:?}, revoc_reg_def: {:?}, \\\n\nrev_reg_delta: {:?}, rev_reg_idx: {}, timestamp: {:?}, rev_state: {:?}\",\n\n tails_reader,\n\n revoc_reg_def,\n\n rev_reg_delta,\n\n rev_reg_idx,\n\n timestamp,\n\n rev_state\n\n );\n\n\n\n let revoc_reg_def = match revoc_reg_def {\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 33, "score": 120907.82085109231 }, { "content": "pub fn decode<T: AsRef<[u8]>>(val: T) -> Result<Vec<u8>, ConversionError> {\n\n Ok(base64::decode(val).map_err(|err| (\"Error decoding base64 data\", err))?)\n\n}\n\n\n", "file_path": "indy-utils/src/base64.rs", "rank": 34, "score": 116215.00450142854 }, { "content": "pub fn decode<T: AsRef<[u8]>>(val: T) -> Result<Vec<u8>, ConversionError> {\n\n Ok(bs58::decode(val)\n\n .into_vec()\n\n .map_err(|err| (\"Error decoding base58 data\", err))?)\n\n}\n\n\n", "file_path": "indy-utils/src/base58.rs", "rank": 35, "score": 116215.00450142853 }, { "content": "pub fn create_revocation_registry<TW>(\n\n origin_did: &DidValue,\n\n cred_def: 
&CredentialDefinition,\n\n tag: &str,\n\n rev_reg_type: RegistryType,\n\n issuance_type: IssuanceType,\n\n max_cred_num: u32,\n\n tails_writer: &mut TW,\n\n) -> Result<(\n\n RevocationRegistryDefinition,\n\n RevocationRegistryDefinitionPrivate,\n\n RevocationRegistry,\n\n RevocationRegistryDelta,\n\n)>\n\nwhere\n\n TW: TailsWriter,\n\n{\n\n trace!(\"create_revocation_registry >>> origin_did: {:?}, cred_def: {:?}, tag: {:?}, max_cred_num: {:?}, rev_reg_type: {:?}, issuance_type: {:?}\",\n\n origin_did, cred_def, tag, max_cred_num, rev_reg_type, issuance_type);\n\n\n", "file_path": "indy-credx/src/services/issuer.rs", "rank": 36, "score": 115289.7790840774 }, { "content": "pub fn test_pool_ip() -> String {\n\n env::var(\"TEST_POOL_IP\").unwrap_or(\"127.0.0.1\".to_string())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn tmp_path_works() {\n\n let path = tmp_path();\n\n\n\n assert!(path.is_absolute());\n\n assert!(path.has_root());\n\n assert!(path.to_string_lossy().contains(\"indy_ledger_client\"));\n\n }\n\n\n\n #[test]\n\n fn tmp_file_path_works() {\n\n let path = tmp_file_path(\"test.txt\");\n", "file_path": "indy-test-utils/src/environment.rs", "rank": 37, "score": 115289.7790840774 }, { "content": "pub fn tmp_path() -> PathBuf {\n\n let mut path = env::temp_dir();\n\n path.push(\"indy_ledger_client\");\n\n path\n\n}\n\n\n", "file_path": "indy-test-utils/src/environment.rs", "rank": 38, "score": 115289.7790840774 }, { "content": "pub fn decode_urlsafe<T: AsRef<[u8]>>(val: T) -> Result<Vec<u8>, ConversionError> {\n\n Ok(base64::decode_config(val, base64::URL_SAFE)\n\n .map_err(|err| (\"Error decoding base64-URL data\", err))?)\n\n}\n\n\n", "file_path": "indy-utils/src/base64.rs", "rank": 39, "score": 114137.5565710856 }, { "content": "fn is_attr_operator(key: &str) -> bool {\n\n key.starts_with(\"attr::\") && key.ends_with(\"::marker\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n pub const SCHEMA_ID: &str = 
\"123\";\n\n pub const SCHEMA_NAME: &str = \"Schema Name\";\n\n pub const SCHEMA_ISSUER_DID: &str = \"234\";\n\n pub const SCHEMA_VERSION: &str = \"1.2.3\";\n\n pub const CRED_DEF_ID: &str = \"345\";\n\n pub const ISSUER_DID: &str = \"456\";\n\n\n\n fn schema_id_tag() -> String {\n\n \"schema_id\".to_string()\n\n }\n\n\n\n fn schema_name_tag() -> String {\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 40, "score": 111623.41387291515 }, { "content": "pub fn new_nonce() -> Result<Nonce> {\n\n Nonce::new().map_err(err_map!(Unexpected))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn _interval() -> NonRevocedInterval {\n\n NonRevocedInterval {\n\n from: None,\n\n to: Some(123),\n\n }\n\n }\n\n\n\n #[test]\n\n fn get_non_revoc_interval_for_global() {\n\n let res = get_non_revoc_interval(&Some(_interval()), &None).unwrap();\n\n assert_eq!(_interval(), res);\n\n }\n", "file_path": "indy-credx/src/services/helpers.rs", "rank": 41, "score": 110835.5771716103 }, { "content": "pub fn generate_nonce() -> Result<Nonce> {\n\n new_nonce()\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 42, "score": 110835.5771716103 }, { "content": "pub trait TailsWriter: std::fmt::Debug {\n\n fn write(&mut self, generator: &mut RevocationTailsGenerator) -> Result<(String, String)>;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct TailsFileWriter {\n\n root_path: PathBuf,\n\n}\n\n\n\nimpl TailsFileWriter {\n\n pub fn new(root_path: Option<String>) -> Self {\n\n Self {\n\n root_path: root_path\n\n .map(PathBuf::from)\n\n .unwrap_or_else(|| std::env::temp_dir()),\n\n }\n\n }\n\n}\n\n\n\nimpl TailsWriter for TailsFileWriter {\n", "file_path": "indy-credx/src/services/tails.rs", "rank": 43, "score": 107345.7148903634 }, { "content": "pub fn create_master_secret() -> Result<MasterSecret> {\n\n MasterSecret::new().map_err(err_map!(Unexpected))\n\n}\n\n\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 44, "score": 107237.82643326336 
}, { "content": "fn is_attr_internal_tag(key: &str, attr_value_map: &HashMap<String, Option<&str>>) -> bool {\n\n INTERNAL_TAG_MATCHER\n\n .captures(key)\n\n .map(|caps| {\n\n caps.get(1)\n\n .map(|s| attr_value_map.contains_key(&s.as_str().to_string()))\n\n .unwrap_or(false)\n\n })\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 45, "score": 104411.9111304207 }, { "content": "pub fn build_non_credential_schema() -> Result<NonCredentialSchema> {\n\n trace!(\"build_non_credential_schema\");\n\n\n\n let mut non_credential_schema_builder = issuer::Issuer::new_non_credential_schema_builder()?;\n\n non_credential_schema_builder.add_attr(\"master_secret\")?;\n\n let res = non_credential_schema_builder.finalize()?;\n\n\n\n trace!(\"build_non_credential_schema <<< res: {:?}\", res);\n\n Ok(res)\n\n}\n\n\n", "file_path": "indy-credx/src/services/helpers.rs", "rank": 46, "score": 103981.21743103203 }, { "content": "fn _process_operator(\n\n restriction_op: &Query,\n\n version: &PresentationRequestVersion,\n\n) -> Result<(), ValidationError> {\n\n match restriction_op {\n\n Query::Eq(ref tag_name, ref tag_value)\n\n | Query::Neq(ref tag_name, ref tag_value)\n\n | Query::Gt(ref tag_name, ref tag_value)\n\n | Query::Gte(ref tag_name, ref tag_value)\n\n | Query::Lt(ref tag_name, ref tag_value)\n\n | Query::Lte(ref tag_name, ref tag_value)\n\n | Query::Like(ref tag_name, ref tag_value) => {\n\n _check_restriction(tag_name, tag_value, version)\n\n }\n\n Query::In(ref tag_name, ref tag_values) => {\n\n tag_values\n\n .iter()\n\n .map(|tag_value| _check_restriction(tag_name, tag_value, version))\n\n .collect::<Result<Vec<()>, ValidationError>>()?;\n\n Ok(())\n", "file_path": "indy-data-types/src/anoncreds/pres_request.rs", "rank": 47, "score": 100070.54802075813 }, { "content": "fn _check_restriction(\n\n tag_name: &str,\n\n tag_value: &str,\n\n version: &PresentationRequestVersion,\n\n) -> Result<(), ValidationError> {\n\n if *version 
== PresentationRequestVersion::V1\n\n && Credential::QUALIFIABLE_TAGS.contains(&tag_name)\n\n && qualifiable::is_fully_qualified(tag_value)\n\n {\n\n return Err(invalid!(\"Presentation request validation failed: fully qualified identifiers can not be used for presentation request of the first version. \\\n\n Please, set \\\"ver\\\":\\\"2.0\\\" to use fully qualified identifiers.\"));\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[cfg(feature = \"serde\")]\n", "file_path": "indy-data-types/src/anoncreds/pres_request.rs", "rank": 48, "score": 100070.54802075813 }, { "content": "#[test]\n\nfn anoncreds_works_for_different_predicate_types() {\n\n Setup::empty();\n\n\n\n //1. Create Issuer wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_single_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //2. Create Prover wallet, gets wallet handle\n\n let (prover_wallet_handle, prover_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_single_issuer_single_prover\")\n\n .unwrap();\n\n\n\n let schema_attributes = r#\"[\"age\", \"height\", \"weight\", \"salary\"]\"#;\n\n\n\n //3. Issuer creates Schema and Credential Definition\n\n let (schema_id, schema_json, cred_def_id, cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 49, "score": 98165.84574053144 }, { "content": "pub fn encode<T: AsRef<[u8]>>(val: T) -> String {\n\n bs58::encode(val).into_string()\n\n}\n", "file_path": "indy-utils/src/base58.rs", "rank": 50, "score": 95945.25666603219 }, { "content": "#[test]\n\nfn anoncreds_works_for_prover_hold_different_credentials_types() {\n\n Setup::empty();\n\n\n\n //1. 
Issuer1 creates wallet, gets wallet handles\n\n let (issuer_gvt_wallet_handle, issuer_gvt_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_multiple_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //2. Issuer2 creates wallet, gets wallet handles\n\n let (issuer_xyz_wallet_handle, issuer_xyz_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_multiple_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //3. Prover creates wallet, gets wallet handles\n\n let (prover_wallet_handle, prover_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_multiple_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //4. Issuer1 creates fully qualified GVT Schema and Credential Definition\n\n let gvt_issuer_did = \"did:sov:NcYxiDXkpYi6ov5FcYDi1e\"; // fully qualified did\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 51, "score": 94640.83248747331 }, { "content": "pub fn encode_urlsafe<T: AsRef<[u8]>>(val: T) -> String {\n\n base64::encode_config(val, base64::URL_SAFE)\n\n}\n", "file_path": "indy-utils/src/base64.rs", "rank": 52, "score": 94277.41564453064 }, { "content": "/// An identifier which can be qualified with a prefix and method\n\npub trait Qualifiable: From<String> + std::ops::Deref<Target = str> + Validatable {\n\n fn prefix() -> &'static str;\n\n\n\n fn combine(method: Option<&str>, entity: &str) -> Self {\n\n Self::from(combine(Self::prefix(), method, entity))\n\n }\n\n\n\n fn split<'a>(&'a self) -> (Option<&'a str>, &'a str) {\n\n split(Self::prefix(), self.deref())\n\n }\n\n\n\n fn get_method<'a>(&'a self) -> Option<&'a str> {\n\n let (method, _rest) = self.split();\n\n method\n\n }\n\n\n\n fn default_method(&self, method: Option<&str>) -> Self {\n\n let (prev_method, rest) = self.split();\n\n match prev_method {\n\n Some(_) => Self::from(self.to_string()),\n", "file_path": "indy-utils/src/qualifiable.rs", "rank": 54, "score": 92080.18583849217 }, { "content": "pub fn 
build_credential_schema(attrs: &HashSet<String>) -> Result<CredentialSchema> {\n\n trace!(\"build_credential_schema >>> attrs: {:?}\", attrs);\n\n\n\n let mut credential_schema_builder = issuer::Issuer::new_credential_schema_builder()?;\n\n for attr in attrs {\n\n credential_schema_builder.add_attr(&attr_common_view(attr))?;\n\n }\n\n\n\n let res = credential_schema_builder.finalize()?;\n\n\n\n trace!(\"build_credential_schema <<< res: {:?}\", res);\n\n\n\n Ok(res)\n\n}\n\n\n", "file_path": "indy-credx/src/services/helpers.rs", "rank": 55, "score": 91880.70676738891 }, { "content": "fn gather_filter_info(referent: &str, identifiers: &HashMap<String, Identifier>) -> Result<Filter> {\n\n let identifier = identifiers.get(referent).ok_or_else(|| {\n\n err_msg!(\n\n InvalidState,\n\n \"Identifier not found for referent: {}\",\n\n referent\n\n )\n\n })?;\n\n\n\n let (_, schema_issuer_did, schema_name, schema_version) =\n\n identifier.schema_id.parts().ok_or_else(|| {\n\n err_msg!(\n\n \"Invalid Schema ID `{}`: wrong number of parts\",\n\n identifier.schema_id.0\n\n )\n\n })?;\n\n\n\n let issuer_did = identifier.cred_def_id.issuer_did().ok_or_else(|| {\n\n err_msg!(\n\n \"Invalid Credential Definition ID `{}`: wrong number of parts\",\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 56, "score": 87034.43603127466 }, { "content": "fn registry_indices_to_set(indices: impl Iterator<Item = i64>) -> Result<BTreeSet<u32>> {\n\n indices.into_iter().try_fold(BTreeSet::new(), |mut r, idx| {\n\n r.insert(\n\n idx.try_into()\n\n .map_err(|_| err_msg!(\"Invalid registry index\"))?,\n\n );\n\n Result::Ok(r)\n\n })\n\n}\n\n\n\nimpl_indy_object!(RevocationRegistryDefinition, \"RevocationRegistryDefinition\");\n\nimpl_indy_object_from_json!(\n\n RevocationRegistryDefinition,\n\n credx_revocation_registry_definition_from_json\n\n);\n\n\n\nimpl IndyObjectId for RevocationRegistryDefinition {\n\n type Id = RevocationRegistryId;\n\n\n\n fn get_id(&self) -> Self::Id {\n", 
"file_path": "indy-credx/src/ffi/revocation.rs", "rank": 57, "score": 85905.1509689296 }, { "content": "fn _convert_query_to_unqualified(query: &Query) -> Query {\n\n match query {\n\n Query::Eq(tag_name, ref tag_value) => Query::Eq(\n\n tag_name.to_string(),\n\n _convert_value_to_unqualified(tag_name, tag_value),\n\n ),\n\n Query::Neq(ref tag_name, ref tag_value) => Query::Neq(\n\n tag_name.to_string(),\n\n _convert_value_to_unqualified(tag_name, tag_value),\n\n ),\n\n Query::In(ref tag_name, ref tag_values) => Query::In(\n\n tag_name.to_string(),\n\n tag_values\n\n .iter()\n\n .map(|tag_value| _convert_value_to_unqualified(tag_name, tag_value))\n\n .collect::<Vec<String>>(),\n\n ),\n\n Query::And(ref queries) => Query::And(\n\n queries\n\n .iter()\n", "file_path": "indy-data-types/src/anoncreds/pres_request.rs", "rank": 58, "score": 84511.08514790512 }, { "content": "fn is_self_attested(\n\n referent: &str,\n\n info: &AttributeInfo,\n\n self_attested_attrs: &HashSet<String>,\n\n) -> bool {\n\n match info.restrictions.as_ref() {\n\n Some(&Query::And(ref array)) | Some(&Query::Or(ref array)) if array.is_empty() => {\n\n self_attested_attrs.contains(referent)\n\n }\n\n None => self_attested_attrs.contains(referent),\n\n Some(_) => false,\n\n }\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 59, "score": 66438.2226095012 }, { "content": "fn process_filter(\n\n attr_value_map: &HashMap<String, Option<&str>>,\n\n tag: &str,\n\n tag_value: &str,\n\n filter: &Filter,\n\n) -> Result<()> {\n\n trace!(\n\n \"_process_filter: attr_value_map: {:?}, tag: {}, tag_value: {}, filter: {:?}\",\n\n attr_value_map,\n\n tag,\n\n tag_value,\n\n filter\n\n );\n\n match tag {\n\n tag_ @ \"schema_id\" => precess_filed(tag_, &filter.schema_id, tag_value),\n\n tag_ @ \"schema_issuer_did\" => precess_filed(tag_, &filter.schema_issuer_did, tag_value),\n\n tag_ @ \"schema_name\" => precess_filed(tag_, &filter.schema_name, tag_value),\n\n tag_ @ \"schema_version\" => 
precess_filed(tag_, &filter.schema_version, tag_value),\n\n tag_ @ \"cred_def_id\" => precess_filed(tag_, &filter.cred_def_id, tag_value),\n\n tag_ @ \"issuer_did\" => precess_filed(tag_, &filter.issuer_did, tag_value),\n\n x if is_attr_internal_tag(x, attr_value_map) => {\n\n check_internal_tag_revealed_value(x, tag_value, attr_value_map)\n\n }\n\n x if is_attr_operator(x) => Ok(()),\n\n _ => Err(err_msg!(\"Unknown Filter Type\")),\n\n }\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 60, "score": 66438.2226095012 }, { "content": "fn validate_timestamp(\n\n received_: &HashMap<String, Identifier>,\n\n referent: &str,\n\n global_interval: &Option<NonRevocedInterval>,\n\n local_interval: &Option<NonRevocedInterval>,\n\n) -> Result<()> {\n\n if get_non_revoc_interval(global_interval, local_interval).is_none() {\n\n return Ok(());\n\n }\n\n\n\n if !received_\n\n .get(referent)\n\n .map(|attr| attr.timestamp.is_some())\n\n .unwrap_or(false)\n\n {\n\n return Err(err_msg!(\"Missing timestamp\"));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 61, "score": 66438.2226095012 }, { "content": "fn _serialize_signature(\n\n v: &SJsonValue,\n\n is_top_level: bool,\n\n _type: Option<&str>,\n\n) -> Result<String, ValidationError> {\n\n match v {\n\n SJsonValue::Bool(value) => Ok(if *value {\n\n \"True\".to_string()\n\n } else {\n\n \"False\".to_string()\n\n }),\n\n SJsonValue::Number(value) => Ok(value.to_string()),\n\n SJsonValue::String(value) => Ok(value.to_string()),\n\n SJsonValue::Array(array) => array\n\n .into_iter()\n\n .map(|element| _serialize_signature(element, false, _type))\n\n .collect::<Result<Vec<String>, ValidationError>>()\n\n .map(|res| res.join(\",\")),\n\n SJsonValue::Object(map) => {\n\n let mut result = \"\".to_string();\n", "file_path": "indy-utils/src/txn_signature.rs", "rank": 62, "score": 66438.2226095012 }, { "content": "fn process_operator(\n\n attr_value_map: &HashMap<String, 
Option<&str>>,\n\n restriction_op: &Query,\n\n filter: &Filter,\n\n) -> Result<()> {\n\n match restriction_op {\n\n Query::Eq(ref tag_name, ref tag_value) => {\n\n process_filter(attr_value_map, &tag_name, &tag_value, filter).map_err(err_map!(\n\n \"$eq operator validation failed for tag: \\\"{}\\\", value: \\\"{}\\\"\",\n\n tag_name,\n\n tag_value\n\n ))\n\n }\n\n Query::Neq(ref tag_name, ref tag_value) => {\n\n if process_filter(attr_value_map, &tag_name, &tag_value, filter).is_err() {\n\n Ok(())\n\n } else {\n\n Err(err_msg!(ProofRejected,\n\n \"$neq operator validation failed for tag: \\\"{}\\\", value: \\\"{}\\\". Condition was passed.\", tag_name, tag_value))\n\n }\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 63, "score": 66438.2226095012 }, { "content": "/// The type of values stored in a `MerkleTree` must implement\n\n/// this trait, in order for them to be able to be fed\n\n/// to a Ring `Context` when computing the hash of a leaf.\n\n///\n\n/// A default instance for types that already implements\n\n/// `AsRef<[u8]>` is provided.\n\n///\n\n/// ## Example\n\n///\n\n/// Here is an example of how to implement `Hashable` for a type\n\n/// that does not (or cannot) implement `AsRef<[u8]>`:\n\n///\n\n/// ```ignore\n\n/// impl Hashable for PublicKey {\n\n/// fn update_context(&self, context: &mut Hasher) -> Result<(), CommonError> {\n\n/// let bytes: Vec<u8> = self.to_bytes();\n\n/// Ok(context.update(&bytes)?)\n\n/// }\n\n/// }\n\n/// ```\n\npub trait Hashable {\n\n /// Update the given `context` with `self`.\n\n ///\n\n /// See `openssl::hash::Hasher::update` for more information.\n\n fn update_context<D: Digest>(&self, context: &mut D) -> Result<(), ValidationError>;\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> Hashable for T {\n\n fn update_context<D: Digest>(&self, context: &mut D) -> Result<(), ValidationError> {\n\n Ok(context.update(self.as_ref()))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn 
hash_check_sha256() {\n\n assert_eq!(SHA256::output_size(), 32);\n\n assert_eq!(\n\n SHA256::digest_empty(),\n\n &[\n\n 227, 176, 196, 66, 152, 252, 28, 20, 154, 251, 244, 200, 153, 111, 185, 36, 39,\n\n 174, 65, 228, 100, 155, 147, 76, 164, 149, 153, 27, 120, 82, 184, 85,\n\n ]\n\n );\n\n }\n\n}\n", "file_path": "indy-utils/src/hash.rs", "rank": 64, "score": 65369.07056699235 }, { "content": "/// Trait for data types which need validation after being loaded from external sources\n\npub trait Validatable {\n\n fn validate(&self) -> Result<(), ValidationError> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "indy-utils/src/validation.rs", "rank": 65, "score": 65360.629930726354 }, { "content": "fn prepare_credential_for_proving(\n\n requested_attributes: HashSet<(String, bool)>,\n\n requested_predicates: HashSet<String>,\n\n pres_req: &PresentationRequestPayload,\n\n) -> Result<(Vec<RequestedAttributeInfo>, Vec<RequestedPredicateInfo>)> {\n\n trace!(\n\n \"_prepare_credentials_for_proving >>> requested_attributes: {:?}, requested_predicates: {:?}, pres_req: {:?}\",\n\n requested_attributes,\n\n requested_predicates,\n\n pres_req\n\n );\n\n\n\n let mut attrs = Vec::with_capacity(requested_attributes.len());\n\n let mut preds = Vec::with_capacity(requested_predicates.len());\n\n\n\n for (attr_referent, revealed) in requested_attributes {\n\n let attr_info = pres_req\n\n .requested_attributes\n\n .get(attr_referent.as_str())\n\n .ok_or_else(|| {\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 66, "score": 65357.70351761738 }, { "content": "fn verify_requested_restrictions(\n\n pres_req: &PresentationRequestPayload,\n\n requested_proof: &RequestedProof,\n\n received_revealed_attrs: &HashMap<String, Identifier>,\n\n received_unrevealed_attrs: &HashMap<String, Identifier>,\n\n received_predicates: &HashMap<String, Identifier>,\n\n self_attested_attrs: &HashSet<String>,\n\n) -> Result<()> {\n\n let proof_attr_identifiers: HashMap<String, Identifier> = 
received_revealed_attrs\n\n .iter()\n\n .chain(received_unrevealed_attrs)\n\n .map(|(r, id)| (r.to_string(), id.clone()))\n\n .collect();\n\n\n\n let requested_attrs: HashMap<String, AttributeInfo> = pres_req\n\n .requested_attributes\n\n .iter()\n\n .filter(|&(referent, info)| !is_self_attested(&referent, &info, self_attested_attrs))\n\n .map(|(referent, info)| (referent.to_string(), info.clone()))\n\n .collect();\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 67, "score": 65357.70351761738 }, { "content": "fn get_predicates_for_credential(\n\n sub_proof_index: usize,\n\n requested_proof: &RequestedProof,\n\n pres_req: &PresentationRequestPayload,\n\n) -> Result<Vec<PredicateInfo>> {\n\n trace!(\"_get_predicates_for_credential >>> sub_proof_index: {:?}, requested_credentials: {:?}, pres_req: {:?}\",\n\n sub_proof_index, requested_proof, pres_req);\n\n\n\n let predicates_for_credential = requested_proof\n\n .predicates\n\n .iter()\n\n .filter(|&(predicate_referent, requested_referent)| {\n\n sub_proof_index == requested_referent.sub_proof_index as usize\n\n && pres_req\n\n .requested_predicates\n\n .contains_key(predicate_referent)\n\n })\n\n .map(|(predicate_referent, _)| pres_req.requested_predicates[predicate_referent].clone())\n\n .collect::<Vec<PredicateInfo>>();\n\n\n\n trace!(\n\n \"_get_predicates_for_credential <<< predicates_for_credential: {:?}\",\n\n predicates_for_credential\n\n );\n\n\n\n Ok(predicates_for_credential)\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 68, "score": 65357.70351761738 }, { "content": "fn update_requested_proof(\n\n req_attrs_for_credential: Vec<RequestedAttributeInfo>,\n\n req_predicates_for_credential: Vec<RequestedPredicateInfo>,\n\n proof_req: &PresentationRequestPayload,\n\n credential: &Credential,\n\n sub_proof_index: u32,\n\n requested_proof: &mut RequestedProof,\n\n) -> Result<()> {\n\n trace!(\"_update_requested_proof >>> req_attrs_for_credential: {:?}, 
req_predicates_for_credential: {:?}, proof_req: {:?}, credential: {:?}, \\\n\n sub_proof_index: {:?}, requested_proof: {:?}\",\n\n req_attrs_for_credential, req_predicates_for_credential, proof_req, secret!(&credential), sub_proof_index, secret!(&requested_proof));\n\n\n\n for attr_info in req_attrs_for_credential {\n\n if attr_info.revealed {\n\n let attribute = &proof_req.requested_attributes[&attr_info.attr_referent];\n\n\n\n if let Some(name) = &attribute.name {\n\n let attribute_values =\n\n get_credential_values_for_attribute(&credential.values.0, &name).ok_or_else(\n\n || err_msg!(\"Credential value not found for attribute {:?}\", name),\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 69, "score": 65357.70351761738 }, { "content": "#[test]\n\n#[ignore] //FIXME\n\nfn anoncreds_works_for_misused_witness() {\n\n //???\n\n // ignore requested timestamp in proof request\n\n // - provide valid proof for invalid time\n\n // - provide hacked proof: specify requested timestamp, actually use invalid TS\n\n unimplemented!();\n\n}\n\n\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 70, "score": 64335.30056415085 }, { "content": "fn compare_attr_from_proof_and_request(\n\n pres_req: &PresentationRequestPayload,\n\n received_revealed_attrs: &HashMap<String, Identifier>,\n\n received_unrevealed_attrs: &HashMap<String, Identifier>,\n\n received_self_attested_attrs: &HashSet<String>,\n\n received_predicates: &HashMap<String, Identifier>,\n\n) -> Result<()> {\n\n let requested_attrs: HashSet<String> = pres_req.requested_attributes.keys().cloned().collect();\n\n\n\n let received_attrs: HashSet<String> = received_revealed_attrs\n\n .iter()\n\n .chain(received_unrevealed_attrs)\n\n .map(|(r, _)| r.to_string())\n\n .collect::<HashSet<String>>()\n\n .union(&received_self_attested_attrs)\n\n .cloned()\n\n .collect();\n\n\n\n if requested_attrs != received_attrs {\n\n return Err(err_msg!(\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 
71, "score": 64335.30056415085 }, { "content": "fn verify_revealed_attribute_values(\n\n pres_req: &PresentationRequestPayload,\n\n proof: &Presentation,\n\n) -> Result<()> {\n\n for (attr_referent, attr_info) in proof.requested_proof.revealed_attrs.iter() {\n\n let attr_name = pres_req\n\n .requested_attributes\n\n .get(attr_referent)\n\n .as_ref()\n\n .ok_or_else(|| {\n\n err_msg!(\n\n ProofRejected,\n\n \"Attribute with referent \\\"{}\\\" not found in ProofRequests\",\n\n attr_referent\n\n )\n\n })?\n\n .name\n\n .as_ref()\n\n .ok_or_else(|| {\n\n err_msg!(\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 72, "score": 64335.30056415085 }, { "content": "fn get_revealed_attributes_for_credential(\n\n sub_proof_index: usize,\n\n requested_proof: &RequestedProof,\n\n pres_req: &PresentationRequestPayload,\n\n) -> Result<Vec<AttributeInfo>> {\n\n trace!(\"_get_revealed_attributes_for_credential >>> sub_proof_index: {:?}, requested_credentials: {:?}, pres_req: {:?}\",\n\n sub_proof_index, requested_proof, pres_req);\n\n\n\n let mut revealed_attrs_for_credential = requested_proof\n\n .revealed_attrs\n\n .iter()\n\n .filter(|&(attr_referent, ref revealed_attr_info)| {\n\n sub_proof_index == revealed_attr_info.sub_proof_index as usize\n\n && pres_req.requested_attributes.contains_key(attr_referent)\n\n })\n\n .map(|(attr_referent, _)| pres_req.requested_attributes[attr_referent].clone())\n\n .collect::<Vec<AttributeInfo>>();\n\n\n\n revealed_attrs_for_credential.append(\n\n &mut requested_proof\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 73, "score": 64335.30056415085 }, { "content": "fn build_sub_proof_request(\n\n req_attrs_for_credential: &[RequestedAttributeInfo],\n\n req_predicates_for_credential: &[RequestedPredicateInfo],\n\n) -> Result<SubProofRequest> {\n\n trace!(\"_build_sub_proof_request <<< req_attrs_for_credential: {:?}, req_predicates_for_credential: {:?}\",\n\n req_attrs_for_credential, 
req_predicates_for_credential);\n\n\n\n let mut sub_proof_request_builder = CryptoVerifier::new_sub_proof_request_builder()?;\n\n\n\n for attr in req_attrs_for_credential {\n\n if attr.revealed {\n\n if let Some(ref name) = &attr.attr_info.name {\n\n sub_proof_request_builder.add_revealed_attr(&attr_common_view(name))?\n\n } else if let Some(ref names) = &attr.attr_info.names {\n\n for name in names {\n\n sub_proof_request_builder.add_revealed_attr(&attr_common_view(name))?\n\n }\n\n }\n\n }\n\n }\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 74, "score": 64335.30056415085 }, { "content": "fn compare_timestamps_from_proof_and_request(\n\n pres_req: &PresentationRequestPayload,\n\n received_revealed_attrs: &HashMap<String, Identifier>,\n\n received_unrevealed_attrs: &HashMap<String, Identifier>,\n\n received_self_attested_attrs: &HashSet<String>,\n\n received_predicates: &HashMap<String, Identifier>,\n\n) -> Result<()> {\n\n pres_req\n\n .requested_attributes\n\n .iter()\n\n .map(|(referent, info)| {\n\n validate_timestamp(\n\n &received_revealed_attrs,\n\n referent,\n\n &pres_req.non_revoked,\n\n &info.non_revoked,\n\n )\n\n .or_else(|_| {\n\n validate_timestamp(\n\n &received_unrevealed_attrs,\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 75, "score": 64335.30056415085 }, { "content": "fn verify_revealed_attribute_value(\n\n attr_name: &str,\n\n proof: &Presentation,\n\n attr_info: &RevealedAttributeInfo,\n\n) -> Result<()> {\n\n let reveal_attr_encoded = attr_info.encoded.to_string();\n\n let reveal_attr_encoded = Regex::new(\"^0*\")\n\n .unwrap()\n\n .replace_all(&reveal_attr_encoded, \"\")\n\n .to_owned();\n\n let sub_proof_index = attr_info.sub_proof_index as usize;\n\n\n\n let crypto_proof_encoded = proof\n\n .proof\n\n .proofs\n\n .get(sub_proof_index)\n\n .ok_or_else(|| {\n\n err_msg!(\n\n ProofRejected,\n\n \"CryptoProof not found by index \\\"{}\\\"\",\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 76, 
"score": 64335.30056415085 }, { "content": "fn get_credential_values_for_attribute(\n\n credential_attrs: &HashMap<String, AttributeValues>,\n\n requested_attr: &str,\n\n) -> Option<AttributeValues> {\n\n trace!(\n\n \"get_credential_values_for_attribute >>> credential_attrs: {:?}, requested_attr: {:?}\",\n\n secret!(credential_attrs),\n\n requested_attr\n\n );\n\n\n\n let res = credential_attrs\n\n .iter()\n\n .find(|&(ref key, _)| attr_common_view(key) == attr_common_view(&requested_attr))\n\n .map(|(_, values)| values.clone());\n\n\n\n trace!(\n\n \"get_credential_values_for_attribute <<< res: {:?}\",\n\n secret!(&res)\n\n );\n\n\n\n res\n\n}\n\n\n", "file_path": "indy-credx/src/services/prover.rs", "rank": 77, "score": 64335.30056415085 }, { "content": "/// A trait for producing hashes of merkle tree leaves and nodes\n\npub trait TreeHash {\n\n fn hash_leaf<T>(leaf: &T) -> Result<Vec<u8>, ValidationError>\n\n where\n\n T: Hashable;\n\n\n\n fn hash_nodes<T>(left: &T, right: &T) -> Result<Vec<u8>, ValidationError>\n\n where\n\n T: Hashable;\n\n}\n\n\n\nimpl<H: Digest> TreeHash for H {\n\n fn hash_leaf<T>(leaf: &T) -> Result<Vec<u8>, ValidationError>\n\n where\n\n T: Hashable,\n\n {\n\n let mut ctx = Self::new();\n\n ctx.update(&[0x00]);\n\n leaf.update_context(&mut ctx)?;\n\n Ok(ctx.finalize().to_vec())\n\n }\n", "file_path": "indy-utils/src/hash.rs", "rank": 78, "score": 64204.938167998116 }, { "content": "#[test] // IS-1522 restrictions: [], restrictions: {\"$or\": []}\n\nfn anoncreds_works_for_restrictions_as_empty_array() {\n\n Setup::empty();\n\n\n\n //1. Create Issuer wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_attr_value_restriction\")\n\n .unwrap();\n\n\n\n //2. 
Create Prover wallet, gets wallet handle\n\n let (prover_wallet_handle, prover_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_attr_value_restriction\")\n\n .unwrap();\n\n\n\n //3. Issuer creates Schema and Credential Definition\n\n let (schema_id, schema_json, cred_def_id, cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID,\n\n GVT_SCHEMA_NAME,\n\n GVT_SCHEMA_ATTRIBUTES,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 79, "score": 63366.447840127934 }, { "content": "fn check_internal_tag_revealed_value(\n\n key: &str,\n\n tag_value: &str,\n\n attr_value_map: &HashMap<String, Option<&str>>,\n\n) -> Result<()> {\n\n let attr_name = INTERNAL_TAG_MATCHER\n\n .captures(key)\n\n .ok_or_else(|| err_msg!(InvalidState, \"Attribute name became unparseable\",))?\n\n .get(1)\n\n .ok_or_else(|| err_msg!(InvalidState, \"No name has been parsed\",))?\n\n .as_str();\n\n if let Some(Some(revealed_value)) = attr_value_map.get(attr_name) {\n\n if *revealed_value != tag_value {\n\n return Err(err_msg!(\n\n ProofRejected,\n\n \"\\\"{}\\\" values are different: expected: \\\"{}\\\", actual: \\\"{}\\\"\",\n\n key,\n\n tag_value,\n\n revealed_value\n\n ));\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 80, "score": 63366.447840127934 }, { "content": "#[test] // IS-1363 attr::<attribute_name>::value restriction\n\nfn anoncreds_works_for_attr_value_restriction() {\n\n Setup::empty();\n\n\n\n //1. Create Issuer wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_attr_value_restriction\")\n\n .unwrap();\n\n\n\n //2. Create Prover wallet, gets wallet handle\n\n let (prover_wallet_handle, prover_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_attr_value_restriction\")\n\n .unwrap();\n\n\n\n //3. 
Issuer creates Schema and Credential Definition\n\n let (schema_id, schema_json, cred_def_id, cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID,\n\n GVT_SCHEMA_NAME,\n\n GVT_SCHEMA_ATTRIBUTES,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 81, "score": 63366.447840127934 }, { "content": "#[test] // IS-1380\n\nfn anoncreds_fails_for_unmet_attr_value_restrictions() {\n\n Setup::empty();\n\n\n\n //1. Create Issuer wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_fails_for_unmet_attr_value_restrictions\")\n\n .unwrap();\n\n\n\n //2. Create Prover wallet, gets wallet handle\n\n let (prover_wallet_handle, prover_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_fails_for_unmet_attr_value_restrictions\")\n\n .unwrap();\n\n\n\n //3. Issuer creates Schema and Credential Definition\n\n let (schema_id, schema_json, cred_def_id, cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID,\n\n GVT_SCHEMA_NAME,\n\n GVT_SCHEMA_ATTRIBUTES,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 82, "score": 62447.04552613704 }, { "content": "#[test]\n\nfn anoncreds_works_for_multiple_issuer_single_prover() {\n\n Setup::empty();\n\n\n\n //1. Issuer1 creates wallet, gets wallet handles\n\n let (issuer_gvt_wallet_handle, issuer_gvt_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_multiple_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //2. Issuer2 creates wallet, gets wallet handles\n\n let (issuer_xyz_wallet_handle, issuer_xyz_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_multiple_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //3. 
Prover creates wallet, gets wallet handles\n\n let (prover_wallet_handle, prover_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_multiple_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //4. Issuer1 creates GVT Schema and Credential Definition\n\n let (gvt_schema_id, gvt_schema, gvt_cred_def_id, gvt_cred_def_json) =\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 83, "score": 62447.04552613704 }, { "content": "#[test]\n\nfn anoncreds_works_for_requested_attribute_in_upper_case() {\n\n Setup::empty();\n\n\n\n //1. Create Issuer wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_requested_attribute_in_upper_case\",\n\n )\n\n .unwrap();\n\n\n\n //2. Create Prover wallet, gets wallet handle\n\n let (prover_wallet_handle, prover_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_requested_attribute_in_upper_case\",\n\n )\n\n .unwrap();\n\n\n\n //3. 
Issuer creates Schema and Credential Definition\n\n let (schema_id, schema_json, cred_def_id, cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 84, "score": 62447.04552613704 }, { "content": "#[test]\n\nfn anoncreds_works_for_single_issuer_single_prover() {\n\n // Create Issuer pseudo wallet\n\n let mut issuer_wallet = IssuerWallet::default();\n\n\n\n // Create Prover pseudo wallet and master secret\n\n let mut prover_wallet = ProverWallet::default();\n\n\n\n // Issuer creates Schema - would be published to the ledger\n\n let gvt_schema = issuer::create_schema(\n\n &issuer_wallet.did,\n\n GVT_SCHEMA_NAME,\n\n \"1.0\",\n\n GVT_SCHEMA_ATTRIBUTES[..].into(),\n\n None,\n\n )\n\n .expect(\"Error creating gvt schema for issuer\");\n\n\n\n // Issuer creates Credential Definition\n\n let cred_def_parts = issuer::create_credential_definition(\n\n &issuer_wallet.did,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 85, "score": 62447.04552613704 }, { "content": "#[test]\n\nfn anoncreds_works_for_twice_entry_of_credential_for_different_witness() {\n\n Setup::empty();\n\n\n\n // Issuer creates wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_twice_entry_of_credential_for_different_witness\",\n\n )\n\n .unwrap();\n\n\n\n // Prover1 creates wallet, gets wallet handle\n\n let (prover1_wallet_handle, prover1_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_twice_entry_of_credential_for_different_witness\",\n\n )\n\n .unwrap();\n\n\n\n // Prover2 creates wallet, gets wallet handle\n\n let (prover2_wallet_handle, prover2_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_twice_entry_of_credential_for_different_witness\",\n\n )\n\n .unwrap();\n", "file_path": 
"indy-credx/tests/anoncreds_demos.rs", "rank": 86, "score": 61573.401899092045 }, { "content": "#[test]\n\nfn anoncreds_works_for_twice_entry_of_attribute_from_different_credential() {\n\n Setup::empty();\n\n\n\n //1. Issuer1 creates wallet, gets wallet handles\n\n let (issuer_gvt_wallet_handle, issuer_gvt_wallet_config) =\n\n wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_twice_entry_of_attribute_from_different_credential\",\n\n )\n\n .unwrap();\n\n\n\n //2. Issuer2 creates wallet, gets wallet handles\n\n let (issuer_abc_wallet_handle, issuer_abc_wallet_config) =\n\n wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_twice_entry_of_attribute_from_different_credential\",\n\n )\n\n .unwrap();\n\n\n\n //3. Prover creates wallet, gets wallet handles\n\n let (prover_wallet_handle, prover_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_twice_entry_of_attribute_from_different_credential\",\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 87, "score": 61573.401899092045 }, { "content": "#[test]\n\nfn anoncreds_works_for_multiple_requested_predicates_from_one_credential() {\n\n Setup::empty();\n\n\n\n //1. Create Issuer wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_multiple_requested_predicates_from_one_credential\",\n\n )\n\n .unwrap();\n\n\n\n //2. Create Prover wallet, gets wallet handle\n\n let (prover_wallet_handle, prover_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_multiple_requested_predicates_from_one_credential\",\n\n )\n\n .unwrap();\n\n\n\n //3. 
Issuer creates Schema and Credential Definition\n\n let attr_names = r#\"[\"task1\",\n\n \"task2\",\n\n \"task3\",\n\n \"task4\",\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 88, "score": 61573.401899092045 }, { "content": "#[test]\n\nfn anoncreds_works_for_single_issuer_multiple_credentials_single_prover() {\n\n Setup::empty();\n\n\n\n //1. Issuer creates wallet, gets wallet handles\n\n let (issuer_wallet_handle, issuer_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_single_issuer_multiple_credentials_single_prover\",\n\n )\n\n .unwrap();\n\n\n\n //2. Prover creates wallet, gets wallet handles\n\n let (prover_wallet_handle, prover_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_single_issuer_multiple_credentials_single_prover\",\n\n )\n\n .unwrap();\n\n\n\n //3. Issuer creates GVT Schema and Credential Definition\n\n let (gvt_schema_id, gvt_schema, gvt_cred_def_id, gvt_cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 89, "score": 60742.18378772546 }, { "content": "#[test]\n\nfn anoncreds_works_for_single_issuer_single_prover_fully_qualified_ids() {\n\n Setup::empty();\n\n\n\n //1. Create Issuer wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_single_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //2. Create Prover wallet, gets wallet handle\n\n let (prover_wallet_handle, prover_wallet_config) =\n\n wallet::create_and_open_default_wallet(\"anoncreds_works_for_single_issuer_single_prover\")\n\n .unwrap();\n\n\n\n //3. 
Issuer creates Schema and Credential Definition\n\n let (schema_id, schema_json, cred_def_id, cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID_V1,\n\n GVT_SCHEMA_NAME,\n\n GVT_SCHEMA_ATTRIBUTES,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 90, "score": 59950.37407485908 }, { "content": "#[test]\n\nfn anoncreds_works_for_single_fully_qualified_issuer_single_unqualified_prover() {\n\n Setup::empty();\n\n\n\n //1. Create Issuer wallet, gets wallet handle\n\n let (issuer_wallet_handle, issuer_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_single_fully_qualified_issuer_single_unqualified_prover\",\n\n )\n\n .unwrap();\n\n\n\n //2. Create Prover wallet, gets wallet handle\n\n let (prover_wallet_handle, prover_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_single_fully_qualified_issuer_single_unqualified_prover\",\n\n )\n\n .unwrap();\n\n\n\n //3. Issuer creates Schema and Credential Definition\n\n let (schema_id, schema_json, cred_def_id, cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID_V1,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 91, "score": 59950.37407485908 }, { "content": "#[test]\n\nfn anoncreds_works_for_single_issuer_multiple_credentials_single_prover_complex_restriction_2() {\n\n Setup::empty();\n\n\n\n //1. Issuer creates wallet, gets wallet handles\n\n let (issuer_wallet_handle, issuer_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_single_issuer_multiple_credentials_single_issuer2\",\n\n )\n\n .unwrap();\n\n\n\n //2. Prover creates wallet, gets wallet handles\n\n let (prover_wallet_handle, prover_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_single_issuer_multiple_credentials_single_prover2\",\n\n )\n\n .unwrap();\n\n\n\n //3. 
Issuer creates GVT Schema and Credential Definition\n\n let (gvt_schema_id, gvt_schema, gvt_cred_def_id, gvt_cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 92, "score": 59195.235104211635 }, { "content": "#[test]\n\nfn verifier_verify_proof_works_for_proof_does_not_correspond_proof_request_attr_and_predicate() {\n\n Setup::empty();\n\n\n\n // 1. Creates wallet, gets wallet handle\n\n let (wallet_handle, wallet_config) = wallet::create_and_open_default_wallet(\"verifier_verify_proof_works_for_proof_does_not_correspond_proof_request_attr_and_predicate\").unwrap();\n\n\n\n // 2. Issuer creates Schema and Credential Definition\n\n let (schema_id, schema_json, cred_def_id, cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n wallet_handle,\n\n ISSUER_DID,\n\n GVT_SCHEMA_NAME,\n\n GVT_SCHEMA_ATTRIBUTES,\n\n );\n\n\n\n // 3. Prover creates Master Secret\n\n anoncreds::prover_create_master_secret(wallet_handle, COMMON_MASTER_SECRET).unwrap();\n\n\n\n // 4. Issuer issue Credential for Prover\n\n anoncreds::multi_steps_create_credential(\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 93, "score": 59195.235104211635 }, { "content": "#[test]\n\nfn anoncreds_works_for_single_issuer_multiple_credentials_single_prover_complex_restriction_1() {\n\n Setup::empty();\n\n\n\n //1. Issuer creates wallet, gets wallet handles\n\n let (issuer_wallet_handle, issuer_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_single_issuer_multiple_credentials_single_issuer1\",\n\n )\n\n .unwrap();\n\n\n\n //2. Prover creates wallet, gets wallet handles\n\n let (prover_wallet_handle, prover_wallet_config) = wallet::create_and_open_default_wallet(\n\n \"anoncreds_works_for_single_issuer_multiple_credentials_single_prover1\",\n\n )\n\n .unwrap();\n\n\n\n //3. 
Issuer creates GVT Schema and Credential Definition\n\n let (gvt_schema_id, gvt_schema, gvt_cred_def_id, gvt_cred_def_json) =\n\n anoncreds::multi_steps_issuer_preparation(\n\n issuer_wallet_handle,\n\n ISSUER_DID,\n", "file_path": "indy-credx/tests/anoncreds_demos.rs", "rank": 94, "score": 59195.235104211635 }, { "content": "fn received_self_attested_attrs(proof: &Presentation) -> HashSet<String> {\n\n proof\n\n .requested_proof\n\n .self_attested_attrs\n\n .keys()\n\n .cloned()\n\n .collect()\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 95, "score": 49686.05667903338 }, { "content": "fn get_proof_identifier(proof: &Presentation, index: u32) -> Result<Identifier> {\n\n proof\n\n .identifiers\n\n .get(index as usize)\n\n .cloned()\n\n .ok_or_else(|| err_msg!(\"Identifier not found for index: {}\", index))\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 96, "score": 46987.29225154555 }, { "content": "fn received_predicates(proof: &Presentation) -> Result<HashMap<String, Identifier>> {\n\n let mut predicate_identifiers: HashMap<String, Identifier> = HashMap::new();\n\n for (referent, info) in proof.requested_proof.predicates.iter() {\n\n predicate_identifiers.insert(\n\n referent.to_string(),\n\n get_proof_identifier(proof, info.sub_proof_index)?,\n\n );\n\n }\n\n Ok(predicate_identifiers)\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 97, "score": 46987.29225154555 }, { "content": "fn received_unrevealed_attrs(proof: &Presentation) -> Result<HashMap<String, Identifier>> {\n\n let mut unrevealed_identifiers: HashMap<String, Identifier> = HashMap::new();\n\n for (referent, info) in proof.requested_proof.unrevealed_attrs.iter() {\n\n unrevealed_identifiers.insert(\n\n referent.to_string(),\n\n get_proof_identifier(proof, info.sub_proof_index)?,\n\n );\n\n }\n\n Ok(unrevealed_identifiers)\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 98, "score": 46232.153280898114 
}, { "content": "fn received_revealed_attrs(proof: &Presentation) -> Result<HashMap<String, Identifier>> {\n\n let mut revealed_identifiers: HashMap<String, Identifier> = HashMap::new();\n\n for (referent, info) in proof.requested_proof.revealed_attrs.iter() {\n\n revealed_identifiers.insert(\n\n referent.to_string(),\n\n get_proof_identifier(proof, info.sub_proof_index)?,\n\n );\n\n }\n\n for (referent, infos) in proof.requested_proof.revealed_attr_groups.iter() {\n\n revealed_identifiers.insert(\n\n referent.to_string(),\n\n get_proof_identifier(proof, infos.sub_proof_index)?,\n\n );\n\n }\n\n Ok(revealed_identifiers)\n\n}\n\n\n", "file_path": "indy-credx/src/services/verifier.rs", "rank": 99, "score": 46232.153280898114 } ]
Rust
rewryte-generator/src/sqlite.rs
Txuritan/rewryte
3ddcd0d8b374cbb8c895fe0282c828490641c5f6
use { crate::Error, rewryte_parser::models::{Column, ColumnDefault, Enum, ForeignKey, Item, Schema, Table, Types}, std::io, }; pub fn write_schema(schema: &Schema, writer: &mut impl io::Write) -> Result<(), Error> { for (i, item) in schema.items.iter().enumerate() { write_item(item, writer)?; writeln!(writer)?; if i != schema.items.len() - 1 { writeln!(writer)?; } } Ok(()) } pub fn write_item(item: &Item, writer: &mut impl io::Write) -> Result<(), Error> { match &item { Item::Enum(decl) => write_enum(decl, writer)?, Item::Table(decl) => write_table(decl, writer)?, } Ok(()) } pub fn write_enum(_decl: &Enum, _writer: &mut impl io::Write) -> Result<(), Error> { Ok(()) } pub fn write_table(decl: &Table, writer: &mut impl io::Write) -> Result<(), Error> { write!(writer, "CREATE TABLE")?; if decl.not_exists { write!(writer, " IF NOT EXISTS")?; } write!(writer, " {} (", decl.name)?; writeln!(writer)?; for column in &decl.columns { write_column(column, writer)?; write!(writer, ",")?; writeln!(writer)?; } write!(writer, " PRIMARY KEY (")?; for (i, primary) in decl.primary_keys.iter().enumerate() { write!(writer, "{}", primary)?; if i != decl.primary_keys.len() - 1 { write!(writer, ", ")?; } } write!(writer, ")")?; if !decl.foreign_keys.is_empty() { write!(writer, ",")?; writeln!(writer)?; for (i, foreign_key) in decl.foreign_keys.iter().enumerate() { write_foreign_key(foreign_key, writer)?; if i != decl.foreign_keys.len() - 1 { write!(writer, ",")?; writeln!(writer)?; } } if decl.unique_keys.is_empty() { writeln!(writer)?; } } else if decl.unique_keys.is_empty() { writeln!(writer)?; } if !decl.unique_keys.is_empty() { write!(writer, ",")?; writeln!(writer)?; write!(writer, " UNIQUE (")?; for (i, unique) in decl.unique_keys.iter().enumerate() { write!(writer, "{}", unique)?; if i != decl.unique_keys.len() - 1 { write!(writer, ", ")?; } } write!(writer, ")")?; writeln!(writer)?; } write!(writer, ");")?; Ok(()) } pub fn write_column(column: &Column, writer: &mut impl 
io::Write) -> Result<(), Error> { write!(writer, " {} ", column.name,)?; write_types(&column.typ, writer)?; if !column.null { write!(writer, " NOT NULL")?; } write_column_default(&column.default, writer)?; Ok(()) } pub fn write_types(types: &Types, writer: &mut impl io::Write) -> Result<(), Error> { write!( writer, "{}", match types { Types::Char | Types::Text => "TEXT", Types::Varchar => "VARCHAR", Types::Number | Types::SmallInt | Types::MediumInt | Types::Int | Types::Serial => { "INTEGER" } Types::BigInt => "BIGINT", Types::Float | Types::Real | Types::Numeric => "REAL", Types::Decimal => "DECIMAL", Types::DateTime => "DATETIME", Types::Boolean => "BOOLEAN", Types::Raw(raw) => raw, } )?; Ok(()) } pub fn write_column_default( column_default: &ColumnDefault, writer: &mut impl io::Write, ) -> Result<(), Error> { if column_default != &ColumnDefault::None { write!(writer, " DEFAULT")?; match column_default { ColumnDefault::Now => { write!(writer, " (DATETIME('now', 'utc'))")?; } ColumnDefault::Null => { write!(writer, " NULL")?; } ColumnDefault::Raw(raw) => { write!(writer, " {}", raw)?; } ColumnDefault::None => unreachable!(), } } Ok(()) } pub fn write_foreign_key( foreign_key: &ForeignKey, writer: &mut impl io::Write, ) -> Result<(), Error> { write!( writer, " FOREIGN KEY ({}) REFERENCES {}({}) ON UPDATE {} ON DELETE {}", foreign_key.local, foreign_key.table, foreign_key.foreign, foreign_key.update, foreign_key.delete, )?; Ok(()) } #[cfg(test)] mod tests { use {crate::sqlite::write_table, rewryte_parser::models::*}; #[test] fn simple() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Id", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Name", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Id"], foreign_keys: vec![], unique_keys: vec![], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = 
String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Id TEXT NOT NULL, Name TEXT NOT NULL, PRIMARY KEY (Id) );", utf8_buff.as_str() ); } #[test] fn multiple_primary_keys() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Key", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Value", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Key", "Value"], foreign_keys: vec![], unique_keys: vec![], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Key TEXT NOT NULL, Value TEXT NOT NULL, PRIMARY KEY (Key, Value) );", utf8_buff.as_str() ); } #[test] fn foreign_keys() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Id", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Name", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Other", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Id"], foreign_keys: vec![ForeignKey { local: "Other", table: "Other", foreign: "Id", delete: Action::default(), update: Action::default(), }], unique_keys: vec![], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Id TEXT NOT NULL, Name TEXT NOT NULL, Other TEXT NOT NULL, PRIMARY KEY (Id), FOREIGN KEY (Other) REFERENCES Other(Id) ON UPDATE NO ACTION ON DELETE NO ACTION );", utf8_buff.as_str() ); } #[test] fn unique_keys() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Id", typ: 
Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Key", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Value", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Id"], foreign_keys: vec![], unique_keys: vec!["Key"], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Id TEXT NOT NULL, Key TEXT NOT NULL, Value TEXT NOT NULL, PRIMARY KEY (Id), UNIQUE (Key) );", utf8_buff.as_str() ); } #[test] fn unique_keys_foreign_keys() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Id", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Name", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Other", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Id"], foreign_keys: vec![ForeignKey { local: "Other", table: "Other", foreign: "Id", delete: Action::default(), update: Action::default(), }], unique_keys: vec!["Name"], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Id TEXT NOT NULL, Name TEXT NOT NULL, Other TEXT NOT NULL, PRIMARY KEY (Id), FOREIGN KEY (Other) REFERENCES Other(Id) ON UPDATE NO ACTION ON DELETE NO ACTION, UNIQUE (Name) );", utf8_buff.as_str() ); } }
use { crate::Error, rewryte_parser::models::{Column, ColumnDefault, Enum, ForeignKey, Item, Schema, Table, Types}, std::io, }; pub fn write_schema(schema: &Schema, writer: &mut impl io::Write) -> Result<(), Error> { for (i, item) in schema.items.iter().enumerate() { write_item(item, writer)?; writeln!(writer)?; if i != schema.items.len() - 1 { writeln!(writer)?; } } Ok(()) }
pub fn write_enum(_decl: &Enum, _writer: &mut impl io::Write) -> Result<(), Error> { Ok(()) } pub fn write_table(decl: &Table, writer: &mut impl io::Write) -> Result<(), Error> { write!(writer, "CREATE TABLE")?; if decl.not_exists { write!(writer, " IF NOT EXISTS")?; } write!(writer, " {} (", decl.name)?; writeln!(writer)?; for column in &decl.columns { write_column(column, writer)?; write!(writer, ",")?; writeln!(writer)?; } write!(writer, " PRIMARY KEY (")?; for (i, primary) in decl.primary_keys.iter().enumerate() { write!(writer, "{}", primary)?; if i != decl.primary_keys.len() - 1 { write!(writer, ", ")?; } } write!(writer, ")")?; if !decl.foreign_keys.is_empty() { write!(writer, ",")?; writeln!(writer)?; for (i, foreign_key) in decl.foreign_keys.iter().enumerate() { write_foreign_key(foreign_key, writer)?; if i != decl.foreign_keys.len() - 1 { write!(writer, ",")?; writeln!(writer)?; } } if decl.unique_keys.is_empty() { writeln!(writer)?; } } else if decl.unique_keys.is_empty() { writeln!(writer)?; } if !decl.unique_keys.is_empty() { write!(writer, ",")?; writeln!(writer)?; write!(writer, " UNIQUE (")?; for (i, unique) in decl.unique_keys.iter().enumerate() { write!(writer, "{}", unique)?; if i != decl.unique_keys.len() - 1 { write!(writer, ", ")?; } } write!(writer, ")")?; writeln!(writer)?; } write!(writer, ");")?; Ok(()) } pub fn write_column(column: &Column, writer: &mut impl io::Write) -> Result<(), Error> { write!(writer, " {} ", column.name,)?; write_types(&column.typ, writer)?; if !column.null { write!(writer, " NOT NULL")?; } write_column_default(&column.default, writer)?; Ok(()) } pub fn write_types(types: &Types, writer: &mut impl io::Write) -> Result<(), Error> { write!( writer, "{}", match types { Types::Char | Types::Text => "TEXT", Types::Varchar => "VARCHAR", Types::Number | Types::SmallInt | Types::MediumInt | Types::Int | Types::Serial => { "INTEGER" } Types::BigInt => "BIGINT", Types::Float | Types::Real | Types::Numeric => "REAL", 
Types::Decimal => "DECIMAL", Types::DateTime => "DATETIME", Types::Boolean => "BOOLEAN", Types::Raw(raw) => raw, } )?; Ok(()) } pub fn write_column_default( column_default: &ColumnDefault, writer: &mut impl io::Write, ) -> Result<(), Error> { if column_default != &ColumnDefault::None { write!(writer, " DEFAULT")?; match column_default { ColumnDefault::Now => { write!(writer, " (DATETIME('now', 'utc'))")?; } ColumnDefault::Null => { write!(writer, " NULL")?; } ColumnDefault::Raw(raw) => { write!(writer, " {}", raw)?; } ColumnDefault::None => unreachable!(), } } Ok(()) } pub fn write_foreign_key( foreign_key: &ForeignKey, writer: &mut impl io::Write, ) -> Result<(), Error> { write!( writer, " FOREIGN KEY ({}) REFERENCES {}({}) ON UPDATE {} ON DELETE {}", foreign_key.local, foreign_key.table, foreign_key.foreign, foreign_key.update, foreign_key.delete, )?; Ok(()) } #[cfg(test)] mod tests { use {crate::sqlite::write_table, rewryte_parser::models::*}; #[test] fn simple() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Id", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Name", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Id"], foreign_keys: vec![], unique_keys: vec![], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Id TEXT NOT NULL, Name TEXT NOT NULL, PRIMARY KEY (Id) );", utf8_buff.as_str() ); } #[test] fn multiple_primary_keys() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Key", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Value", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Key", "Value"], foreign_keys: vec![], unique_keys: vec![], }; let mut buff 
= Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Key TEXT NOT NULL, Value TEXT NOT NULL, PRIMARY KEY (Key, Value) );", utf8_buff.as_str() ); } #[test] fn foreign_keys() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Id", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Name", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Other", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Id"], foreign_keys: vec![ForeignKey { local: "Other", table: "Other", foreign: "Id", delete: Action::default(), update: Action::default(), }], unique_keys: vec![], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Id TEXT NOT NULL, Name TEXT NOT NULL, Other TEXT NOT NULL, PRIMARY KEY (Id), FOREIGN KEY (Other) REFERENCES Other(Id) ON UPDATE NO ACTION ON DELETE NO ACTION );", utf8_buff.as_str() ); } #[test] fn unique_keys() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Id", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Key", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Value", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Id"], foreign_keys: vec![], unique_keys: vec!["Key"], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Id TEXT NOT NULL, Key TEXT NOT NULL, 
Value TEXT NOT NULL, PRIMARY KEY (Id), UNIQUE (Key) );", utf8_buff.as_str() ); } #[test] fn unique_keys_foreign_keys() { let table = Table { name: "Example", not_exists: true, columns: vec![ Column { name: "Id", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Name", typ: Types::Text, null: false, default: ColumnDefault::None, }, Column { name: "Other", typ: Types::Text, null: false, default: ColumnDefault::None, }, ], primary_keys: vec!["Id"], foreign_keys: vec![ForeignKey { local: "Other", table: "Other", foreign: "Id", delete: Action::default(), update: Action::default(), }], unique_keys: vec!["Name"], }; let mut buff = Vec::new(); write_table(&table, &mut buff).expect("Unable to write table to buffer"); let utf8_buff = String::from_utf8(buff).expect("Unable to convert buff into string"); assert_eq!( "CREATE TABLE IF NOT EXISTS Example ( Id TEXT NOT NULL, Name TEXT NOT NULL, Other TEXT NOT NULL, PRIMARY KEY (Id), FOREIGN KEY (Other) REFERENCES Other(Id) ON UPDATE NO ACTION ON DELETE NO ACTION, UNIQUE (Name) );", utf8_buff.as_str() ); } }
pub fn write_item(item: &Item, writer: &mut impl io::Write) -> Result<(), Error> { match &item { Item::Enum(decl) => write_enum(decl, writer)?, Item::Table(decl) => write_table(decl, writer)?, } Ok(()) }
function_block-full_function
[ { "content": "pub fn write_item(item: &Item, writer: &mut impl io::Write) -> Result<(), Error> {\n\n match &item {\n\n Item::Enum(decl) => write_enum(decl, writer)?,\n\n Item::Table(decl) => write_table(decl, writer)?,\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 1, "score": 274015.28947703855 }, { "content": "pub fn write_item(item: &Item, writer: &mut impl io::Write) -> Result<(), Error> {\n\n match &item {\n\n Item::Enum(decl) => write_enum(decl, writer)?,\n\n Item::Table(decl) => write_table(decl, writer)?,\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 2, "score": 274015.28947703855 }, { "content": "pub fn write_schema(schema: &Schema, writer: &mut impl io::Write) -> Result<(), Error> {\n\n for item in &schema.items {\n\n write_item(item, writer)?;\n\n\n\n writeln!(writer)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 3, "score": 273820.8198636754 }, { "content": "pub fn write_schema(schema: &Schema, writer: &mut impl io::Write) -> Result<(), Error> {\n\n for item in &schema.items {\n\n write_item(item, writer)?;\n\n\n\n writeln!(writer)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 5, "score": 273820.8198636754 }, { "content": "// TODO: figure out how to handle `IF NOT EXISTS`\n\npub fn write_enum(decl: &Enum, writer: &mut impl io::Write) -> Result<(), Error> {\n\n write!(writer, \"CREATE TYPE {} AS ENUM (\", decl.name)?;\n\n\n\n writeln!(writer)?;\n\n\n\n for (i, variant) in decl.variants.iter().enumerate() {\n\n write!(writer, \" '{}'\", variant)?;\n\n\n\n if i != decl.variants.len() - 1 {\n\n write!(writer, \",\")?;\n\n }\n\n\n\n writeln!(writer)?;\n\n }\n\n\n\n write!(writer, \");\")?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 6, "score": 261389.868511807 }, { "content": "pub fn write_item(item: &Item, writer: &mut impl 
io::Write, options: Options) -> Result<(), Error> {\n\n match &item {\n\n Item::Enum(decl) => write_enum(decl, writer, options)?,\n\n Item::Table(decl) => write_table(decl, writer, options)?,\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 7, "score": 255852.72481301945 }, { "content": "pub fn write_types(types: &Types, writer: &mut impl io::Write) -> Result<(), Error> {\n\n write!(\n\n writer,\n\n \"{}\",\n\n match types {\n\n Types::Char => r#\"\"char\"\"#,\n\n Types::Text => \"TEXT\",\n\n Types::Varchar => \"VARCHAR\",\n\n Types::SmallInt => \"SMALLINT\",\n\n Types::Number | Types::Int | Types::MediumInt | Types::Serial => \"INT\",\n\n Types::BigInt => \"BIGINT\",\n\n Types::Float | Types::Real => \"REAL\",\n\n Types::Numeric => \"NUMERIC\",\n\n Types::Decimal => \"DECIMAL\",\n\n Types::DateTime => \"TIMESTAMP WITH TIME ZONE\",\n\n Types::Boolean => \"BOOL\",\n\n Types::Raw(raw) => raw,\n\n }\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 8, "score": 253100.9220738326 }, { "content": "pub fn write_enum(_decl: &Enum, _writer: &mut impl io::Write) -> Result<(), Error> {\n\n todo!()\n\n}\n\n\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 11, "score": 248201.82109351817 }, { "content": "pub fn write_table(decl: &Table, writer: &mut impl io::Write) -> Result<(), Error> {\n\n write!(writer, \"CREATE TABLE\")?;\n\n\n\n if decl.not_exists {\n\n write!(writer, \" IF NOT EXISTS\")?;\n\n }\n\n\n\n write!(writer, \" {} (\", decl.name)?;\n\n\n\n writeln!(writer)?;\n\n\n\n for column in &decl.columns {\n\n write_column(column, writer)?;\n\n\n\n write!(writer, \",\")?;\n\n\n\n writeln!(writer)?;\n\n }\n\n\n\n write!(writer, \" PRIMARY KEY (\")?;\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 13, "score": 247838.8401925558 }, { "content": "pub fn write_enum(decl: &Enum, writer: &mut impl io::Write, options: Options) -> Result<(), Error> {\n\n let ident = 
quote::format_ident!(\"{}\", decl.name);\n\n\n\n let juniper_derive = if options.juniper {\n\n if cfg!(feature = \"feature-gate-juniper\") {\n\n quote::quote! {\n\n #[cfg_attr(feature = \"rewryte-juniper\", derive(juniper::GraphQLEnum))]\n\n }\n\n } else {\n\n quote::quote! {\n\n #[derive(juniper::GraphQLEnum)]\n\n }\n\n }\n\n } else {\n\n quote::quote! {}\n\n };\n\n\n\n let serde_derive = if options.serde {\n\n if cfg!(feature = \"feature-gate-serde\") {\n\n quote::quote! {\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 14, "score": 243437.83077761557 }, { "content": "pub fn write_types(_types: &Types, _writer: &mut impl io::Write) -> Result<(), Error> {\n\n todo!()\n\n}\n\n\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 15, "score": 239912.8746555438 }, { "content": "pub fn write_table(_decl: &Table, _writer: &mut impl io::Write) -> Result<(), Error> {\n\n todo!()\n\n}\n\n\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 16, "score": 234650.79277426694 }, { "content": "pub fn write_column(column: &Column, writer: &mut impl io::Write) -> Result<(), Error> {\n\n write!(writer, \" {} \", column.name,)?;\n\n\n\n write_types(&column.typ, writer)?;\n\n\n\n if !column.null {\n\n write!(writer, \" NOT NULL\")?;\n\n }\n\n\n\n write_column_default(&column.default, writer)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 17, "score": 219750.27292975038 }, { "content": "pub fn write_column(_column: &Column, _writer: &mut impl io::Write) -> Result<(), Error> {\n\n todo!()\n\n}\n\n\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 19, "score": 206562.22551146155 }, { "content": "#[allow(dead_code)]\n\npub fn parse<'i>(ctx: &mut Context, input: &'i str) -> Result<Schema<'i>, Error> {\n\n let mut pairs: Pairs<'i, Rule> = Parser::parse(Rule::schema, input)?;\n\n\n\n let mut items = Vec::new();\n\n\n\n let pair = match pairs.next() {\n\n Some(pair) if pair.as_rule() == Rule::schema => pair,\n\n 
Some(pair) if pair.as_rule() == Rule::EOI => return Ok(Schema { items }),\n\n Some(pair) => {\n\n ctx.diags.push(\n\n Diagnostic::error()\n\n .with_message(\"Unexpected token\")\n\n .with_labels(vec![Label::primary(ctx.file_id, span_range_single(&pair))\n\n .with_message(format!(\n\n \"expected `schema`, found `{:?}`\",\n\n pair.as_rule()\n\n ))]),\n\n );\n\n\n\n return Err(Error::UnexpectedPair(pair.as_span().into()));\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 20, "score": 188430.60865359317 }, { "content": "#[inline]\n\nfn parse_enum<'i>(ctx: &mut Context, pair: Pair<'i, Rule>) -> Result<Enum<'i>, Error> {\n\n debug_assert!(\n\n pair.as_rule() == Rule::decl_enum,\n\n \"The root pair must be a `decl_enum` to be able to parse a enum declaration\"\n\n );\n\n\n\n let inner_span = pair.as_span();\n\n let mut inner: Pairs<'i, Rule> = pair.into_inner();\n\n\n\n let name = match inner.next() {\n\n Some(pair) if pair.as_rule() == Rule::ident => pair.as_str(),\n\n Some(pair) => {\n\n ctx.diags.push(\n\n Diagnostic::error()\n\n .with_message(\"Unexpected token\")\n\n .with_labels(vec![Label::primary(ctx.file_id, span_range_single(&pair))\n\n .with_message(format!(\n\n \"expected `ident`, found `{:?}`\",\n\n pair.as_rule()\n\n ))]),\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 21, "score": 163709.5343182789 }, { "content": "#[cfg(feature = \"build-script\")]\n\npub fn models_to_writer<W, S>(writer: &mut W, schema: S, extra: Option<&[&str]>)\n\nwhere\n\n W: Write,\n\n S: AsRef<Path>,\n\n{\n\n let path: &Path = schema.as_ref();\n\n\n\n let contents = match fs::read_to_string(&path) {\n\n Ok(file) => file,\n\n Err(err) if err.kind() == ErrorKind::NotFound => {\n\n panic!(\"File does not exist: {}\", path.display());\n\n }\n\n Err(err) => {\n\n panic!(\"{}: {:?}\", path.display(), err);\n\n }\n\n };\n\n\n\n let contents_str = contents.as_str();\n\n\n\n let mut files = SimpleFiles::new();\n", "file_path": "rewryte/src/lib.rs", "rank": 22, "score": 
163086.28597452803 }, { "content": "#[inline]\n\nfn parse_table<'i>(ctx: &mut Context, pair: Pair<'i, Rule>) -> Result<Table<'i>, Error> {\n\n debug_assert!(\n\n pair.as_rule() == Rule::decl_table,\n\n \"The root pair must be a `decl_table` to be able to parse a table declaration\"\n\n );\n\n\n\n let inner_span = pair.as_span();\n\n let mut inner: Pairs<'i, Rule> = pair.into_inner();\n\n\n\n let name = match inner.next() {\n\n Some(pair) if pair.as_rule() == Rule::ident => pair.as_str(),\n\n Some(pair) => {\n\n ctx.diags.push(\n\n Diagnostic::error()\n\n .with_message(\"Unexpected token\")\n\n .with_labels(vec![Label::primary(ctx.file_id, span_range_single(&pair))\n\n .with_message(format!(\n\n \"expected `ident`, found `{:?}`\",\n\n pair.as_rule()\n\n ))]),\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 23, "score": 151416.72589867964 }, { "content": "pub fn write_table(\n\n decl: &Table,\n\n writer: &mut impl io::Write,\n\n options: Options,\n\n) -> Result<(), Error> {\n\n let ident = quote::format_ident!(\"{}\", decl.name);\n\n\n\n let juniper_derive = if options.juniper {\n\n if cfg!(feature = \"feature-gate-juniper\") {\n\n quote::quote! {\n\n #[cfg_attr(feature = \"rewryte-juniper\", derive(juniper::GraphQLObject))]\n\n }\n\n } else {\n\n quote::quote! {\n\n #[derive(juniper::GraphQLObject)]\n\n }\n\n }\n\n } else {\n\n quote::quote! 
{}\n\n };\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 24, "score": 121226.14985396023 }, { "content": "pub fn write_schema(\n\n schema: &Schema,\n\n writer: &mut impl io::Write,\n\n options: Options,\n\n) -> Result<(), Error> {\n\n for item in &schema.items {\n\n write_item(item, writer, options)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 25, "score": 121175.3210147006 }, { "content": "#[inline]\n\nfn parse_modifier_ref<'i>(ctx: &mut Context, pair: Pair<'i, Rule>) -> Result<Modifier<'i>, Error> {\n\n debug_assert!(\n\n pair.as_rule() == Rule::modifier_ref,\n\n \"The root pair must be a `modifier_ref` to be able to parse column ref modifier\"\n\n );\n\n\n\n let inner_span = pair.as_span();\n\n let mut inner: Pairs<'i, Rule> = pair.into_inner();\n\n\n\n let table = match inner.next() {\n\n Some(pair) if pair.as_rule() == Rule::ident => pair.as_str(),\n\n Some(pair) => {\n\n ctx.diags.push(\n\n Diagnostic::error()\n\n .with_message(\"Unexpected token\")\n\n .with_labels(vec![Label::primary(ctx.file_id, span_range_single(&pair))\n\n .with_message(format!(\n\n \"expected `ident`, found `{:?}`\",\n\n pair.as_rule()\n\n ))]),\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 26, "score": 120450.06874049944 }, { "content": "#[proc_macro]\n\npub fn schema(input: TokenStream) -> TokenStream {\n\n let input = match syn::parse::<FormatInput>(input) {\n\n Ok(syntax_tree) => syntax_tree,\n\n Err(err) => return TokenStream::from(err.to_compile_error()),\n\n };\n\n\n\n let contents = match fs::read_to_string(&input.path) {\n\n Ok(file) => file,\n\n Err(err) if err.kind() == ErrorKind::NotFound => {\n\n return error(\n\n input.lit_path,\n\n format!(\"File does not exist: {}\", input.path.display()),\n\n );\n\n }\n\n Err(err) => {\n\n return error(input.lit_path, err);\n\n }\n\n };\n\n\n\n let contents_str = contents.as_str();\n", "file_path": "rewryte-macro/src/lib.rs", "rank": 27, "score": 120197.64626495038 }, { 
"content": "fn error(path: LitStr, msg: impl std::fmt::Display) -> TokenStream {\n\n TokenStream::from(syn::Error::new_spanned(path, msg).to_compile_error())\n\n}\n\n\n", "file_path": "rewryte-macro/src/lib.rs", "rank": 28, "score": 99804.21997150398 }, { "content": "pub fn write_column_default(\n\n column_default: &ColumnDefault,\n\n writer: &mut impl io::Write,\n\n) -> Result<(), Error> {\n\n if column_default != &ColumnDefault::None {\n\n write!(writer, \" DEFAULT\")?;\n\n\n\n match column_default {\n\n ColumnDefault::Now => {\n\n write!(writer, \" (timezone('utc', now()))\")?;\n\n }\n\n ColumnDefault::Null => {\n\n write!(writer, \" NULL\")?;\n\n }\n\n ColumnDefault::Raw(raw) => {\n\n write!(writer, \" {}\", raw)?;\n\n }\n\n ColumnDefault::None => unreachable!(),\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 30, "score": 93286.63837327066 }, { "content": "pub fn write_foreign_key(\n\n foreign_key: &ForeignKey,\n\n writer: &mut impl io::Write,\n\n) -> Result<(), Error> {\n\n write!(\n\n writer,\n\n \" FOREIGN KEY ({}) REFERENCES {}({}) ON UPDATE {} ON DELETE {}\",\n\n foreign_key.local,\n\n foreign_key.table,\n\n foreign_key.foreign,\n\n foreign_key.update,\n\n foreign_key.delete,\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n mod enums {\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 31, "score": 93286.63837327066 }, { "content": "pub fn write_foreign_key(\n\n _foreign_key: &ForeignKey,\n\n _writer: &mut impl io::Write,\n\n) -> Result<(), Error> {\n\n todo!()\n\n}\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 32, "score": 93286.63837327066 }, { "content": "pub fn write_column_default(\n\n _column_default: &ColumnDefault,\n\n _writer: &mut impl io::Write,\n\n) -> Result<(), Error> {\n\n todo!()\n\n}\n\n\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 33, "score": 93286.63837327066 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let matches = 
clap::App::new(clap::crate_name!())\n\n .version(clap::crate_version!())\n\n .author(clap::crate_authors!())\n\n .about(clap::crate_description!())\n\n .arg(\n\n clap::Arg::with_name(\"input\")\n\n .long(\"input\")\n\n .short(\"i\")\n\n .value_name(\"FILE\")\n\n .help(\"The required DAL schema file\"),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"output\")\n\n .long(\"output\")\n\n .short(\"o\")\n\n .value_name(\"FILE\")\n\n .help(\"The file to write the transformed schema to\")\n\n .conflicts_with(\"check\"),\n\n )\n", "file_path": "rewryte-cli/src/main.rs", "rank": 35, "score": 92618.79624179452 }, { "content": "#[proc_macro]\n\npub fn models(input: TokenStream) -> TokenStream {\n\n let input = match syn::parse::<ModelInput>(input) {\n\n Ok(syntax_tree) => syntax_tree,\n\n Err(err) => return TokenStream::from(err.to_compile_error()),\n\n };\n\n\n\n let contents = match fs::read_to_string(&input.path) {\n\n Ok(file) => file,\n\n Err(err) if err.kind() == ErrorKind::NotFound => {\n\n return error(\n\n input.lit_path,\n\n format!(\"File does not exist: {}\", input.path.display()),\n\n );\n\n }\n\n Err(err) => {\n\n return error(input.lit_path, err);\n\n }\n\n };\n\n\n\n let contents_str = contents.as_str();\n", "file_path": "rewryte-macro/src/lib.rs", "rank": 36, "score": 78304.51860191964 }, { "content": "pub trait FromRow {\n\n fn from_row(row: &Row<'_>) -> anyhow::Result<Self>\n\n where\n\n Self: Sized;\n\n}\n\n\n\nmacro_rules! impl_from_row {\n\n ($( $from:ty, )*) => {\n\n $(\n\n impl FromRow for $from {\n\n fn from_row(row: &Row) -> anyhow::Result<Self>\n\n where\n\n Self: Sized,\n\n {\n\n row.get(0)\n\n .context(concat!(\"Failed to get data for row index 0: `\", stringify!($from), \"`\"))\n\n }\n\n }\n\n )*\n\n };\n", "file_path": "rewryte/src/sqlite.rs", "rank": 37, "score": 50092.85512873801 }, { "content": "pub trait FromRow {\n\n fn from_row(row: Row) -> anyhow::Result<Self>\n\n where\n\n Self: Sized;\n\n}\n\n\n\nmacro_rules! 
impl_from_row {\n\n ($( $from:ty, )*) => {\n\n $(\n\n impl FromRow for $from {\n\n fn from_row(row: Row) -> anyhow::Result<Self>\n\n where\n\n Self: Sized,\n\n {\n\n row.try_get(0)\n\n .context(concat!(\"Failed to get data for row index 0: `\", stringify!($from), \"`\"))\n\n }\n\n }\n\n )*\n\n };\n", "file_path": "rewryte/src/postgres.rs", "rank": 38, "score": 50092.85512873801 }, { "content": "fn slice_iter<'a>(\n\n s: &'a [&'a (dyn ToSql + Sync)],\n\n) -> impl ExactSizeIterator<Item = &'a dyn ToSql> + 'a {\n\n s.iter().map(|s| *s as _)\n\n}\n\n\n", "file_path": "rewryte/src/postgres.rs", "rank": 39, "score": 49472.03824078913 }, { "content": "pub trait StatementExt {\n\n fn query<T, P, F>(&mut self, params: P, f: F) -> anyhow::Result<MappedRowsExt<'_, F>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnMut(&Row<'_>) -> anyhow::Result<T>;\n\n\n\n fn query_opt<T, P, F>(\n\n &mut self,\n\n params: P,\n\n f: F,\n\n ) -> anyhow::Result<Option<MappedRowsExt<'_, F>>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnMut(&Row<'_>) -> anyhow::Result<T>;\n\n\n\n fn type_query<T, P>(&mut self, params: P) -> anyhow::Result<TypeMappedRowsExt<'_, T>>\n\n where\n\n P: IntoIterator,\n", "file_path": "rewryte/src/sqlite.rs", "rank": 40, "score": 48955.843147128704 }, { "content": "#[async_trait::async_trait]\n\npub trait ClientExt {\n\n async fn type_query<T, S>(\n\n &self,\n\n statement: &S,\n\n params: &[&(dyn ToSql + Sync)],\n\n ) -> anyhow::Result<Vec<T>>\n\n where\n\n S: ?Sized + ToStatement + Send + Sync,\n\n T: FromRow + Send + Sync;\n\n\n\n async fn type_query_opt<T, S>(\n\n &self,\n\n statement: &S,\n\n params: &[&(dyn ToSql + Sync)],\n\n ) -> anyhow::Result<Option<Vec<T>>>\n\n where\n\n S: ?Sized + ToStatement + Send + Sync,\n\n T: FromRow + Send + Sync;\n\n\n\n async fn type_query_one<T, S>(\n", "file_path": "rewryte/src/postgres.rs", "rank": 41, "score": 48955.843147128704 }, { "content": "pub trait ConnectionExt {\n\n fn query_one<T, P, 
F>(&self, sql: &str, params: P, f: F) -> anyhow::Result<T>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnOnce(&Row<'_>) -> anyhow::Result<T>;\n\n\n\n fn query_one_opt<T, P, F>(&self, sql: &str, params: P, f: F) -> anyhow::Result<Option<T>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnOnce(&Row<'_>) -> anyhow::Result<T>;\n\n\n\n fn type_query_one<T, P>(&self, sql: &str, params: P) -> anyhow::Result<T>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow;\n\n\n\n fn type_query_one_opt<T, P>(&self, sql: &str, params: P) -> anyhow::Result<Option<T>>\n", "file_path": "rewryte/src/sqlite.rs", "rank": 42, "score": 48955.843147128704 }, { "content": "#[inline]\n\nfn parse_column<'i>(\n\n ctx: &mut Context,\n\n pair: Pair<'i, Rule>,\n\n) -> Result<(ColumnPartial<'i>, Vec<Modifier<'i>>), Error> {\n\n debug_assert!(\n\n pair.as_rule() == Rule::column,\n\n \"The root pair must be a `column` to be able to parse a table column definition\"\n\n );\n\n\n\n let inner_span = pair.as_span();\n\n let mut inner: Pairs<'i, Rule> = pair.into_inner();\n\n\n\n let name = match inner.next() {\n\n Some(pair) if pair.as_rule() == Rule::ident => pair.as_str(),\n\n Some(pair) => {\n\n ctx.diags.push(\n\n Diagnostic::error()\n\n .with_message(\"Unexpected token\")\n\n .with_labels(vec![Label::primary(ctx.file_id, span_range_single(&pair))\n\n .with_message(format!(\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 43, "score": 48341.93069834512 }, { "content": "#[inline]\n\nfn parse_modifiers<'i>(\n\n ctx: &mut Context,\n\n pair: Pair<'i, Rule>,\n\n) -> Result<Vec<Modifier<'i>>, Error> {\n\n debug_assert!(\n\n pair.as_rule() == Rule::modifiers,\n\n \"The root pair must be a `modifiers` to be able to parse column modifiers\"\n\n );\n\n\n\n let inner_span = pair.as_span();\n\n let inner: Pairs<'i, Rule> = pair.into_inner();\n\n\n\n let mut modifiers = Vec::new();\n\n\n\n for pair in inner {\n\n match pair.as_rule() {\n\n Rule::modifier_default => 
{\n\n let mut inner = pair.into_inner();\n\n\n\n let default = match inner.next() {\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 44, "score": 48341.93069834512 }, { "content": "#[inline]\n\nfn parse_modifier_ref_action<'i>(\n\n ctx: &mut Context,\n\n pair: Pair<'i, Rule>,\n\n) -> Result<(Action, Action), Error> {\n\n debug_assert!(\n\n pair.as_rule() == Rule::ref_action,\n\n \"The root pair must be a `ref_action` to be able to parse column ref modifier\"\n\n );\n\n\n\n let inner_span = pair.as_span();\n\n let inner: Pairs<'i, Rule> = pair.into_inner();\n\n\n\n let mut delete = Action::default();\n\n let mut update = Action::default();\n\n\n\n for pair in inner {\n\n let (rule, action) = match pair.as_rule() {\n\n Rule::ref_action_delete => {\n\n let mut inner: Pairs<'i, Rule> = pair.into_inner();\n\n\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 45, "score": 46342.470853900275 }, { "content": "pub trait Format<W: io::Write> {\n\n fn fmt(&self, writer: &mut W, typ: FormatType) -> Result<(), Error>;\n\n}\n\n\n\nimpl<'i, W: io::Write> Format<W> for Schema<'i> {\n\n fn fmt(&self, writer: &mut W, typ: FormatType) -> Result<(), Error> {\n\n match typ {\n\n FormatType::MySQL => mysql::write_schema(self, writer)?,\n\n FormatType::PostgreSQL => postgresql::write_schema(self, writer)?,\n\n FormatType::SQLite => sqlite::write_schema(self, writer)?,\n\n FormatType::Rust => todo!(),\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "rewryte-generator/src/lib.rs", "rank": 46, "score": 40998.337586268826 }, { "content": "#[inline]\n\nfn span_range_end(span: Span) -> Range<usize> {\n\n (span.end())..(span.end())\n\n}\n\n\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 47, "score": 38909.40885802166 }, { "content": "#[inline]\n\nfn span_range_single<'i>(pair: &Pair<'i, Rule>) -> Range<usize> {\n\n let span = pair.as_span();\n\n\n\n (span.start())..(span.start())\n\n}\n\n\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 48, "score": 
35037.39729194413 }, { "content": "use {\n\n crate::parser::Rule,\n\n pest::{error::Error as PestError, Span},\n\n};\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub enum Error {\n\n #[error(\"`{0}` is not a valid action\")]\n\n InvalidAction(String),\n\n #[error(\"Unexpected end of stream\")]\n\n UnexpectedEOS,\n\n #[error(\"Unexpected pair in stream: {0:?}\")]\n\n UnexpectedPair(ErrorSpan),\n\n\n\n #[error(\"Parse error\")]\n\n Parse(#[from] PestError<Rule>),\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ErrorSpan {\n", "file_path": "rewryte-parser/src/error.rs", "rank": 49, "score": 28905.214081524973 }, { "content": " value: String,\n\n start: usize,\n\n end: usize,\n\n}\n\n\n\nimpl From<Span<'_>> for ErrorSpan {\n\n fn from(span: Span<'_>) -> Self {\n\n ErrorSpan {\n\n value: span.as_str().to_string(),\n\n start: span.start(),\n\n end: span.end(),\n\n }\n\n }\n\n}\n", "file_path": "rewryte-parser/src/error.rs", "rank": 50, "score": 28902.17580571508 }, { "content": "use {\n\n crate::Error,\n\n rewryte_parser::models::{Column, ColumnDefault, Enum, ForeignKey, Item, Schema, Table, Types},\n\n std::io,\n\n};\n\n\n", "file_path": "rewryte-generator/src/mysql.rs", "rank": 52, "score": 18.96211305908568 }, { "content": "use {\n\n crate::Error,\n\n rewryte_parser::models::{Column, ColumnDefault, Enum, ForeignKey, Item, Schema, Table, Types},\n\n std::io,\n\n};\n\n\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 53, "score": 18.962113059085684 }, { "content": "use {\n\n crate::Error,\n\n heck::{KebabCase, SnakeCase},\n\n rewryte_parser::models::{Enum, Item, Schema, Table, Types},\n\n std::io,\n\n};\n\n\n\n#[derive(Default, Debug, Clone, Copy, Hash, PartialEq, Eq, PartialOrd, Ord)]\n\npub struct Options {\n\n pub juniper: bool,\n\n pub serde: bool,\n\n pub sqlx: bool,\n\n}\n\n\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 54, "score": 17.650809976906746 }, { "content": "use {\n\n crate::Error,\n\n std::{convert::TryFrom, 
fmt},\n\n};\n\n\n\n#[derive(Clone, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]\n\npub struct Schema<'a> {\n\n pub items: Vec<Item<'a>>,\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]\n\npub enum Item<'a> {\n\n Enum(Enum<'a>),\n\n Table(Table<'a>),\n\n}\n\n\n\n#[derive(Clone, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]\n\npub struct Enum<'a> {\n\n pub name: &'a str,\n\n pub not_exists: bool,\n", "file_path": "rewryte-parser/src/models.rs", "rank": 55, "score": 17.444909026883728 }, { "content": "{\n\n pub(crate) fn new(rows: Rows<'stmt>, f: F) -> Self {\n\n Self { rows, map: f }\n\n }\n\n}\n\n\n\nimpl<T, F> Iterator for MappedRowsExt<'_, F>\n\nwhere\n\n F: FnMut(&Row<'_>) -> anyhow::Result<T>,\n\n{\n\n type Item = anyhow::Result<T>;\n\n\n\n fn next(&mut self) -> Option<anyhow::Result<T>> {\n\n let map = &mut self.map;\n\n\n\n self.rows\n\n .next()\n\n .map_err(anyhow::Error::from)\n\n .transpose()\n\n .map(|row_result| {\n", "file_path": "rewryte/src/sqlite.rs", "rank": 56, "score": 15.763351019874307 }, { "content": "\n\n#[derive(Clone, Copy, Debug)]\n\npub enum FormatType {\n\n MySQL,\n\n PostgreSQL,\n\n Rust,\n\n SQLite,\n\n}\n\n\n\nimpl<'s> TryFrom<&'s str> for FormatType {\n\n type Error = Error;\n\n\n\n fn try_from(s: &'s str) -> Result<Self, Self::Error> {\n\n match s {\n\n \"mysql\" => Ok(FormatType::MySQL),\n\n \"postgresql\" => Ok(FormatType::PostgreSQL),\n\n \"rust\" => Ok(FormatType::Rust),\n\n \"sqlite\" => Ok(FormatType::SQLite),\n\n t => Err(Error::InvalidFormat(t.to_string())),\n\n }\n\n }\n\n}\n\n\n", "file_path": "rewryte-generator/src/lib.rs", "rank": 57, "score": 15.532538089180772 }, { "content": " }\n\n}\n\n\n\nimpl<T> Iterator for TypeMappedRowsExt<'_, T>\n\nwhere\n\n T: FromRow,\n\n{\n\n type Item = anyhow::Result<T>;\n\n\n\n fn next(&mut self) -> Option<anyhow::Result<T>> {\n\n self.rows\n\n .next()\n\n .map_err(anyhow::Error::from)\n\n .transpose()\n\n .map(|row_result| {\n\n row_result\n\n .and_then(|row| 
T::from_row(&row))\n\n .map_err(anyhow::Error::from)\n\n })\n\n }\n\n}\n\n\n", "file_path": "rewryte/src/sqlite.rs", "rank": 58, "score": 14.391939461452429 }, { "content": " P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnMut(&Row<'_>) -> anyhow::Result<T>,\n\n {\n\n let rows = self.query(params)?;\n\n\n\n Ok(MappedRowsExt::new(rows, f))\n\n }\n\n\n\n fn query_opt<T, P, F>(\n\n &mut self,\n\n params: P,\n\n f: F,\n\n ) -> anyhow::Result<Option<MappedRowsExt<'_, F>>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnMut(&Row<'_>) -> anyhow::Result<T>,\n\n {\n\n let rows = match self.query(params).map_err(anyhow::Error::from) {\n", "file_path": "rewryte/src/sqlite.rs", "rank": 59, "score": 14.358523533128889 }, { "content": "\n\n let mut files = SimpleFiles::new();\n\n\n\n let file_id = files.add(\"<inline>\", contents_str);\n\n\n\n let mut ctx = Context::new(file_id);\n\n\n\n match parse(&mut ctx, contents_str) {\n\n Ok(schema) => {\n\n let mut writer = BufWriter::new(Vec::new());\n\n\n\n if let Err(err) = schema.fmt(&mut writer, input.format) {\n\n return error(input.lit_path, err);\n\n }\n\n\n\n let inner = match writer.into_inner() {\n\n Ok(vec) => vec,\n\n Err(err) => {\n\n return error(input.lit_path, err);\n\n }\n", "file_path": "rewryte-macro/src/lib.rs", "rank": 60, "score": 14.015358891152015 }, { "content": "use {\n\n crate::{\n\n models::{\n\n Action, Column, ColumnDefault, ColumnPartial, Enum, ForeignKey, Item, Modifier, Schema,\n\n Table, Types,\n\n },\n\n Error,\n\n },\n\n codespan_reporting::diagnostic::{Diagnostic, Label},\n\n pest::{\n\n iterators::{Pair, Pairs},\n\n Parser as _, Span,\n\n },\n\n std::{convert::TryFrom, ops::Range},\n\n};\n\n\n\n#[derive(pest_derive::Parser)]\n\n#[grammar = \"dal.pest\"]\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 61, "score": 14.003472073329817 }, { "content": "\n\n fn query_one<T, P, F>(&mut self, params: P, f: F) -> anyhow::Result<T>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: 
FnOnce(&Row<'_>) -> anyhow::Result<T>,\n\n {\n\n let mut rows = self.query(params)?;\n\n\n\n match rows.next()? {\n\n Some(row) => Ok(f(&row)?),\n\n None => Err(rusqlite::Error::QueryReturnedNoRows.into()),\n\n }\n\n }\n\n\n\n fn query_one_opt<T, P, F>(&mut self, params: P, f: F) -> anyhow::Result<Option<T>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnOnce(&Row<'_>) -> anyhow::Result<T>,\n", "file_path": "rewryte/src/sqlite.rs", "rank": 62, "score": 13.916330849391901 }, { "content": " }\n\n\n\n {\n\n let idents = std::iter::repeat(ident.clone());\n\n\n\n writeln!(\n\n writer,\n\n \"{}\",\n\n quote::quote! {\n\n impl ::rewryte::sqlite::types::FromSql for #ident {\n\n fn column_result(value: ::rewryte::sqlite::types::ValueRef) -> ::rewryte::sqlite::types::FromSqlResult<Self> {\n\n value.as_str().and_then(|s| match s {\n\n #(\n\n #variants_kebab => ::std::result::Result::Ok(#idents::#variants),\n\n )*\n\n _ => ::std::result::Result::Err(::rewryte::sqlite::types::FromSqlError::InvalidType),\n\n })\n\n }\n\n }\n\n }\n\n )?;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 63, "score": 13.659907866612585 }, { "content": "\n\n fn type_query_opt<T, P>(\n\n &mut self,\n\n params: P,\n\n ) -> anyhow::Result<Option<TypeMappedRowsExt<'_, T>>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow,\n\n {\n\n let rows = match self.query(params).map_err(anyhow::Error::from) {\n\n Ok(rows) => rows,\n\n Err(err) => match err.downcast_ref::<rusqlite::Error>() {\n\n Some(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),\n\n _ => return Err(err),\n\n },\n\n };\n\n\n\n Ok(Some(TypeMappedRowsExt::new(rows)))\n\n }\n", "file_path": "rewryte/src/sqlite.rs", "rank": 64, "score": 13.311049079647422 }, { "content": " Ok(rows) => rows,\n\n Err(err) => match err.downcast_ref::<rusqlite::Error>() {\n\n Some(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),\n\n _ => return Err(err),\n\n },\n\n 
};\n\n\n\n Ok(Some(MappedRowsExt::new(rows, f)))\n\n }\n\n\n\n fn type_query<T, P>(&mut self, params: P) -> anyhow::Result<TypeMappedRowsExt<'_, T>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow,\n\n {\n\n let rows = self.query(params)?;\n\n\n\n Ok(TypeMappedRowsExt::new(rows))\n\n }\n", "file_path": "rewryte/src/sqlite.rs", "rank": 65, "score": 13.261346910141762 }, { "content": " use {crate::postgresql::write_enum, rewryte_parser::models::*};\n\n\n\n #[test]\n\n fn simple() {\n\n let decl = Enum {\n\n name: \"Test\",\n\n not_exists: false,\n\n variants: vec![\"Variant1\", \"Variant2\"],\n\n };\n\n\n\n let mut writer = Vec::new();\n\n\n\n write_enum(&decl, &mut writer).expect(\"Unable to write enum to buffer\");\n\n\n\n let utf8_writer =\n\n String::from_utf8(writer).expect(\"Unable to convert buff into string\");\n\n\n\n assert_eq!(\n\n \"CREATE TYPE Test AS ENUM (\n\n 'Variant1',\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 66, "score": 13.229325779031118 }, { "content": " where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow;\n\n}\n\n\n\nimpl ConnectionExt for rusqlite::Connection {\n\n fn query_one<T, P, F>(&self, sql: &str, params: P, f: F) -> anyhow::Result<T>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnOnce(&Row<'_>) -> anyhow::Result<T>,\n\n {\n\n let mut stmt = self.prepare(sql)?;\n\n\n\n let row = stmt.query_one(params, f)?;\n\n\n\n Ok(row)\n\n }\n\n\n", "file_path": "rewryte/src/sqlite.rs", "rank": 67, "score": 12.952025557308747 }, { "content": " serde_json::Value,\n\n];\n\n\n\n#[cfg(feature = \"with-uuid\")]\n\nimpl_from_row![\n\n uuid::Uuid,\n\n];\n\n\n\n/// An iterator over the mapped resulting rows of a query.\n\n///\n\n/// `F` is used to transform the _streaming_ iterator into a _standard_ iterator.\n\n#[must_use = \"iterators are lazy and do nothing unless consumed\"]\n\npub struct MappedRowsExt<'stmt, F> {\n\n rows: Rows<'stmt>,\n\n map: F,\n\n}\n\n\n\nimpl<'stmt, T, F> 
MappedRowsExt<'stmt, F>\n\nwhere\n\n F: FnMut(&Row<'_>) -> anyhow::Result<T>,\n", "file_path": "rewryte/src/sqlite.rs", "rank": 68, "score": 12.855382695192587 }, { "content": " T: FromRow,\n\n {\n\n #[pin]\n\n stream: RowStream,\n\n #[pin]\n\n _p: PhantomPinned,\n\n _t: PhantomData<T>,\n\n }\n\n}\n\n\n\nimpl<T> Stream for TypedRowStreamExt<T>\n\nwhere\n\n T: FromRow,\n\n{\n\n type Item = Result<T, anyhow::Error>;\n\n\n\n fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n let this = self.project();\n\n\n\n let polled: Option<Row> = futures::ready!(this.stream.poll_next(cx)?);\n", "file_path": "rewryte/src/postgres.rs", "rank": 69, "score": 12.554860611024171 }, { "content": " P::Item: ToSql,\n\n T: FromRow;\n\n\n\n fn type_query_opt<T, P>(\n\n &mut self,\n\n params: P,\n\n ) -> anyhow::Result<Option<TypeMappedRowsExt<'_, T>>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow;\n\n\n\n fn query_one<T, P, F>(&mut self, params: P, f: F) -> anyhow::Result<T>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnOnce(&Row<'_>) -> anyhow::Result<T>;\n\n\n\n fn type_query_one<T, P>(&mut self, params: P) -> anyhow::Result<T>\n\n where\n", "file_path": "rewryte/src/sqlite.rs", "rank": 70, "score": 12.459408456629811 }, { "content": "\n\n return Err(Error::UnexpectedPair(pair.as_span().into()));\n\n }\n\n };\n\n\n\n match rule {\n\n Rule::ref_action_delete => delete = Action::try_from(action)?,\n\n Rule::ref_action_update => update = Action::try_from(action)?,\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n Ok((delete, update))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n pub use {\n\n super::*,\n\n crate::models::{Column, Item, Table},\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 71, "score": 12.361977194037237 }, { "content": " P::Item: ToSql,\n\n T: FromRow,\n\n {\n\n let mut rows = self.query(params)?;\n\n\n\n match rows.next()? 
{\n\n Some(row) => Ok(T::from_row(&row)?),\n\n None => Err(rusqlite::Error::QueryReturnedNoRows.into()),\n\n }\n\n }\n\n\n\n fn type_query_one_opt<T, P>(&mut self, params: P) -> anyhow::Result<Option<T>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow,\n\n {\n\n let mut rows = match self.query(params).map_err(anyhow::Error::from) {\n\n Ok(rows) => rows,\n\n Err(err) => match err.downcast_ref::<rusqlite::Error>() {\n", "file_path": "rewryte/src/sqlite.rs", "rank": 72, "score": 12.011068024098186 }, { "content": " if mapped.by_ref().any(|value| &*value == \"serde\") {\n\n options.serde = true;\n\n }\n\n\n\n if mapped.by_ref().any(|value| &*value == \"sqlx\") {\n\n options.sqlx = true;\n\n }\n\n }\n\n\n\n if let Err(err) = rewryte_generator::rust::write_schema(&schema, &mut writer, options) {\n\n return error(input.lit_path, err);\n\n }\n\n\n\n let inner = match writer.into_inner() {\n\n Ok(vec) => vec,\n\n Err(err) => {\n\n return error(input.lit_path, err);\n\n }\n\n };\n\n\n", "file_path": "rewryte-macro/src/lib.rs", "rank": 73, "score": 11.849981286864518 }, { "content": " let output = matches\n\n .value_of(\"output\")\n\n .ok_or_else(|| anyhow::anyhow!(\"You must specify an output for the schema\"))?;\n\n let file = File::create(output)?;\n\n let mut writer = BufWriter::new(file);\n\n\n\n schema.fmt(&mut writer, typ)?;\n\n }\n\n }\n\n Err(err) => {\n\n eprintln!(\"{:?}\", err);\n\n\n\n let writer = StandardStream::stderr(ColorChoice::Always);\n\n let config = Config::default();\n\n\n\n for diag in ctx.diagnostics() {\n\n term::emit(&mut writer.lock(), &config, &files, diag)?;\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "rewryte-cli/src/main.rs", "rank": 74, "score": 11.827571887084725 }, { "content": " }\n\n )?;\n\n }\n\n\n\n #[cfg(feature = \"sqlite\")]\n\n {\n\n writeln!(\n\n writer,\n\n \"{}\",\n\n quote::quote! 
{\n\n impl ::rewryte::sqlite::FromRow for #ident {\n\n fn from_row(row: &::rewryte::sqlite::Row<'_>) -> ::anyhow::Result<Self>\n\n where\n\n Self: Sized,\n\n {\n\n use ::anyhow::Context;\n\n\n\n ::std::result::Result::Ok(Self {\n\n #(\n\n #field_names: row.get(#ids).context(#messages)?,\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 75, "score": 11.783635957908855 }, { "content": " #[cfg(feature = \"postgres\")]\n\n {\n\n writeln!(\n\n writer,\n\n \"{}\",\n\n quote::quote! {\n\n impl ::rewryte::postgres::FromRow for #ident {\n\n fn from_row(row: ::rewryte::postgres::Row) -> ::anyhow::Result<Self>\n\n where\n\n Self: Sized,\n\n {\n\n use ::anyhow::Context;\n\n\n\n ::std::result::Result::Ok(Self {\n\n #(\n\n #field_names: row.try_get(#ids).context(#messages)?,\n\n )*\n\n })\n\n }\n\n }\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 76, "score": 11.783635957908855 }, { "content": " P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow;\n\n\n\n fn query_one_opt<T, P, F>(&mut self, params: P, f: F) -> anyhow::Result<Option<T>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnOnce(&Row<'_>) -> anyhow::Result<T>;\n\n\n\n fn type_query_one_opt<T, P>(&mut self, params: P) -> anyhow::Result<Option<T>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow;\n\n}\n\n\n\nimpl StatementExt for rusqlite::Statement<'_> {\n\n fn query<T, P, F>(&mut self, params: P, f: F) -> anyhow::Result<MappedRowsExt<'_, F>>\n\n where\n", "file_path": "rewryte/src/sqlite.rs", "rank": 77, "score": 11.7756857463994 }, { "content": " }\n\n None => {\n\n return Err(Error::UnexpectedEOS);\n\n }\n\n };\n\n\n\n for root_group in pair.into_inner() {\n\n match root_group.as_rule() {\n\n Rule::decl_enum => {\n\n let decl = parse_enum(ctx, root_group)?;\n\n\n\n items.push(Item::Enum(decl));\n\n }\n\n Rule::decl_table => {\n\n let decl = parse_table(ctx, root_group)?;\n\n\n\n items.push(Item::Table(decl));\n\n }\n\n Rule::comment => continue,\n\n Rule::EOI => 
break,\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 78, "score": 11.738223291747598 }, { "content": "pub mod mysql;\n\npub mod postgresql;\n\npub mod rust;\n\npub mod sqlite;\n\n\n\nuse {\n\n rewryte_parser::models::Schema,\n\n std::{convert::TryFrom, fmt, io},\n\n};\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub enum Error {\n\n #[error(\"`{0}` is not a valid format type\")]\n\n InvalidFormat(String),\n\n\n\n #[error(\"Format error\")]\n\n Format(#[from] fmt::Error),\n\n #[error(\"IO error\")]\n\n Io(#[from] io::Error),\n\n}\n", "file_path": "rewryte-generator/src/lib.rs", "rank": 79, "score": 11.628455625508288 }, { "content": " fn query_one_opt<T, P, F>(&self, sql: &str, params: P, f: F) -> anyhow::Result<Option<T>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n F: FnOnce(&Row<'_>) -> anyhow::Result<T>,\n\n {\n\n match self.query_one(sql, params, f) {\n\n Ok(res) => Ok(Some(res)),\n\n Err(err) => match err.downcast_ref::<rusqlite::Error>() {\n\n Some(rusqlite::Error::QueryReturnedNoRows) => Ok(None),\n\n _ => Err(err),\n\n },\n\n }\n\n }\n\n\n\n fn type_query_one<T, P>(&self, sql: &str, params: P) -> anyhow::Result<T>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow,\n", "file_path": "rewryte/src/sqlite.rs", "rank": 80, "score": 11.579659736213609 }, { "content": " TABLE_NULL,\n\n def_table(Column {\n\n name: \"value\",\n\n typ: Types::Text,\n\n null: true,\n\n default: ColumnDefault::default(),\n\n }),\n\n );\n\n }\n\n\n\n #[test]\n\n fn reference() {\n\n assert_span(\n\n \"tests::tables::reference\",\n\n TABLE_REFERENCE,\n\n Schema {\n\n items: vec![Item::Table(Table {\n\n name: \"Settings\",\n\n not_exists: false,\n\n columns: vec![\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 81, "score": 11.206001982516415 }, { "content": " SetNull,\n\n SetDefault,\n\n Cascade,\n\n}\n\n\n\nimpl Default for Action {\n\n fn default() -> Self {\n\n Action::NoAction\n\n }\n\n}\n\n\n\nimpl<'s> TryFrom<&'s str> for Action {\n\n 
type Error = Error;\n\n\n\n fn try_from(value: &'s str) -> Result<Self, Self::Error> {\n\n match value {\n\n \"no action\" => Ok(Action::NoAction),\n\n \"restrict\" => Ok(Action::Restrict),\n\n \"set null\" => Ok(Action::SetNull),\n\n \"set default\" => Ok(Action::SetDefault),\n", "file_path": "rewryte-parser/src/models.rs", "rank": 82, "score": 11.115259291275258 }, { "content": " let mut writer = Buffer::no_color();\n\n let config = Config::default();\n\n\n\n for diag in ctx.diags {\n\n term::emit(&mut writer, &config, &files, &diag).unwrap();\n\n }\n\n\n\n panic!(\n\n \"{}{:?}\",\n\n String::from_utf8_lossy(writer.as_slice()).into_owned(),\n\n err\n\n );\n\n }\n\n }\n\n }\n\n\n\n mod enums {\n\n use super::*;\n\n\n\n const ENUM: &str = \"enum Rating {\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 83, "score": 11.111109808323334 }, { "content": " row_result\n\n .and_then(|row| (map)(&row))\n\n .map_err(anyhow::Error::from)\n\n })\n\n }\n\n}\n\npub struct TypeMappedRowsExt<'stmt, T> {\n\n rows: Rows<'stmt>,\n\n typ: PhantomData<T>,\n\n}\n\n\n\nimpl<'stmt, T> TypeMappedRowsExt<'stmt, T>\n\nwhere\n\n T: FromRow,\n\n{\n\n pub(crate) fn new(rows: Rows<'stmt>) -> Self {\n\n Self {\n\n rows,\n\n typ: PhantomData::default(),\n\n }\n", "file_path": "rewryte/src/sqlite.rs", "rank": 84, "score": 10.94754347375168 }, { "content": " params: &[&(dyn ToSql + Sync)],\n\n ) -> anyhow::Result<TypedRowStreamExt<T>>\n\n where\n\n S: ?Sized + ToStatement + Send + Sync,\n\n T: FromRow,\n\n {\n\n let stream = self.query_raw(statement, slice_iter(params)).await?;\n\n\n\n Ok(TypedRowStreamExt {\n\n stream,\n\n _p: PhantomPinned,\n\n _t: PhantomData,\n\n })\n\n }\n\n}\n\n\n\npin_project_lite::pin_project! 
{\n\n /// A stream of the mapped resulting table rows.\n\n pub struct TypedRowStreamExt<T>\n\n where\n", "file_path": "rewryte/src/postgres.rs", "rank": 85, "score": 10.927352173044689 }, { "content": " _ => {\n\n ctx.diags.push(\n\n Diagnostic::error()\n\n .with_message(\"Unexpected token\")\n\n .with_labels(vec![Label::primary(ctx.file_id, span_range_single(&root_group))\n\n .with_message(format!(\n\n \"expected `enum declaration`, `table declaration`, or `comment`, found `{:?}`\",\n\n root_group.as_rule()\n\n ))]),\n\n );\n\n\n\n return Err(Error::UnexpectedPair(root_group.as_span().into()));\n\n }\n\n }\n\n }\n\n\n\n Ok(Schema { items })\n\n}\n\n\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 86, "score": 10.70780881945069 }, { "content": " {\n\n let mut stmt = self.prepare(sql)?;\n\n\n\n let row = stmt.type_query_one(params)?;\n\n\n\n Ok(row)\n\n }\n\n\n\n fn type_query_one_opt<T, P>(&self, sql: &str, params: P) -> anyhow::Result<Option<T>>\n\n where\n\n P: IntoIterator,\n\n P::Item: ToSql,\n\n T: FromRow,\n\n {\n\n match self.type_query_one(sql, params) {\n\n Ok(res) => Ok(Some(res)),\n\n Err(err) => match err.downcast_ref::<rusqlite::Error>() {\n\n Some(rusqlite::Error::QueryReturnedNoRows) => Ok(None),\n\n _ => Err(err),\n\n },\n\n }\n\n }\n\n}\n\n\n", "file_path": "rewryte/src/sqlite.rs", "rank": 87, "score": 10.70594617303852 }, { "content": "pub mod error;\n\npub mod models;\n\npub mod parser;\n\n\n\npub use crate::{\n\n error::Error,\n\n parser::{parse, Context},\n\n};\n", "file_path": "rewryte-parser/src/lib.rs", "rank": 88, "score": 10.641452676252053 }, { "content": "\n\n #[cfg(feature = \"postgres\")]\n\n {\n\n let name = decl.name;\n\n let idents = std::iter::repeat(ident.clone());\n\n let num_variants = decl.variants.len();\n\n\n\n let variant_names = &decl.variants;\n\n\n\n {\n\n writeln!(\n\n writer,\n\n \"{}\",\n\n quote::quote! 
{\n\n impl<'r> ::rewryte::postgres::types::FromSql<'r> for #ident {\n\n fn from_sql(_type: &::rewryte::postgres::types::Type, buf: &'r [u8]) -> ::std::result::Result<\n\n #ident,\n\n ::std::boxed::Box<dyn ::std::error::Error + ::std::marker::Sync + ::std::marker::Send>\n\n > {\n\n match ::std::str::from_utf8(buf)? {\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 89, "score": 10.611662257184559 }, { "content": " 'Variant2'\n\n);\",\n\n utf8_writer.as_str(),\n\n );\n\n }\n\n }\n\n\n\n mod tables {\n\n use {crate::postgresql::write_table, rewryte_parser::models::*};\n\n\n\n #[test]\n\n fn simple() {\n\n let table = Table {\n\n name: \"Example\",\n\n not_exists: true,\n\n columns: vec![\n\n Column {\n\n name: \"Id\",\n\n typ: Types::Text,\n\n null: false,\n", "file_path": "rewryte-generator/src/postgresql.rs", "rank": 90, "score": 10.432448524784778 }, { "content": " let rendered = match String::from_utf8(inner) {\n\n Ok(string) => string,\n\n Err(err) => {\n\n return error(input.lit_path, err);\n\n }\n\n };\n\n\n\n match rendered.parse() {\n\n Ok(stream) => stream,\n\n Err(err) => error(input.lit_path, err),\n\n }\n\n }\n\n Err(err) => {\n\n let config = Config::default();\n\n\n\n let mut writer = NoColor::new(Vec::new());\n\n\n\n for diag in ctx.diagnostics() {\n\n if let Err(err) = term::emit(&mut writer, &config, &files, diag) {\n\n return error(input.lit_path, err);\n", "file_path": "rewryte-macro/src/lib.rs", "rank": 91, "score": 10.405544210949113 }, { "content": " {\n\n let mut rows = match self.query(params).map_err(anyhow::Error::from) {\n\n Ok(rows) => rows,\n\n Err(err) => match err.downcast_ref::<rusqlite::Error>() {\n\n Some(rusqlite::Error::QueryReturnedNoRows) => return Ok(None),\n\n _ => return Err(err),\n\n },\n\n };\n\n\n\n let res: Option<T> = match rows.next()? 
{\n\n Some(row) => Some(f(&row)?),\n\n None => None,\n\n };\n\n\n\n Ok(res)\n\n }\n\n\n\n fn type_query_one<T, P>(&mut self, params: P) -> anyhow::Result<T>\n\n where\n\n P: IntoIterator,\n", "file_path": "rewryte/src/sqlite.rs", "rank": 92, "score": 10.231679899118078 }, { "content": " Ok(())\n\n }\n\n}\n\n\n\npub(crate) struct ColumnPartial<'a> {\n\n pub name: &'a str,\n\n pub typ: Types<'a>,\n\n pub null: bool,\n\n}\n\n\n\npub(crate) enum Modifier<'p> {\n\n Default {\n\n value: &'p str,\n\n },\n\n DefaultDateTime,\n\n DefaultNull,\n\n PrimaryKey,\n\n Reference {\n\n table: &'p str,\n\n column: &'p str,\n\n delete: Action,\n\n update: Action,\n\n },\n\n Unique,\n\n}\n", "file_path": "rewryte-parser/src/models.rs", "rank": 93, "score": 10.10316910392128 }, { "content": " \"cascade\" => Ok(Action::Cascade),\n\n t => Err(Error::InvalidAction(t.to_string())),\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Action {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(\n\n f,\n\n \"{}\",\n\n match self {\n\n Action::NoAction => \"NO ACTION\",\n\n Action::Restrict => \"RESTRICT\",\n\n Action::SetNull => \"SET NULL\",\n\n Action::SetDefault => \"SET DEFAULT\",\n\n Action::Cascade => \"CASCADE\",\n\n }\n\n )?;\n\n\n", "file_path": "rewryte-parser/src/models.rs", "rank": 94, "score": 9.977645333882515 }, { "content": " #[cfg(feature = \"sqlite\")]\n\n {\n\n {\n\n let idents = std::iter::repeat(ident.clone());\n\n\n\n writeln!(\n\n writer,\n\n \"{}\",\n\n quote::quote! 
{\n\n impl ::rewryte::sqlite::types::ToSql for #ident {\n\n fn to_sql(&self) -> ::rewryte::sqlite::Result<::rewryte::sqlite::types::ToSqlOutput> {\n\n match self {\n\n #(\n\n #idents::#variants => ::std::result::Result::Ok(#variants_kebab.into()),\n\n )*\n\n }\n\n }\n\n }\n\n }\n\n )?;\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 95, "score": 9.854811774811239 }, { "content": " #(\n\n #variants_kebab => ::std::result::Result::Ok(#idents::#variants),\n\n )*\n\n s => {\n\n ::std::result::Result::Err(\n\n ::std::convert::Into::into(format!(\"invalid variant `{}`\", s))\n\n )\n\n }\n\n }\n\n }\n\n\n\n fn accepts(type_: &::rewryte::postgres::types::Type) -> bool {\n\n if type_.name() != #name {\n\n return false;\n\n }\n\n\n\n match *type_.kind() {\n\n ::rewryte::postgres::types::Kind::Enum(ref variants) => {\n\n if variants.len() != #num_variants {\n\n return false;\n", "file_path": "rewryte-generator/src/rust.rs", "rank": 96, "score": 9.811693353412373 }, { "content": "#[cfg(feature = \"postgres\")]\n\npub mod postgres;\n\n#[cfg(all(feature = \"sqlite\"))]\n\npub mod sqlite;\n\n\n\npub use rewryte_macro::{models, schema};\n\n\n\n#[cfg(feature = \"build-script\")]\n\nuse {\n\n codespan_reporting::{\n\n files::SimpleFiles,\n\n term::{self, termcolor::NoColor, Config},\n\n },\n\n rewryte_parser::parser::{parse, Context},\n\n std::{\n\n fs,\n\n io::{ErrorKind, Write},\n\n path::Path,\n\n },\n\n};\n\n\n\n#[cfg(feature = \"build-script\")]\n", "file_path": "rewryte/src/lib.rs", "rank": 97, "score": 9.509429589017682 }, { "content": "\n\n let mut files = SimpleFiles::new();\n\n\n\n let file_id = files.add(\"<inline>\", contents_str);\n\n\n\n let mut ctx = Context::new(file_id);\n\n\n\n match parse(&mut ctx, contents_str) {\n\n Ok(schema) => {\n\n let mut writer = BufWriter::new(Vec::new());\n\n\n\n let mut options = rewryte_generator::rust::Options::default();\n\n\n\n if let Some(extra) = input.extra {\n\n let mut mapped = extra.iter().map(LitStr::value);\n\n\n\n 
if mapped.by_ref().any(|value| &*value == \"juniper\") {\n\n options.juniper = true;\n\n }\n\n\n", "file_path": "rewryte-macro/src/lib.rs", "rank": 98, "score": 9.459991819266403 }, { "content": " Explicit\n\n Mature\n\n Teen\n\n General\n\n }\";\n\n\n\n #[test]\n\n fn simple() {\n\n assert_span(\n\n \"tests::enums::simple\",\n\n ENUM,\n\n Schema {\n\n items: vec![Item::Enum(Enum {\n\n name: \"Rating\",\n\n not_exists: false,\n\n variants: vec![\"Explicit\", \"Mature\", \"Teen\", \"General\"],\n\n })],\n\n },\n\n );\n\n }\n", "file_path": "rewryte-parser/src/parser.rs", "rank": 99, "score": 9.373156731345382 } ]
Rust
src/serde/ser/from_seq/header.rs
snaar/chopper
62a0305233f16b5001b433c8ef83844f12794173
use serde::ser::{Impossible, SerializeSeq, SerializeStruct, SerializeTuple, SerializeTupleStruct}; use serde::{Serialize, Serializer}; use crate::chopper::types::{FieldType, Header}; use crate::serde::ser::error::SerError; use crate::serde::ser::field_type::to_field_type; pub fn to_header<T>(value: &T, timestamp_field_index: usize) -> Result<Header, SerError> where T: Serialize + ?Sized, { value.serialize(HeaderSerializer::new(timestamp_field_index)) } pub struct HeaderSerializer { timestamp_field_index: usize, field_types: Vec<FieldType>, } impl HeaderSerializer { pub fn new(timestamp_field_index: usize) -> HeaderSerializer { HeaderSerializer { timestamp_field_index, field_types: Vec::new(), } } fn into_header(self) -> Result<Header, SerError> { if self.field_types.is_empty() { return Err(SerError::NoTimestampField); } let mut field_types = self.field_types; let field_names = Header::generate_default_field_names(field_types.len() - 1); let idx = self.timestamp_field_index; if field_types[idx] != FieldType::Long { return Err(SerError::InvalidTimestampFieldType); } field_types.remove(idx); Ok(Header::new(field_names, field_types)) } } impl Serializer for HeaderSerializer { type Ok = Header; type Error = SerError; type SerializeSeq = Self; type SerializeTuple = Self; type SerializeTupleStruct = Self; type SerializeTupleVariant = Impossible<Self::Ok, Self::Error>; type SerializeMap = Impossible<Self::Ok, Self::Error>; type SerializeStruct = Self; type SerializeStructVariant = Impossible<Self::Ok, Self::Error>; fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error> where T: Serialize, { value.serialize(self) } fn serialize_newtype_struct<T: ?Sized>( self, _name: &'static str, value: &T, ) -> Result<Self::Ok, Self::Error> where T: Serialize, { value.serialize(self) } fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> { Ok(self) } fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> 
{ Ok(self) } fn serialize_tuple_struct( self, _name: &'static str, _len: usize, ) -> Result<Self::SerializeTupleStruct, Self::Error> { Ok(self) } fn serialize_struct( self, _name: &'static str, _len: usize, ) -> Result<Self::SerializeStruct, Self::Error> { Ok(self) } return_error! { <type_not_supported> bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str bytes none unit unit_struct unit_variant newtype_variant tuple_variant map struct_variant } } impl SerializeSeq for HeaderSerializer { type Ok = Header; type Error = SerError; fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error> where T: Serialize, { self.field_types.push(to_field_type(value)?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { self.into_header() } } impl SerializeTuple for HeaderSerializer { type Ok = Header; type Error = SerError; fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error> where T: Serialize, { self.field_types.push(to_field_type(value)?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { self.into_header() } } impl SerializeTupleStruct for HeaderSerializer { type Ok = Header; type Error = SerError; fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error> where T: Serialize, { self.field_types.push(to_field_type(value)?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { self.into_header() } } impl SerializeStruct for HeaderSerializer { type Ok = Header; type Error = SerError; fn serialize_field<T: ?Sized>( &mut self, _key: &'static str, value: &T, ) -> Result<(), Self::Error> where T: Serialize, { self.field_types.push(to_field_type(value)?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { self.into_header() } } #[cfg(test)] mod tests { use crate::chopper::types::{FieldType, Header}; use crate::serde::ser::from_seq::header::to_header; #[test] fn test() { let row = ( false, 5u8, vec![b'a'], 'a', 6.6f64, 7.7f32, 8i32, 123u64, 9i64, 10i16, "a".to_string(), ); let header = 
to_header(&row, 7).unwrap(); assert_eq!(header.field_names().len(), 10); assert_eq!(header.field_types().len(), 10); assert_eq!( header.field_names(), &Header::generate_default_field_names(10) ); assert_eq!( header.field_types(), &vec![ FieldType::Boolean, FieldType::Byte, FieldType::ByteBuf, FieldType::Char, FieldType::Double, FieldType::Float, FieldType::Int, FieldType::Long, FieldType::Short, FieldType::String, ] ); } }
use serde::ser::{Impossible, SerializeSeq, SerializeStruct, SerializeTuple, SerializeTupleStruct}; use serde::{Serialize, Serializer}; use crate::chopper::types::{FieldType, Header}; use crate::serde::ser::error::SerError; use crate::serde::ser::field_type::to_field_type; pub fn to_header<T>(value: &T, timestamp_field_
_index)) } pub struct HeaderSerializer { timestamp_field_index: usize, field_types: Vec<FieldType>, } impl HeaderSerializer { pub fn new(timestamp_field_index: usize) -> HeaderSerializer { HeaderSerializer { timestamp_field_index, field_types: Vec::new(), } } fn into_header(self) -> Result<Header, SerError> { if self.field_types.is_empty() { return Err(SerError::NoTimestampField); } let mut field_types = self.field_types; let field_names = Header::generate_default_field_names(field_types.len() - 1); let idx = self.timestamp_field_index; if field_types[idx] != FieldType::Long { return Err(SerError::InvalidTimestampFieldType); } field_types.remove(idx); Ok(Header::new(field_names, field_types)) } } impl Serializer for HeaderSerializer { type Ok = Header; type Error = SerError; type SerializeSeq = Self; type SerializeTuple = Self; type SerializeTupleStruct = Self; type SerializeTupleVariant = Impossible<Self::Ok, Self::Error>; type SerializeMap = Impossible<Self::Ok, Self::Error>; type SerializeStruct = Self; type SerializeStructVariant = Impossible<Self::Ok, Self::Error>; fn serialize_some<T: ?Sized>(self, value: &T) -> Result<Self::Ok, Self::Error> where T: Serialize, { value.serialize(self) } fn serialize_newtype_struct<T: ?Sized>( self, _name: &'static str, value: &T, ) -> Result<Self::Ok, Self::Error> where T: Serialize, { value.serialize(self) } fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> { Ok(self) } fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> { Ok(self) } fn serialize_tuple_struct( self, _name: &'static str, _len: usize, ) -> Result<Self::SerializeTupleStruct, Self::Error> { Ok(self) } fn serialize_struct( self, _name: &'static str, _len: usize, ) -> Result<Self::SerializeStruct, Self::Error> { Ok(self) } return_error! 
{ <type_not_supported> bool i8 i16 i32 i64 u8 u16 u32 u64 f32 f64 char str bytes none unit unit_struct unit_variant newtype_variant tuple_variant map struct_variant } } impl SerializeSeq for HeaderSerializer { type Ok = Header; type Error = SerError; fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error> where T: Serialize, { self.field_types.push(to_field_type(value)?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { self.into_header() } } impl SerializeTuple for HeaderSerializer { type Ok = Header; type Error = SerError; fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error> where T: Serialize, { self.field_types.push(to_field_type(value)?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { self.into_header() } } impl SerializeTupleStruct for HeaderSerializer { type Ok = Header; type Error = SerError; fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Self::Error> where T: Serialize, { self.field_types.push(to_field_type(value)?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { self.into_header() } } impl SerializeStruct for HeaderSerializer { type Ok = Header; type Error = SerError; fn serialize_field<T: ?Sized>( &mut self, _key: &'static str, value: &T, ) -> Result<(), Self::Error> where T: Serialize, { self.field_types.push(to_field_type(value)?); Ok(()) } fn end(self) -> Result<Self::Ok, Self::Error> { self.into_header() } } #[cfg(test)] mod tests { use crate::chopper::types::{FieldType, Header}; use crate::serde::ser::from_seq::header::to_header; #[test] fn test() { let row = ( false, 5u8, vec![b'a'], 'a', 6.6f64, 7.7f32, 8i32, 123u64, 9i64, 10i16, "a".to_string(), ); let header = to_header(&row, 7).unwrap(); assert_eq!(header.field_names().len(), 10); assert_eq!(header.field_types().len(), 10); assert_eq!( header.field_names(), &Header::generate_default_field_names(10) ); assert_eq!( header.field_types(), &vec![ FieldType::Boolean, FieldType::Byte, 
FieldType::ByteBuf, FieldType::Char, FieldType::Double, FieldType::Float, FieldType::Int, FieldType::Long, FieldType::Short, FieldType::String, ] ); } }
index: usize) -> Result<Header, SerError> where T: Serialize + ?Sized, { value.serialize(HeaderSerializer::new(timestamp_field
function_block-random_span
[]
Rust
tests/functional.rs
pwalski/tchannel_rs
17169ae7d689f255c26be985589867a60045d724
#[cfg(test)] #[macro_use] extern crate log; #[macro_use] extern crate serial_test; use std::sync::Arc; use bytes::Bytes; use test_case::test_case; use tchannel_rs::handler::{HandlerResult, RequestHandler}; use tchannel_rs::messages::MessageChannel; use tchannel_rs::messages::RawMessage; use tchannel_rs::Config; use tchannel_rs::{SubChannel, TChannel, TResult}; #[test_case("service", "endpoint", "header", "body"; "Basic")] #[test_case("service", "endpoint", "header", ""; "Empty body")] #[test_case("service", "endpoint", "", "body"; "Empty header")] #[test_case("service", "endpoint", "", ""; "Empty header and body")] #[test_case("service", "a", "b", "c"; "One byte frame args")] #[test_case("service", "", "", ""; "Zero byte frame args")] #[serial] #[tokio::test] async fn single_frame_msg( service: &str, endpoint: &str, header: &str, body: &str, ) -> Result<(), anyhow::Error> { echo_test(service, endpoint, header, body).await } #[test_case("service", "from_v(&['a' as u8; u16::MAX as usize * 10])", "header", "body"; "Long endpoint/arg1")] #[test_case("service", "endpoint", from_v(&[b'b'; u16::MAX as usize * 10]), "body"; "Long header/arg2")] #[test_case("service", "endpoint", "header", "from_v(&['c' as u8; u16::MAX as usize * 10])"; "Long body/arg3")] #[test_case("service", "from_v(&['a' as u8; u16::MAX as usize * 10])", "from_v(&['b' as u8; u16::MAX as usize * 10])", "from_v(&['c' as u8; u16::MAX as usize * 10])"; "Long all args")] #[serial] #[tokio::test] async fn multi_frame_msg( service: &str, endpoint: &str, header: &str, body: &str, ) -> Result<(), anyhow::Error> { echo_test(service, endpoint, header, body).await } async fn echo_test( service: &str, endpoint: &str, header: &str, body: &str, ) -> Result<(), anyhow::Error> { let _ = env_logger::builder().is_test(true).try_init(); let server = start_echo_server(service, endpoint) .await .expect("Failed to start server"); let req = RawMessage::new( endpoint.to_string(), header.to_string(), 
Bytes::from(body.to_string()), ); let res = make_request(service, req.clone()) .await .expect("Failed to make request"); server.shutdown_server().expect("Failed to shutdown server"); assert_eq!( req.endpoint(), res.endpoint(), "Endpoint fields should match" ); assert_eq!(req.header(), res.header(), "Header fields should match"); assert_eq!(req.body(), res.body(), "Body fields should match"); Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 4)] #[serial] async fn parallel_messages() -> Result<(), anyhow::Error> { let service = "service"; let endpoint = "endpoint"; let server = start_echo_server(service, endpoint).await?; let client = TChannel::new(Config::default())?; let subchannel = client.subchannel(&service).await?; let small_msgs = (0..50) .map(|i| { RawMessage::new( endpoint.to_string(), format!("header-{}", i), Bytes::from(format!("body-{}", i)), ) }) .collect::<Vec<RawMessage>>(); let large_msgs = (0..5_u8) .map(|i| { RawMessage::new( endpoint.to_string(), from_v(&[i as u8; u16::MAX as usize * 20]).to_string(), Bytes::from(from_v(&[i + 5_u8; u16::MAX as usize * 10]).to_string()), ) }) .collect::<Vec<RawMessage>>(); let (small, large) = tokio::join!( tokio::spawn(send_msgs(subchannel.clone(), small_msgs)), tokio::spawn(send_msgs(subchannel.clone(), large_msgs)) ); assert!(small.is_ok()); assert!(large.is_ok()); server.shutdown_server()?; Ok(()) } async fn send_msgs( subchannel: Arc<SubChannel>, msgs: Vec<RawMessage>, ) -> Result<(), anyhow::Error> { for req in msgs { debug!("Sending {} bytes.", req.header().len() + req.body().len()); let res = subchannel.send(req.clone(), &LOCAL_SERVER).await?; assert_eq!(req.endpoint(), res.endpoint(), "Endpoints should match"); assert_eq!(req.header(), res.header(), "Header fields should match"); assert_eq!(req.body(), res.body(), "Body fields should match"); debug!("Sent"); } Ok(()) } async fn start_echo_server<STR: AsRef<str>>(service: STR, endpoint: STR) -> TResult<TChannel> { let server = 
TChannel::new(Config::default())?; let subchannel = server.subchannel(&service).await?; subchannel.register(&endpoint, EchoHandler {}).await?; server.start_server()?; Ok(server) } async fn make_request<STR: AsRef<str>>(service: STR, req: RawMessage) -> HandlerResult<RawMessage> { debug!("Outgoing arg2/header len {}", &req.header().len()); let client = TChannel::new(Config::default())?; let subchannel = client.subchannel(service).await?; subchannel.send(req, LOCAL_SERVER).await } #[derive(Debug)] struct EchoHandler {} impl RequestHandler for EchoHandler { type REQ = RawMessage; type RES = RawMessage; fn handle(&mut self, request: Self::REQ) -> HandlerResult<Self::RES> { debug!("Incoming arg2/header len {}", request.header().len()); Ok(request) } } const LOCAL_SERVER: &str = "127.0.0.1:8888"; fn from_v(v: &[u8]) -> &str { std::str::from_utf8(v).unwrap() }
#[cfg(test)] #[macro_use] extern crate log; #[macro_use] extern crate serial_test; use std::sync::Arc; use bytes::Bytes; use test_case::test_case; use tchannel_rs::handler::{HandlerResult, RequestHandler}; use tchannel_rs::messages::MessageChannel; use tchannel_rs::messages::RawMessage; use tchannel_rs::Config; use tchannel_rs::{SubChannel, TChannel, TResult}; #[test_case("service", "endpoint", "header", "body"; "Basic")] #[test_case("service", "endpoint", "header", ""; "Empty body")] #[test_case("service", "endpoint", "", "body"; "Empty header")] #[test_case("service", "endpoint", "", ""; "Empty header and body")] #[test_case("service", "a", "b", "c"; "One byte frame args")] #[test_case("service", "", "", ""; "Zero byte frame args")] #[serial] #[tokio::test] async fn single_frame_msg( service: &str, endpoint: &str, header: &str, body: &str, ) -> Result<(), anyhow::Error> { echo_test(service, endpoint, header, body).await } #[test_case("service", "from_v(&['a' as u8; u16::MAX as usize * 10])", "header", "body"; "Long endpoint/arg1")] #[test_case("service", "endpoint", from_v(&[b'b'; u16::MAX as usize * 10]), "body"; "Long header/arg2")] #[test_case("service", "endpoint", "header", "from_v(&['c' as u8; u16::MAX as usize * 10])"; "Long body/arg3")] #[test_case("service", "from_v(&['a' as u8; u16::MAX as usize * 10])", "from_v(&['b' as u8; u16::MAX as usize * 10])", "from_v(&['c' as u8; u16::MAX as usize * 10])"; "Long all args")] #[serial] #[tokio::test] async fn multi_frame_msg( service: &str, endpoint: &str, header: &str, body: &str, ) -> Result<(), anyhow::Error> { echo_test(service, endpoint, header, body).await } async fn echo_test( service: &str, endpoint: &str, header: &str, body: &str, ) -> Result<(), anyhow::Error> { let _ = env_logger::builder().is_test(true).try_init(); let server = start_echo_server(service, endpoint) .await .expect("Failed to start server"); let req = RawMessage::new( endpoint.to_string(), header.to_string(), 
Bytes::from(body.to_string()), ); let res = make_request(service, req.clone()) .await .expect("Failed to make request"); server.shutdown_server().expect("Failed to shutdown server"); assert_eq!( req.endpoint(), res.endpoint(), "Endpoint fields should match" ); assert_eq!(req.header(), res.header(), "Header fields should match"); assert_eq!(req.body(), res.body(), "Body fields should match"); Ok(()) } #[tokio::test(flavor = "multi_thread", worker_threads = 4)] #[serial] async fn parallel_messages() -> Result<(), anyhow::Error> { let service = "service"; let endpoint = "endpoint"; let server = start_echo_server(service, endpoint).await?; let client = TChannel::new(Config::default())?; let subchannel = client.subchannel(&service).await?; let small_msgs = (0..50) .map(|i| { RawMessage::new( endpoint.to_string(), format!("header-{}", i), Bytes::from(format!("body-{}", i)), ) }) .collect::<Vec<RawMessage>>(); let large_msgs = (0..5_u8) .map(|i| { RawMessage::new( endpoint.to_string(), from_v(&[i as u8; u16::MAX as usize * 20]).to_string(), Bytes::from(from_v(&[i + 5_u8; u16::MAX as usize * 10]).to_string()), ) }) .collect::<Vec<RawMessage>>(); let (small, large) = tokio::join!( tokio::spawn(send_msgs(subchannel.clone(), small_msgs)), tokio::spawn(send_msgs(subchannel.clone(), large_msgs)) ); assert!(small.is_ok()); assert!(large.is_ok()); server.shutdown_server()?; Ok(()) } async fn send_msgs( subchannel: Arc<SubChannel>, msgs: Vec<RawMessage>, ) -> Result<(), anyhow::Error> { for req in msgs { debug!("Sending {} bytes.", req.header().len() + req.body().len()); let res = subchannel.send(req.clone(), &LOCAL_SERVER).await?; assert_eq!(req.endpoint(), res.endpoint(), "Endpoints should match"); assert_eq!(req.header(), res.header(), "Header fields should match"); assert_eq!(req.body(), res.body(), "Body fields should match"); debug!("Sent"); } Ok(()) }
async fn make_request<STR: AsRef<str>>(service: STR, req: RawMessage) -> HandlerResult<RawMessage> { debug!("Outgoing arg2/header len {}", &req.header().len()); let client = TChannel::new(Config::default())?; let subchannel = client.subchannel(service).await?; subchannel.send(req, LOCAL_SERVER).await } #[derive(Debug)] struct EchoHandler {} impl RequestHandler for EchoHandler { type REQ = RawMessage; type RES = RawMessage; fn handle(&mut self, request: Self::REQ) -> HandlerResult<Self::RES> { debug!("Incoming arg2/header len {}", request.header().len()); Ok(request) } } const LOCAL_SERVER: &str = "127.0.0.1:8888"; fn from_v(v: &[u8]) -> &str { std::str::from_utf8(v).unwrap() }
async fn start_echo_server<STR: AsRef<str>>(service: STR, endpoint: STR) -> TResult<TChannel> { let server = TChannel::new(Config::default())?; let subchannel = server.subchannel(&service).await?; subchannel.register(&endpoint, EchoHandler {}).await?; server.start_server()?; Ok(server) }
function_block-full_function
[ { "content": "fn encode_small_string<STR: AsRef<str>>(value: STR, dst: &mut BytesMut) -> CodecResult<()> {\n\n encode_string_field(value, dst, &BytesMut::put_u8)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 0, "score": 163112.80213647935 }, { "content": "fn encode_string_field<T: TryFrom<usize>, STR: AsRef<str>>(\n\n value: STR,\n\n dst: &mut BytesMut,\n\n encode_len_fn: &dyn Fn(&mut BytesMut, T),\n\n) -> CodecResult<()> {\n\n encode_len(dst, value.as_ref().len(), encode_len_fn)?;\n\n dst.write_str(value.as_ref())?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 1, "score": 155788.76289146388 }, { "content": "fn decode_bitflag<T, F: Fn(u8) -> Option<T>>(byte: u8, decoder: F) -> CodecResult<T> {\n\n decoder(byte).ok_or_else(|| CodecError::Error(format!(\"Unknown flag: {}\", byte)))\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 2, "score": 155607.11789826708 }, { "content": "fn encode_string<STR: AsRef<str>>(value: STR, dst: &mut BytesMut) -> CodecResult<()> {\n\n encode_string_field(value, dst, &BytesMut::put_u16)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 4, "score": 147036.6361164431 }, { "content": "fn bytes_to_string(arg: Option<Bytes>) -> Result<String, FromUtf8Error> {\n\n arg.map_or_else(\n\n || Ok(String::new()),\n\n |b| String::from_utf8(Vec::from(b.chunk())),\n\n )\n\n}\n\n\n", "file_path": "src/messages/json.rs", "rank": 5, "score": 143952.72000982 }, { "content": "fn bytes_to_string(arg: Option<Bytes>) -> Result<String, FromUtf8Error> {\n\n arg.map_or_else(\n\n || Ok(String::new()),\n\n |b| String::from_utf8(Vec::from(b.chunk())),\n\n )\n\n}\n\n\n\nimpl MessageChannel<RawMessage, RawMessage> for SubChannel {\n\n fn send<'a, ADDR: ToSocketAddrs + Send + 'a>(\n\n &'a self,\n\n request: RawMessage,\n\n host: ADDR,\n\n ) -> Pin<Box<dyn Future<Output = HandlerResult<RawMessage>> + Send + '_>> {\n\n Box::pin(self.send(request, host))\n\n }\n\n}\n", "file_path": "src/messages/raw.rs", 
"rank": 6, "score": 143952.72000982 }, { "content": "fn encode_header_fields<T: TryFrom<usize>>(\n\n headers: HashMap<String, String>,\n\n dst: &mut BytesMut,\n\n encode_len_fn: &dyn Fn(&mut BytesMut, T),\n\n encode_string: &dyn Fn(String, &mut BytesMut) -> CodecResult<()>,\n\n) -> CodecResult<()> {\n\n encode_len(dst, headers.len(), encode_len_fn)?;\n\n for (header_key, header_value) in headers {\n\n encode_string(header_key, dst)?;\n\n encode_string(header_value, dst)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 7, "score": 137691.7552055301 }, { "content": "fn decode_headers_field<T: TryInto<usize>>(\n\n src: &mut Bytes,\n\n decode_len_fn: &dyn Fn(&mut Bytes) -> T,\n\n decode_string_fn: &dyn Fn(&mut Bytes) -> CodecResult<String>,\n\n) -> CodecResult<HashMap<String, String>> {\n\n let len = decode_len(src, decode_len_fn)?;\n\n let mut headers = HashMap::new();\n\n for _ in 0..len {\n\n let key = decode_string_fn(src)?;\n\n let val = decode_string_fn(src)?;\n\n headers.insert(key, val);\n\n }\n\n Ok(headers)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 8, "score": 137691.7552055301 }, { "content": "fn convert<MSG: Message>(msg_res: HandlerResult<MSG>) -> MessageArgsResponse {\n\n match msg_res {\n\n Ok(message) => Ok((ResponseCode::Ok, message.try_into()?)),\n\n Err(err) => match err {\n\n HandlerError::MessageError(message) => Ok((ResponseCode::Ok, message.try_into()?)),\n\n HandlerError::GeneralError(message) => Err(TChannelError::Error(message)),\n\n HandlerError::InternalError(err) => Err(err),\n\n },\n\n }\n\n}\n\n\n\nimpl<REQ: Message, RES: Message, HANDLER: RequestHandler<REQ = REQ, RES = RES>> MessageArgsHandler\n\n for RequestHandlerAdapter<REQ, RES, HANDLER>\n\n{\n\n fn handle(\n\n &mut self,\n\n request_args: MessageArgs,\n\n ) -> Pin<Box<dyn Future<Output = MessageArgsResponse> + Send + '_>> {\n\n Box::pin(future::ready(self.handle(request_args)))\n\n }\n\n}\n", "file_path": "src/handler.rs", "rank": 9, 
"score": 137187.86097810662 }, { "content": "fn bytes_to_json(arg: Option<Bytes>) -> Result<Map<String, Value>, CodecError> {\n\n let arg = arg.unwrap_or_default();\n\n Ok(serde_json::from_slice(arg.as_ref())?)\n\n}\n\n\n\nimpl Message for JsonMessage {}\n\n\n\nimpl MessageWithArgs for JsonMessage {\n\n fn args_scheme() -> ArgSchemeValue {\n\n ArgSchemeValue::Json\n\n }\n\n}\n\n\n\nimpl MessageChannel<JsonMessage, JsonMessage> for SubChannel {\n\n fn send<'a, ADDR: ToSocketAddrs + Send + 'a>(\n\n &'a self,\n\n request: JsonMessage,\n\n host: ADDR,\n\n ) -> Pin<Box<dyn Future<Output = HandlerResult<JsonMessage>> + Send + '_>> {\n\n Box::pin(self.send(request, host))\n\n }\n\n}\n", "file_path": "src/messages/json.rs", "rank": 10, "score": 133284.38909969866 }, { "content": "fn encode_small_headers(headers: HashMap<String, String>, dst: &mut BytesMut) -> CodecResult<()> {\n\n encode_header_fields::<u8>(headers, dst, &BytesMut::put_u8, &encode_small_string)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 11, "score": 132800.75125047727 }, { "content": "fn msg(msg: String) -> RawMessage {\n\n RawMessage::new(\"pong\".into(), \"Polo\".into(), msg.into())\n\n}\n", "file_path": "examples/server.rs", "rank": 12, "score": 132637.51927279442 }, { "content": "fn decode_small_headers(src: &mut Bytes) -> CodecResult<HashMap<String, String>> {\n\n decode_headers_field(src, &Bytes::get_u8, &decode_small_string)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 13, "score": 131258.45279556318 }, { "content": "fn decode_arg(src: &mut Bytes) -> CodecResult<Option<Bytes>> {\n\n match src.remaining() {\n\n 0 | 1 => Err(CodecError::Error(\"Cannot read arg length\".to_owned())),\n\n remaining => match src.get_u16() {\n\n 0 => Ok(None),\n\n len if len > (remaining as u16 - 2) => {\n\n Err(CodecError::Error(format!(\"Wrong arg length: {}\", len)))\n\n }\n\n len => Ok(Some(src.split_to(len as usize))),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", 
"rank": 14, "score": 130985.55630741332 }, { "content": "fn encode_args(args: VecDeque<Option<Bytes>>, dst: &mut BytesMut) -> CodecResult<()> {\n\n let args_len = args.len();\n\n if args_len == 0 || args_len > MAX_FRAME_ARGS {\n\n return Err(CodecError::Error(format!(\n\n \"Wrong number of frame args {}\",\n\n args_len\n\n )));\n\n }\n\n for arg in args {\n\n match arg {\n\n None => dst.put_u16(0),\n\n Some(arg) => {\n\n dst.put_u16(arg.len() as u16);\n\n dst.put_slice(arg.as_ref()); // take len bytes from above\n\n }\n\n }\n\n }\n\n if dst.len() > (FRAME_MAX_LENGTH - FRAME_HEADER_LENGTH) as usize {\n\n return Err(CodecError::Error(format!(\"Frame too long: {}\", dst.len())));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 15, "score": 125636.12509919582 }, { "content": "fn decode_args(src: &mut Bytes) -> CodecResult<VecDeque<Option<Bytes>>> {\n\n if src.remaining() == 0 {\n\n return Err(CodecError::Error(\"Frame missing args\".to_owned()));\n\n }\n\n let mut args = VecDeque::new();\n\n while !src.is_empty() && args.len() < MAX_FRAME_ARGS {\n\n args.push_back(decode_arg(src)?);\n\n }\n\n if !src.is_empty() {\n\n return Err(CodecError::Error(\"Incorrect frame length\".to_owned()));\n\n }\n\n Ok(args)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 16, "score": 123362.68199143102 }, { "content": "fn decode_small_string(src: &mut Bytes) -> CodecResult<String> {\n\n decode_string_field(src, &Bytes::get_u8)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 17, "score": 122995.49385912032 }, { "content": "fn get_args_scheme<FIELDS: CallFields>(fields: &FIELDS) -> CodecResult<ArgSchemeValue> {\n\n let headers = fields.headers();\n\n if let Some(scheme) = headers.get(TransportHeaderKey::ArgScheme.to_string().as_str()) {\n\n Ok(ArgSchemeValue::from_str(scheme)?)\n\n } else {\n\n Err(CodecError::Error(\"Missing arg schema arg\".to_owned()))\n\n }\n\n}\n\n\n\nasync fn read_continuation(\n\n frame_input: &mut FrameInput,\n\n 
args_defragmenter: &mut ArgsDefragmenter,\n\n) -> TResult<()> {\n\n debug!(\"Reading continuation\");\n\n while let Some(frame_id) = frame_input.recv().await {\n\n let mut frame = frame_id.frame;\n\n match frame.frame_type() {\n\n Type::CallResponseContinue | Type::CallRequestContinue => {\n\n debug!(\"Reading frame with type {:?}\", frame.frame_type());\n\n let mut continuation = CallContinue::decode(frame.payload_mut())?;\n", "file_path": "src/defragmentation.rs", "rank": 18, "score": 120239.32174856245 }, { "content": "fn decode_string_field<T: Into<usize>>(\n\n src: &mut Bytes,\n\n get_len: &dyn Fn(&mut Bytes) -> T,\n\n) -> CodecResult<String> {\n\n let len = get_len(src);\n\n let bytes = src.copy_to_bytes(len.into());\n\n Ok(String::from_utf8(bytes.chunk().to_vec())?)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 19, "score": 117082.9139623869 }, { "content": "fn encode_headers(headers: HashMap<String, String>, dst: &mut BytesMut) -> CodecResult<()> {\n\n encode_header_fields::<u16>(headers, dst, &BytesMut::put_u16, &encode_string)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 20, "score": 116756.22553887706 }, { "content": "fn decode_headers(src: &mut Bytes) -> CodecResult<HashMap<String, String>> {\n\n decode_headers_field(src, &Bytes::get_u16, &decode_string)\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 21, "score": 114362.42575908896 }, { "content": "fn decode_string(src: &mut Bytes) -> CodecResult<String> {\n\n decode_string_field(src, &Bytes::get_u16)\n\n}\n", "file_path": "src/frames/payloads.rs", "rank": 22, "score": 103987.81311621578 }, { "content": "fn verify_args(call_args: &mut CallArgs) -> CodecResult<&mut VecDeque<Option<Bytes>>> {\n\n match call_args.checksum_type() {\n\n ChecksumType::None => Ok(call_args.args_mut()),\n\n _ => todo!(),\n\n }\n\n}\n\n\n", "file_path": "src/defragmentation.rs", "rank": 23, "score": 103566.63584366857 }, { "content": "fn calculate_payload_limit(fields_len: usize) -> 
usize {\n\n //64KiB max frame size - header size -1 (flag) - serialized fields size\n\n FRAME_MAX_LENGTH as usize - FRAME_HEADER_LENGTH as usize - 1 - fields_len\n\n}\n\n\n", "file_path": "src/fragmentation.rs", "rank": 24, "score": 99447.64148502541 }, { "content": "fn decode_checksum(src: &mut Bytes) -> CodecResult<(ChecksumType, Option<u32>)> {\n\n let checksum_type = decode_bitflag(src.get_u8(), ChecksumType::from_u8)?;\n\n match checksum_type {\n\n ChecksumType::None => Ok((ChecksumType::None, None)),\n\n checksum => Ok((checksum, Some(src.get_u32()))),\n\n }\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 25, "score": 93869.88041896264 }, { "content": "fn encode_len<T: TryFrom<usize>>(\n\n dst: &mut BytesMut,\n\n value: usize,\n\n encode_len_fn: &dyn Fn(&mut BytesMut, T),\n\n) -> CodecResult<()> {\n\n encode_len_fn(\n\n dst,\n\n value\n\n .try_into()\n\n .map_err(|_| CodecError::Error(format!(\"Failed to cast '{}' len.\", value)))?, //TODO impl From for CodecError\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 26, "score": 91771.05942600738 }, { "content": "fn decode_len<T: TryInto<usize>>(\n\n src: &mut Bytes,\n\n decode_len_fn: &dyn Fn(&mut Bytes) -> T,\n\n) -> CodecResult<usize> {\n\n decode_len_fn(src)\n\n .try_into()\n\n .map_err(|_| CodecError::Error(\"Failed to cast len to usize.\".to_string()))\n\n}\n", "file_path": "src/frames/payloads.rs", "rank": 27, "score": 91771.05942600738 }, { "content": "fn encode_checksum(\n\n checksum_type: ChecksumType,\n\n value: Option<u32>,\n\n dst: &mut BytesMut,\n\n) -> CodecResult<()> {\n\n dst.put_u8(checksum_type as u8);\n\n if checksum_type != ChecksumType::None {\n\n dst.put_u32(value.ok_or_else(|| CodecError::Error(\"Missing checksum value.\".to_owned()))?)\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 28, "score": 77882.21608853695 }, { "content": " protected static TChannel createClient() throws Exception {\n\n TChannel tchannel = new 
TChannel.Builder(\"client\")\n\n .build();\n\n tchannel.makeSubChannel(\"server\");\n\n return tchannel;\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 29, "score": 77873.491417044 }, { "content": "fn first_addr<ADDR: ToSocketAddrs>(addr: ADDR) -> ConnectionResult<SocketAddr> {\n\n let mut addrs = addr.to_socket_addrs()?;\n\n if let Some(addr) = addrs.next() {\n\n return Ok(addr);\n\n }\n\n Err(ConnectionError::Error(\n\n \"Unable to get host addr\".to_string(),\n\n ))\n\n}\n\n\n\nasync fn send_frames(frames: TFrameStream, frames_out: &FrameOutput) -> ConnectionResult<()> {\n\n debug!(\"Sending frames\");\n\n frames\n\n .then(|frame| frames_out.send(frame))\n\n .inspect_err(|err| error!(\"Failed to send frame {:?}\", err))\n\n .try_for_each(|_res| future::ready(Ok(())))\n\n .await\n\n}\n", "file_path": "src/subchannel.rs", "rank": 30, "score": 77358.73997054958 }, { "content": "/// Trait for handling requests with usage of `async`.\n\n///\n\n/// Handler can be registered under an `endpoint` name by calling [`crate::SubChannel::register_async`] function.\n\npub trait RequestHandlerAsync: Debug + Sync + Send {\n\n type REQ: Message;\n\n type RES: Message;\n\n fn handle(\n\n &mut self,\n\n request: Self::REQ,\n\n ) -> Pin<Box<dyn Future<Output = HandlerResult<Self::RES>> + Send + '_>>;\n\n}\n\n\n", "file_path": "src/handler.rs", "rank": 31, "score": 73489.5395327725 }, { "content": "fn calculate_checksum(_args: &VecDeque<Option<Bytes>>, csum_type: ChecksumType) -> Option<u32> {\n\n match csum_type {\n\n ChecksumType::None => None,\n\n other => todo!(\"Unsupported checksum type {:?}\", other),\n\n }\n\n}\n\n\n\n// Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::frames::payloads::CallContinue;\n\n use crate::frames::payloads::CallRequest;\n\n use crate::frames::TFrame;\n\n use futures::StreamExt;\n\n use tokio_test::*;\n\n\n\n const SERVICE_NAME: &str = \"test_service\";\n\n const 
ARG_SCHEME: ArgSchemeValue = ArgSchemeValue::Json;\n", "file_path": "src/fragmentation.rs", "rank": 32, "score": 71911.85374236968 }, { "content": "package tchannel.rs.samples.server;\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 33, "score": 65907.46808197598 }, { "content": "fn checksum_len(checksum_type: ChecksumType) -> usize {\n\n match checksum_type {\n\n ChecksumType::None => 1, // checksum_type\n\n _ => 5, // checksum_type + checksum\n\n }\n\n}\n\n\n", "file_path": "src/fragmentation.rs", "rank": 34, "score": 65906.57483966749 }, { "content": " protected static TChannel createServer() throws Exception {\n\n TChannel tchannel = new TChannel.Builder(\"server\")\n\n .setServerHost(InetAddress.getByAddress(new byte[] { 0, 0, 0, 0 }))\n\n .setServerPort(8888)\n\n .build();\n\n tchannel.makeSubChannel(\"server\")\n\n .register(\"pong\", new ExampleRawHandler());\n\n tchannel.listen().channel().closeFuture().sync();\n\n return tchannel;\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 35, "score": 63102.94212749452 }, { "content": " private SyncServer() {\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 36, "score": 62578.49928384344 }, { "content": " public static void main(String[] args) throws Exception {\n\n TChannel server = createServer();\n\n final long start = System.currentTimeMillis();\n\n System.out.println(String.format(\"%nTime cost: %dms\", System.currentTimeMillis() - start));\n\n server.shutdown(false);\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 37, "score": 61538.64457416082 }, { "content": "use std::fmt::Debug;\n\nuse strum_macros::Display;\n\nuse strum_macros::EnumString;\n\n\n\n#[derive(Debug, Display, PartialEq, Eq, Hash)]\n\npub enum TransportHeaderKey {\n\n #[strum(serialize = \"as\")]\n\n ArgScheme,\n\n 
#[allow(dead_code)]\n\n #[strum(serialize = \"cas\")]\n\n ClaimAtStart,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"caf\")]\n\n ClaimAtFinish,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"cn\")]\n\n CallerName,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"re\")]\n\n RetryFlags,\n", "file_path": "src/frames/headers.rs", "rank": 38, "score": 61150.75912522147 }, { "content": " RetryOnConnectionError,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"t\")]\n\n RetryOnTimeout,\n\n}\n\n\n\n#[derive(Debug, Display, EnumString, PartialEq, Eq)]\n\npub enum InitHeaderKey {\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"host_port\")]\n\n HostPort,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"process_name\")]\n\n ProcessName,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"tchannel_language\")]\n\n TChannelLanguage,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"tchannel_language_version\")]\n\n TChannelLanguageVersion,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"tchannel_version\")]\n\n TChannelVersion,\n\n}\n", "file_path": "src/frames/headers.rs", "rank": 39, "score": 61143.76985782571 }, { "content": " #[allow(dead_code)]\n\n #[strum(serialize = \"se\")]\n\n SpeculativeExecution,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"fd\")]\n\n FailureDomain,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"sk\")]\n\n ShardKey,\n\n}\n\n\n\n#[derive(Debug, Display, EnumString, PartialEq, Eq)]\n\npub enum ArgSchemeValue {\n\n #[strum(serialize = \"raw\")]\n\n Raw,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"json\")]\n\n Json,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"http\")]\n", "file_path": "src/frames/headers.rs", "rank": 40, "score": 61141.31856359801 }, { "content": " Http,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"thrift\")]\n\n Thrift,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"sthrift\")]\n\n StreamingThrift,\n\n //TODO how to handle it?\n\n #[allow(dead_code)]\n\n #[strum(disabled)]\n\n 
Custom(String),\n\n}\n\n\n\n#[derive(Debug, Display, PartialEq, Eq)]\n\npub enum RetryFlagValue {\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"n\")]\n\n NoRetry,\n\n #[allow(dead_code)]\n\n #[strum(serialize = \"c\")]\n", "file_path": "src/frames/headers.rs", "rank": 41, "score": 61139.22065421439 }, { "content": " static class ExampleRawHandler extends RawRequestHandler {\n\n private int count = 0;\n\n\n\n @Override\n\n public RawResponse handleImpl(RawRequest request) {\n\n System.out.println(String.format(\"Request received: header: %s, body: %s\",\n\n request.getHeader(),\n\n request.getBody()));\n\n count++;\n\n switch (count) {\n\n case 1:\n\n return createResponse(request, ResponseCode.OK, \"Polo\", \"Pong!\");\n\n case 2:\n\n return createResponse(request, ResponseCode.Error, \"Polo\", \"I feel bad ...\");\n\n default:\n\n return createResponse(request, ResponseCode.Error, \"Polo\", \"Not again!\");\n\n }\n\n }\n\n\n\n private RawResponse createResponse(RawRequest request, ResponseCode code, String header, String body) {\n\n return new RawResponse.Builder(request)\n\n .setTransportHeaders(request.getTransportHeaders())\n\n .setHeader(header)\n\n .setBody(body)\n\n .build();\n\n }\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 42, "score": 59563.1135336067 }, { "content": " private int count = 0;\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 43, "score": 58623.89629084291 }, { "content": "pub trait Call<FIELDS: CallFields> {\n\n fn fields(self) -> FIELDS;\n\n fn flags(&self) -> Flags;\n\n fn args(&mut self) -> &mut CallArgs;\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 44, "score": 58420.350194168714 }, { "content": "#[derive(Debug, new)]\n\nstruct FrameHandler {\n\n subchannels: SharedSubChannels,\n\n frame_dispatchers: Arc<FramesDispatcher>,\n\n sender: Sender<TFrameId>,\n\n}\n\n\n\nimpl FrameHandler {\n\n pub async fn 
handle(&self, id: u32, frame_input: FrameInput) -> TResult<()> {\n\n debug!(\"Handling message (id {})\", &id);\n\n self.handle_input(id, frame_input)\n\n .and_then(|frames| self.send_frames(frames))\n\n .await\n\n }\n\n\n\n async fn handle_input(&self, id: u32, frame_input: FrameInput) -> TResult<Vec<TFrameId>> {\n\n let (request_fields, message_args) =\n\n RequestDefragmenter::new(frame_input).read_request().await?;\n\n self.frame_dispatchers.deregister(&id).await;\n\n let subchannel = self.get_subchannel(request_fields.service()).await?;\n\n let (response_code, response_args) = subchannel.handle(message_args).await?;\n", "file_path": "src/server.rs", "rank": 45, "score": 57837.799376591836 }, { "content": " private RawResponse createResponse(RawRequest request, ResponseCode code, String header, String body) {\n\n return new RawResponse.Builder(request)\n\n .setTransportHeaders(request.getTransportHeaders())\n\n .setHeader(header)\n\n .setBody(body)\n\n .build();\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 46, "score": 57714.88702097696 }, { "content": " @Override\n\n public RawResponse handleImpl(RawRequest request) {\n\n System.out.println(String.format(\"Request received: header: %s, body: %s\",\n\n request.getHeader(),\n\n request.getBody()));\n\n count++;\n\n switch (count) {\n\n case 1:\n\n return createResponse(request, ResponseCode.OK, \"Polo\", \"Pong!\");\n\n case 2:\n\n return createResponse(request, ResponseCode.Error, \"Polo\", \"I feel bad ...\");\n\n default:\n\n return createResponse(request, ResponseCode.Error, \"Polo\", \"Not again!\");\n\n }\n", "file_path": "examples-jvm-server/src/main/java/tchannel/rs/samples/server/SyncServer.java", "rank": 47, "score": 57714.88702097696 }, { "content": "pub trait CallFields {\n\n fn headers(&self) -> &HashMap<String, String>;\n\n fn tracing(&self) -> &Tracing;\n\n}\n\n\n\n#[derive(Debug, Getters, new)]\n\npub struct CallRequestFields {\n\n #[get 
= \"pub\"]\n\n /// ttl:4\n\n ttl: u32,\n\n #[get = \"pub\"]\n\n /// tracing:25\n\n tracing: Tracing,\n\n #[get = \"pub\"]\n\n /// service~1\n\n service: String,\n\n #[get = \"pub\"]\n\n /// nh:1 (hk~1, hv~1){nh}\n\n headers: HashMap<String, String>,\n\n}\n", "file_path": "src/frames/payloads.rs", "rank": 48, "score": 53457.755960004986 }, { "content": "type TFramedWrite = FramedWrite<OwnedWriteHalf, TFrameIdCodec>;\n\n\n\nimpl Server {\n\n pub fn new(subchannels: SharedSubChannels, buffer_size: usize, server_tasks: usize) -> Server {\n\n Server {\n\n subchannels,\n\n frame_dispatchers: Arc::new(FramesDispatcher::new(buffer_size)),\n\n buffer_size,\n\n server_tasks,\n\n }\n\n }\n\n\n\n pub async fn run(config: Arc<Config>, subchannels: SharedSubChannels) -> ConnectionResult<()> {\n\n debug!(\"Starting server on {}\", config.server_address);\n\n let listener = TcpListener::bind(config.server_address).await?;\n\n loop {\n\n let (stream, addr) = listener.accept().await?;\n\n debug!(\"Handling incoming connection from {}\", addr);\n\n let subchannels = subchannels.clone();\n\n let mut server =\n", "file_path": "src/server.rs", "rank": 49, "score": 53268.432660201506 }, { "content": "type TFramedRead = FramedRead<OwnedReadHalf, TFrameIdCodec>;\n", "file_path": "src/server.rs", "rank": 50, "score": 53268.432660201506 }, { "content": "/// Trait for handling requests.\n\n///\n\n/// Handler can be registered under an `endpoint` name by calling [`crate::SubChannel::register`] function.\n\npub trait RequestHandler: Debug + Sync + Send {\n\n type REQ: Message;\n\n type RES: Message;\n\n fn handle(&mut self, request: Self::REQ) -> HandlerResult<Self::RES>;\n\n}\n\n\n\npub(crate) trait MessageArgsHandler: Debug + Send + Sync {\n\n fn handle(\n\n &mut self,\n\n request: MessageArgs,\n\n ) -> Pin<Box<dyn Future<Output = MessageArgsResponse> + Send + '_>>;\n\n}\n\n\n\n#[derive(Debug, new)]\n\npub(crate) struct RequestHandlerAsyncAdapter<\n\n REQ: Message,\n\n RES: Message,\n\n 
HANDLER: RequestHandlerAsync<REQ = REQ, RES = RES>,\n\n>(HANDLER);\n\n\n", "file_path": "src/handler.rs", "rank": 51, "score": 50873.13207505176 }, { "content": "fn create_tracing() -> Tracing {\n\n Tracing::new(0, 0, 0, TraceFlags::NONE)\n\n}\n\n\n", "file_path": "src/fragmentation.rs", "rank": 52, "score": 48772.98230632218 }, { "content": "pub trait Message: MessageWithArgs + Debug + Send {}\n\n\n", "file_path": "src/messages/mod.rs", "rank": 53, "score": 48665.189299971615 }, { "content": "// Mutex to be Sync, Cell to get owned type on Drop impl TChannel, Option for lazy initialization.\n\ntype ServerHandle = Mutex<Option<JoinHandle<ConnectionResult<()>>>>;\n\n\n\n/// TChannel general result.\n\npub type TResult<T> = Result<T, TChannelError>;\n\n\n\n/// TChannel protocol. Keeps started server handle and created [`SubChannel`s](crate::SubChannel).\n\npub struct TChannel {\n\n config: Arc<Config>,\n\n connection_pools: Arc<ConnectionPools>,\n\n subchannels: SharedSubChannels,\n\n server_handle: ServerHandle,\n\n}\n\n\n\nimpl TChannel {\n\n /// Initializes TChannel.\n\n pub fn new(config: Config) -> TResult<Self> {\n\n let config = Arc::new(config);\n\n let connection_pools = ConnectionPools::new(config.clone());\n\n let subchannels = Arc::new(RwLock::new(HashMap::new()));\n\n Ok(TChannel {\n", "file_path": "src/channel.rs", "rank": 54, "score": 46281.46941861453 }, { "content": "type HandlerRef = Arc<Mutex<Box<dyn MessageArgsHandler>>>;\n\n\n\n/// TChannel protocol subchannel.\n\n///\n\n/// Allows to send [`Message`](crate::messages::Message) and [`register`](Self::register)/[`unregister`](Self::unregister) [`RequestHandler`](crate::handler::RequestHandler) (or [`RequestHandlerAsync`](crate::handler::RequestHandlerAsync)).\n\n#[derive(Debug, new)]\n\npub struct SubChannel {\n\n service_name: String,\n\n connection_pools: Arc<ConnectionPools>,\n\n #[new(default)]\n\n handlers: RwLock<HashMap<String, HandlerRef>>,\n\n}\n\n\n\nimpl SubChannel {\n\n pub(super) async 
fn send<REQ: Message, RES: Message, ADDR: ToSocketAddrs>(\n\n &self,\n\n request: REQ,\n\n host: ADDR,\n\n ) -> HandlerResult<RES> {\n\n let (frames_in, frames_out) = self.create_frame_io(host).await?;\n", "file_path": "src/subchannel.rs", "rank": 55, "score": 45142.65305795227 }, { "content": "pub trait MessageChannel<REQ: Message, RES: Message> {\n\n /// Sends `message` to `host` address.\n\n ///\n\n /// Error message response arrives as [`super::errors::HandlerError::MessageError`].\n\n /// # Arguments\n\n /// * `request` - Implementation of `Message` trait.\n\n /// * `host` - Address used to connect to host or find previously pooled connection.\n\n fn send<'a, ADDR: ToSocketAddrs + Send + 'a>(\n\n &'a self,\n\n request: REQ,\n\n host: ADDR,\n\n ) -> Pin<Box<dyn Future<Output = HandlerResult<RES>> + Send + '_>>;\n\n}\n\n\n\npub(crate) mod args {\n\n use super::*;\n\n\n", "file_path": "src/messages/mod.rs", "rank": 56, "score": 41710.928722354176 }, { "content": "", "file_path": "examples-jvm-server/README.md", "rank": 57, "score": 38157.11650780853 }, { "content": "/**\n\n# Besides of `tchannel_rs` the example requires following dependencies:\n\ntokio = { version = \"^1\", features = [\"macros\"] }\n\nenv_logger = \"^0\" # to print logs\n\n*/\n\nuse tchannel_rs::handler::{HandlerResult, RequestHandler};\n\nuse tchannel_rs::messages::{MessageChannel, RawMessage};\n\nuse tchannel_rs::{Config, TChannel, TResult};\n\n\n\n#[tokio::main]\n\nasync fn main() -> TResult<()> {\n\n // To see TChannel logs\n\n env_logger::init();\n\n // Server\n\n let tserver = TChannel::new(Config::default())?;\n\n let subchannel = tserver.subchannel(\"service\").await?;\n\n subchannel.register(\"endpoint\", Handler {}).await?;\n\n tserver.start_server()?;\n\n\n\n // Client\n", "file_path": "examples/basic.rs", "rank": 58, "score": 33173.663236311375 }, { "content": " let tclient = TChannel::new(Config::default())?;\n\n let subchannel = tclient.subchannel(\"service\").await?;\n\n let 
request = RawMessage::new(\"endpoint\".into(), \"a\".into(), \"b\".into());\n\n let response_res = subchannel.send(request, \"127.0.0.1:8888\").await;\n\n\n\n // Server shutdown\n\n tserver.shutdown_server()?;\n\n\n\n assert!(response_res.is_ok());\n\n let response = response_res.unwrap();\n\n assert_eq!(\"a\", response.header());\n\n assert_eq!(\"y\".as_bytes(), response.body().as_ref());\n\n Ok(())\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "examples/basic.rs", "rank": 59, "score": 33173.38833130886 }, { "content": "/**\n\n# Besides of `tchannel_rs` the example requires following dependencies:\n\ntokio = { version = \"^1\", features = [\"macros\"] }\n\nlog = \"^0\"\n\nenv_logger = \"^0\" # to print logs\n\n */\n\nuse log::{error, info};\n\nuse tchannel_rs::messages::{MessageChannel, RawMessage};\n\nuse tchannel_rs::{Config, TChannel};\n\n\n", "file_path": "examples/client.rs", "rank": 60, "score": 33155.9777387478 }, { "content": " async fn create_frames<REQ: Message>(&self, request: REQ) -> TResult<TFrameStream> {\n\n let message_args = request.try_into()?;\n\n RequestFragmenter::new(self.service_name.clone(), message_args).create_frames()\n\n }\n\n\n\n pub(crate) async fn handle(&self, request: MessageArgs) -> MessageArgsResponse {\n\n let endpoint = Self::read_endpoint_name(&request)?;\n\n let handler_locked = self.get_handler(endpoint).await?;\n\n let mut handler = handler_locked.lock().await; //TODO do I really want Mutex? maybe handle(&self,..) instead of handle(&mut self,..) 
?\n\n handler.handle(request).await\n\n }\n\n\n\n async fn get_handler(&self, endpoint: String) -> TResult<HandlerRef> {\n\n let handlers = self.handlers.read().await;\n\n match handlers.get(&endpoint) {\n\n Some(handler) => Ok(handler.clone()),\n\n None => Err(TChannelError::Error(format!(\n\n \"No handler with name '{}'.\",\n\n endpoint\n\n ))),\n", "file_path": "src/subchannel.rs", "rank": 61, "score": 32972.0472199674 }, { "content": "use crate::channel::TResult;\n\nuse crate::connection::pool::ConnectionPools;\n\nuse crate::connection::{ConnectionResult, FrameInput, FrameOutput};\n\nuse crate::defragmentation::ResponseDefragmenter;\n\nuse crate::errors::{CodecError, ConnectionError, HandlerError, TChannelError};\n\nuse crate::fragmentation::RequestFragmenter;\n\nuse crate::frames::TFrameStream;\n\nuse crate::handler::{\n\n HandlerResult, MessageArgsHandler, RequestHandler, RequestHandlerAdapter, RequestHandlerAsync,\n\n RequestHandlerAsyncAdapter,\n\n};\n\nuse crate::messages::args::{MessageArgs, MessageArgsResponse, ResponseCode};\n\nuse crate::messages::Message;\n\nuse futures::StreamExt;\n\nuse futures::{future, TryStreamExt};\n\nuse log::{debug, error};\n\nuse std::collections::HashMap;\n\nuse std::net::{SocketAddr, ToSocketAddrs};\n\nuse std::sync::Arc;\n\nuse tokio::sync::{Mutex, RwLock};\n\n\n", "file_path": "src/subchannel.rs", "rank": 62, "score": 32968.939005033506 }, { "content": " &self,\n\n request: REQ,\n\n frames_in: FrameInput,\n\n frames_out: &FrameOutput,\n\n ) -> TResult<(ResponseCode, RES)> {\n\n let frames = self.create_frames(request).await?;\n\n send_frames(frames, frames_out).await?;\n\n let response = ResponseDefragmenter::new(frames_in)\n\n .read_response_msg()\n\n .await;\n\n frames_out.close().await; //TODO ugly and broken\n\n response\n\n }\n\n\n\n /// Registers request handler.\n\n pub async fn register<REQ, RES, HANDLER>(\n\n &self,\n\n endpoint: impl AsRef<str>,\n\n request_handler: HANDLER,\n\n ) -> TResult<()>\n", "file_path": 
"src/subchannel.rs", "rank": 63, "score": 32967.80632981208 }, { "content": " let response_res = self.send_internal(request, frames_in, &frames_out).await;\n\n frames_out.close().await; //TODO still ugly\n\n match response_res {\n\n Ok((code, response)) => match code {\n\n ResponseCode::Ok => Ok(response),\n\n ResponseCode::Error => Err(HandlerError::MessageError(response)),\n\n },\n\n Err(err) => Err(HandlerError::InternalError(err)),\n\n }\n\n }\n\n\n\n async fn create_frame_io<ADDR: ToSocketAddrs>(\n\n &self,\n\n host: ADDR,\n\n ) -> TResult<(FrameInput, FrameOutput)> {\n\n let host = first_addr(host)?;\n\n self.connect(host).await\n\n }\n\n\n\n pub(super) async fn send_internal<REQ: Message, RES: Message>(\n", "file_path": "src/subchannel.rs", "rank": 64, "score": 32964.89049365411 }, { "content": " endpoint: impl AsRef<str>,\n\n request_handler: HandlerRef,\n\n ) -> TResult<()> {\n\n let mut handlers = self.handlers.write().await;\n\n if handlers.contains_key(endpoint.as_ref()) {\n\n return Err(TChannelError::Error(format!(\n\n \"Handler already registered for '{}'\",\n\n endpoint.as_ref()\n\n )));\n\n }\n\n handlers.insert(endpoint.as_ref().to_string(), request_handler);\n\n Ok(()) //TODO return &mut of nested handler?\n\n }\n\n\n\n async fn connect(&self, host: SocketAddr) -> TResult<(FrameInput, FrameOutput)> {\n\n let pool = self.connection_pools.get(host).await?;\n\n let connection = pool.get().await?;\n\n Ok(connection.new_frames_io().await?)\n\n }\n\n\n", "file_path": "src/subchannel.rs", "rank": 65, "score": 32962.660602501346 }, { "content": " }\n\n }\n\n\n\n fn read_endpoint_name(request: &MessageArgs) -> Result<String, CodecError> {\n\n match request.args.get(0) {\n\n Some(arg) => Ok(String::from_utf8(arg.to_vec())?),\n\n None => Err(CodecError::Error(\"Missing arg1/endpoint name\".to_string())),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/subchannel.rs", "rank": 66, "score": 32962.59724350685 }, { "content": " where\n\n REQ: Message + 'static,\n\n 
RES: Message + 'static,\n\n HANDLER: RequestHandler<REQ = REQ, RES = RES> + 'static,\n\n {\n\n let handler_adapter = RequestHandlerAdapter::new(request_handler);\n\n self.register_handler(endpoint, Arc::new(Mutex::new(Box::new(handler_adapter))))\n\n .await\n\n }\n\n\n\n /// Registers async request handler.\n\n pub async fn register_async<REQ, RES, HANDLER>(\n\n &self,\n\n endpoint: impl AsRef<str>,\n\n request_handler: HANDLER,\n\n ) -> TResult<()>\n\n where\n\n REQ: Message + 'static,\n\n RES: Message + 'static,\n\n HANDLER: RequestHandlerAsync<REQ = REQ, RES = RES> + 'static,\n", "file_path": "src/subchannel.rs", "rank": 67, "score": 32961.77455614194 }, { "content": " {\n\n let handler_adapter = RequestHandlerAsyncAdapter::new(request_handler);\n\n self.register_handler(endpoint, Arc::new(Mutex::new(Box::new(handler_adapter))))\n\n .await\n\n }\n\n\n\n /// Unregisters request handler. Found handler will be dropped.\n\n pub async fn unregister(&mut self, endpoint: impl AsRef<str>) -> TResult<()> {\n\n let mut handlers = self.handlers.write().await;\n\n match handlers.remove(endpoint.as_ref()) {\n\n Some(_) => Ok(()),\n\n None => Err(TChannelError::Error(format!(\n\n \"Handler '{}' is missing.\",\n\n endpoint.as_ref()\n\n ))),\n\n }\n\n }\n\n\n\n async fn register_handler(\n\n &self,\n", "file_path": "src/subchannel.rs", "rank": 68, "score": 32961.77379747175 }, { "content": " }\n\n }\n\n\n\n async fn get_subchannel<STR: AsRef<str>>(&self, service: STR) -> TResult<Arc<SubChannel>> {\n\n let subchannels = self.subchannels.read().await;\n\n subchannels.get(service.as_ref()).cloned().ok_or_else(|| {\n\n TChannelError::Error(format!(\"Failed to find subchannel '{}'\", service.as_ref()))\n\n })\n\n }\n\n\n\n async fn send_frames(&self, frames: Vec<TFrameId>) -> TResult<()> {\n\n for frame in frames {\n\n self.sender.send(frame).await?\n\n }\n\n Ok(())\n\n }\n\n\n\n fn to_error_frame(&self, code: ErrorCode, msg: String, id: u32) -> CodecResult<TFrameId> {\n\n //TODO 
add meaningful tracing\n\n let tracing = Tracing::default();\n\n let err = ErrorMsg::new(code, tracing, msg);\n\n let frame = TFrame::new(Type::Error, err.encode_bytes()?);\n\n Ok(TFrameId::new(id, frame))\n\n }\n\n}\n", "file_path": "src/server.rs", "rank": 69, "score": 32435.299960486627 }, { "content": "use crate::channel::{SharedSubChannels, TResult};\n\nuse crate::config::Config;\n\nuse crate::connection::{ConnectionResult, FrameInput, FrameSender, FramesDispatcher};\n\nuse crate::defragmentation::RequestDefragmenter;\n\nuse crate::errors::{ConnectionError, TChannelError};\n\nuse crate::fragmentation::ResponseFragmenter;\n\nuse crate::frames::headers::InitHeaderKey;\n\nuse crate::frames::payloads::{Codec, CodecResult, ErrorCode, ErrorMsg, PROTOCOL_VERSION};\n\nuse crate::frames::payloads::{Init, Tracing};\n\nuse crate::frames::{TFrame, TFrameId, TFrameIdCodec, Type};\n\nuse crate::SubChannel;\n\nuse futures::TryFutureExt;\n\nuse futures::{self, SinkExt, StreamExt, TryStreamExt};\n\nuse log::debug;\n\nuse std::collections::HashMap;\n\nuse std::fmt::Debug;\n\nuse std::sync::Arc;\n\nuse tokio::net::tcp::{OwnedReadHalf, OwnedWriteHalf};\n\nuse tokio::net::{TcpListener, TcpStream};\n\nuse tokio::sync::mpsc;\n", "file_path": "src/server.rs", "rank": 70, "score": 32429.3385629242 }, { "content": " match ResponseFragmenter::new(request_fields.service(), response_code, response_args)\n\n .create_frames()\n\n {\n\n Ok(frames) => Ok(frames.map(|f| TFrameId::new(id, f)).collect().await),\n\n Err(TChannelError::ConnectionError(err)) => Ok(vec![self.to_error_frame(\n\n ErrorCode::NetworkError,\n\n err.to_string(),\n\n id,\n\n )?]),\n\n Err(TChannelError::Error(msg)) => Ok(vec![self.to_error_frame(\n\n ErrorCode::UnexpectedError,\n\n msg,\n\n id,\n\n )?]),\n\n Err(err @ TChannelError::ConnectionPoolError(_)) => Err(err),\n\n Err(err) => Ok(vec![self.to_error_frame(\n\n ErrorCode::UnexpectedError,\n\n err.to_string(),\n\n id,\n\n )?]),\n", "file_path": "src/server.rs", 
"rank": 71, "score": 32427.507023240778 }, { "content": " debug!(\"Err while dispatching frame: {}\", err.to_string());\n\n TChannelError::from(ConnectionError::from((id, err, Tracing::default())))\n\n })\n\n }\n\n\n\n async fn handle_init_handshake(\n\n &self,\n\n framed_read: &mut TFramedRead,\n\n framed_write: &mut TFramedWrite,\n\n ) -> TResult<()> {\n\n match framed_read.next().await {\n\n Some(Ok(mut frame_id)) => {\n\n Self::check_init_req(&mut frame_id).await?;\n\n Self::send_init_res(framed_write, *frame_id.id()).await?;\n\n Ok(())\n\n }\n\n Some(Err(err)) => Err(TChannelError::CodecError(err)),\n\n None => Err(TChannelError::Error(\n\n \"Received no Init request.\".to_string(),\n\n )),\n", "file_path": "src/server.rs", "rank": 72, "score": 32427.059086671696 }, { "content": "use tokio::sync::mpsc::Sender;\n\nuse tokio_util::codec::{FramedRead, FramedWrite};\n\n\n\n#[derive(Debug)]\n\npub struct Server {\n\n subchannels: SharedSubChannels,\n\n frame_dispatchers: Arc<FramesDispatcher>,\n\n buffer_size: usize,\n\n server_tasks: usize,\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 73, "score": 32426.976667188694 }, { "content": " }\n\n }\n\n\n\n async fn check_init_req(frame_id: &mut TFrameId) -> ConnectionResult<()> {\n\n let frame = frame_id.frame_mut();\n\n match frame.frame_type() {\n\n Type::InitRequest => {\n\n let init = Init::decode(frame.payload_mut())?;\n\n debug!(\"Received Init response: {:?}\", init);\n\n match *init.version() {\n\n PROTOCOL_VERSION => Ok(()),\n\n other_version => Err(ConnectionError::Error(format!(\n\n \"Unsupported protocol version: {} \",\n\n other_version\n\n ))),\n\n }\n\n }\n\n Type::Error => {\n\n let error = ErrorMsg::decode(frame.payload_mut())?;\n\n Err(ConnectionError::MessageErrorId(error, *frame_id.id()))\n", "file_path": "src/server.rs", "rank": 74, "score": 32426.951387848894 }, { "content": " Server::new(subchannels, config.frame_buffer_size, config.server_tasks);\n\n tokio::spawn(async move {\n\n if let 
Err(err) = server.handle_connection(stream).await {\n\n error!(\"Connection error: {}\", err);\n\n }\n\n });\n\n }\n\n }\n\n\n\n async fn handle_connection(&mut self, stream: TcpStream) -> TResult<()> {\n\n let (read, write) = stream.into_split();\n\n let mut framed_read = FramedRead::new(read, TFrameIdCodec {});\n\n let mut framed_write = FramedWrite::new(write, TFrameIdCodec {});\n\n self.handle_init_handshake(&mut framed_read, &mut framed_write)\n\n .await?;\n\n\n\n let (sender, receiver) = mpsc::channel::<TFrameId>(self.buffer_size);\n\n FrameSender::spawn(framed_write, receiver, self.buffer_size); //TODO should use same value?\n\n let handler = FrameHandler::new(\n\n self.subchannels.clone(),\n", "file_path": "src/server.rs", "rank": 75, "score": 32424.719218167284 }, { "content": " }\n\n other_type => Err(ConnectionError::UnexpectedResponseError(*other_type)),\n\n }\n\n }\n\n\n\n async fn send_init_res(framed_write: &mut TFramedWrite, id: u32) -> ConnectionResult<()> {\n\n //TODO properly handle Init headers\n\n let headers = HashMap::from_iter(IntoIterator::into_iter([(\n\n InitHeaderKey::TChannelLanguage.to_string(),\n\n \"rust\".to_string(),\n\n )]));\n\n let init = Init::new(PROTOCOL_VERSION, headers);\n\n let init_frame_id =\n\n TFrameId::new(id, TFrame::new(Type::InitResponse, init.encode_bytes()?));\n\n Ok(framed_write.send(init_frame_id).await?)\n\n }\n\n}\n\n\n", "file_path": "src/server.rs", "rank": 76, "score": 32423.594943966815 }, { "content": "/**\n\n# Besides of `tchannel_rs` the example requires following dependencies:\n\ntokio = { version = \"^1\", features = [\"macros\"] }\n\nlog = \"^0\"\n\nenv_logger = \"^0\" # to print logs\n\n */\n\nuse log::{error, info};\n\nuse std::ops::AddAssign;\n\nuse std::time::Duration;\n\nuse tchannel_rs::errors::HandlerError;\n\nuse tchannel_rs::handler::{HandlerResult, RequestHandler};\n\nuse tchannel_rs::messages::RawMessage;\n\nuse tchannel_rs::{Config, TChannel};\n\n\n", "file_path": "examples/server.rs", 
"rank": 77, "score": 32423.57893286578 }, { "content": " self.frame_dispatchers.clone(),\n\n sender,\n\n );\n\n\n\n framed_read\n\n .map_err(TChannelError::CodecError)\n\n .try_filter_map(|frame| self.dispatch_frame(frame))\n\n .try_for_each_concurrent(self.server_tasks, |(id, frames)| handler.handle(id, frames))\n\n .await\n\n //TODO send fatal protocol error when it fails\n\n }\n\n\n\n async fn dispatch_frame(&self, frame: TFrameId) -> TResult<Option<(u32, FrameInput)>> {\n\n let id = *frame.id();\n\n self.frame_dispatchers\n\n .dispatch(frame)\n\n .await\n\n .map(|receiver_option| receiver_option.map(|receiver| (id, receiver)))\n\n //TODO Figure out tracing.\n\n .map_err(|err| {\n", "file_path": "src/server.rs", "rank": 78, "score": 32420.072088183533 }, { "content": "#[derive(Debug)]\n\nstruct Handler {}\n\nimpl RequestHandler for Handler {\n\n type REQ = RawMessage;\n\n type RES = RawMessage;\n\n fn handle(&mut self, request: Self::REQ) -> HandlerResult<Self::RES> {\n\n let req_header = request.header().clone();\n\n Ok(RawMessage::new(\"x\".into(), req_header, \"y\".into()))\n\n }\n\n}\n", "file_path": "examples/basic.rs", "rank": 79, "score": 31550.50873642061 }, { "content": "use crate::errors::CodecError;\n\nuse crate::frames::{FRAME_HEADER_LENGTH, FRAME_MAX_LENGTH};\n\nuse crate::messages::args::ResponseCode;\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\nuse num_traits::FromPrimitive;\n\nuse std::collections::{HashMap, VecDeque};\n\nuse std::fmt::Write;\n\n\n\n/// Supported TChannel protocol version\n\npub const PROTOCOL_VERSION: u16 = 2;\n\n/// According to protocol frames have arg1, arg2, arg3.\n\n/// Implementation uses Vec of Bytes with length 3.\n\npub const MAX_FRAME_ARGS: usize = 3;\n\n/// Length of arg length frame field (2 bytes, u16)\n\npub const ARG_LEN_LEN: usize = 2;\n\n\n\npub type CodecResult<T> = Result<T, CodecError>;\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 80, "score": 29798.26737430548 }, { "content": "\n\nimpl Codec 
for CallRequestFields {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n dst.put_u32(self.ttl);\n\n self.tracing.encode(dst)?;\n\n encode_small_string(self.service, dst)?;\n\n encode_small_headers(self.headers, dst)?;\n\n Ok(())\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(CallRequestFields::new(\n\n src.get_u32(),\n\n Tracing::decode(src)?,\n\n decode_small_string(src)?,\n\n decode_small_headers(src)?,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 81, "score": 29798.234497684967 }, { "content": "pub mod headers;\n\npub mod payloads;\n\n\n\nuse crate::errors::CodecError;\n\nuse bytes::{Buf, BufMut, Bytes, BytesMut};\n\nuse futures::Stream;\n\nuse num_traits::FromPrimitive;\n\nuse std::io::Cursor;\n\nuse std::pin::Pin;\n\nuse tokio_util::codec::{Decoder, Encoder};\n\n\n\npub const FRAME_HEADER_LENGTH: u16 = 16;\n\npub const FRAME_MAX_LENGTH: u16 = u16::MAX - 1;\n\npub const ZERO: u8 = 0;\n\n\n\n#[derive(Copy, Clone, Debug, FromPrimitive, PartialEq)]\n\npub enum Type {\n\n // First message on every connection must be init\n\n InitRequest = 0x1,\n\n\n", "file_path": "src/frames/mod.rs", "rank": 82, "score": 29796.05589719463 }, { "content": "impl Encoder<TFrameId> for TFrameIdCodec {\n\n type Error = crate::errors::CodecError;\n\n\n\n fn encode(&mut self, item: TFrameId, dst: &mut BytesMut) -> Result<(), Self::Error> {\n\n let frame = item.frame();\n\n let len = frame.size() as u16 + FRAME_HEADER_LENGTH;\n\n trace!(\"Encoding TFrame (id {}, len {})\", item.id(), frame.size());\n\n dst.reserve(len as usize);\n\n dst.put_u16(len);\n\n dst.put_u8(*frame.frame_type() as u8);\n\n dst.put_u8(ZERO); // zero\n\n dst.put_u32(*item.id());\n\n for _ in 0..8 {\n\n dst.put_u8(ZERO)\n\n }\n\n dst.put_slice(frame.payload());\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/frames/mod.rs", "rank": 83, "score": 29795.51601493716 }, { "content": " #[get = \"pub\"]\n\n /// checksum type, checksum, args\n\n args: 
CallArgs,\n\n}\n\n\n\nimpl Codec for CallWithFieldsEncoded {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n dst.put_u8(self.flags.bits());\n\n dst.put(self.fields);\n\n self.args.encode(dst)?;\n\n Ok(())\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(CallWithFieldsEncoded::new(\n\n decode_bitflag(src.get_u8(), Flags::from_bits)?,\n\n CallRequestFields::decode(src)?.encode_bytes()?,\n\n CallArgs::decode(src)?,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 84, "score": 29795.3426123296 }, { "content": " /// checksum type, checksum, args\n\n pub args: CallArgs,\n\n}\n\n\n\nimpl Codec for CallRequest {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n dst.put_u8(self.flags.bits());\n\n self.fields.encode(dst)?;\n\n self.args.encode(dst)?;\n\n Ok(())\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(CallRequest::new(\n\n decode_bitflag(src.get_u8(), Flags::from_bits)?,\n\n CallRequestFields::decode(src)?,\n\n CallArgs::decode(src)?,\n\n ))\n\n }\n\n}\n", "file_path": "src/frames/payloads.rs", "rank": 85, "score": 29795.3246978964 }, { "content": " #[get = \"pub\"]\n\n /// tracing:25\n\n pub tracing: Tracing,\n\n #[get = \"pub\"]\n\n /// nh:1 (hk~1, hv~1){nh}\n\n pub headers: HashMap<String, String>,\n\n}\n\n\n\nimpl Codec for CallResponseFields {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n dst.put_u8(self.code as u8);\n\n self.tracing.encode(dst)?;\n\n encode_small_headers(self.headers, dst)?;\n\n Ok(())\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(CallResponseFields::new(\n\n decode_bitflag(src.get_u8(), ResponseCode::from_u8)?,\n\n Tracing::decode(src)?,\n", "file_path": "src/frames/payloads.rs", "rank": 86, "score": 29794.40821578868 }, { "content": " }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n let (checksum_type, checksum) = decode_checksum(src)?;\n\n Ok(CallArgs::new(\n\n checksum_type,\n\n 
checksum,\n\n decode_args(src)?, //arg3\n\n ))\n\n }\n\n}\n\n\n\n#[derive(Debug, Getters, new)]\n\npub struct CallWithFieldsEncoded {\n\n #[get = \"pub\"]\n\n /// flags:1\n\n flags: Flags,\n\n #[get = \"pub\"]\n\n /// ttl, tracing, service name, headers\n\n fields: Bytes,\n", "file_path": "src/frames/payloads.rs", "rank": 87, "score": 29794.081908728283 }, { "content": "impl Decoder for TFrameIdCodec {\n\n type Item = TFrameId;\n\n type Error = crate::errors::CodecError;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n if Self::is_buffering(src) {\n\n return Ok(None);\n\n }\n\n let size = src.get_u16();\n\n if size < FRAME_HEADER_LENGTH {\n\n return Err(CodecError::Error(\"Frame too short\".to_owned()));\n\n }\n\n let frame_type_bytes = src.get_u8();\n\n let frame_type = match FromPrimitive::from_u8(frame_type_bytes) {\n\n Some(frame_type) => frame_type,\n\n None => {\n\n return Err(CodecError::Error(format!(\n\n \"Unknown frame type {}\",\n\n frame_type_bytes\n\n )))\n", "file_path": "src/frames/mod.rs", "rank": 88, "score": 29793.64521817905 }, { "content": " #[get = \"pub\"]\n\n /// code, tracing, headers\n\n pub fields: CallResponseFields,\n\n #[get = \"pub\"]\n\n #[get_mut = \"pub\"]\n\n /// checksum type, checksum, args\n\n pub args: CallArgs,\n\n}\n\n\n\nimpl Codec for CallResponse {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n dst.put_u8(self.flags.bits());\n\n self.fields.encode(dst)?;\n\n self.args.encode(dst)?;\n\n Ok(())\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(CallResponse::new(\n\n decode_bitflag(src.get_u8(), Flags::from_bits)?,\n", "file_path": "src/frames/payloads.rs", "rank": 89, "score": 29793.284636679113 }, { "content": "\n\n#[derive(Debug, PartialEq, new)]\n\npub struct ErrorMsg {\n\n code: ErrorCode,\n\n tracing: Tracing,\n\n message: String,\n\n}\n\n\n\nimpl Codec for ErrorMsg {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n 
dst.put_u8(self.code as u8);\n\n self.tracing.encode(dst)?;\n\n encode_string(self.message, dst)?;\n\n Ok(())\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(ErrorMsg::new(\n\n decode_bitflag(src.get_u8(), ErrorCode::from_u8)?,\n\n Tracing::decode(src)?,\n\n decode_string(src)?,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/frames/payloads.rs", "rank": 90, "score": 29791.885967922342 }, { "content": "\n\nimpl Call<CallRequestFields> for CallRequest {\n\n fn fields(self) -> CallRequestFields {\n\n self.fields\n\n }\n\n\n\n fn flags(&self) -> Flags {\n\n self.flags\n\n }\n\n\n\n fn args(&mut self) -> &mut CallArgs {\n\n &mut self.args\n\n }\n\n}\n\n\n\n#[derive(Debug, Getters, new)]\n\npub(crate) struct CallResponseFields {\n\n #[get = \"pub\"]\n\n /// code:1\n\n pub code: ResponseCode,\n", "file_path": "src/frames/payloads.rs", "rank": 91, "score": 29790.804245519885 }, { "content": "#[derive(Debug, Getters, MutGetters, new)]\n\npub struct CallContinue {\n\n #[get = \"pub\"]\n\n /// flags:1\n\n flags: Flags,\n\n #[get = \"pub\"]\n\n #[get_mut = \"pub\"]\n\n /// common fields\n\n pub args: CallArgs,\n\n}\n\n\n\nimpl Codec for CallContinue {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n dst.put_u8(self.flags.bits());\n\n self.args.encode(dst)?;\n\n Ok(())\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(CallContinue::new(\n", "file_path": "src/frames/payloads.rs", "rank": 92, "score": 29790.751391412534 }, { "content": "impl CallFields for CallRequestFields {\n\n fn headers(&self) -> &HashMap<String, String> {\n\n &self.headers\n\n }\n\n\n\n fn tracing(&self) -> &Tracing {\n\n &self.tracing\n\n }\n\n}\n\n\n\n#[derive(Debug, Getters, MutGetters, new)]\n\npub struct CallRequest {\n\n #[get = \"pub\"]\n\n /// flags:1\n\n pub flags: Flags,\n\n #[get = \"pub\"]\n\n /// ttl, tracing, service name, headers\n\n pub fields: CallRequestFields,\n\n #[get = \"pub\"]\n\n #[get_mut = \"pub\"]\n", "file_path": 
"src/frames/payloads.rs", "rank": 93, "score": 29790.018492012237 }, { "content": "\n\n // Protocol level ping req (no body)\n\n PingRequest = 0xd0,\n\n\n\n // PingFrame res (no body)\n\n PingResponse = 0xd1,\n\n\n\n // Protocol level error.\n\n Error = 0xff,\n\n}\n\n\n\npub type TFrameStream = Pin<Box<dyn Stream<Item = TFrame> + Send>>;\n\n\n\n#[derive(Debug, Getters, MutGetters, new)]\n\npub struct TFrame {\n\n #[get = \"pub\"]\n\n pub frame_type: Type,\n\n #[get_mut = \"pub\"]\n\n #[get = \"pub\"]\n\n payload: Bytes,\n", "file_path": "src/frames/mod.rs", "rank": 94, "score": 29789.95514025024 }, { "content": " decode_small_headers(src)?,\n\n ))\n\n }\n\n}\n\n\n\nimpl CallFields for CallResponseFields {\n\n fn headers(&self) -> &HashMap<String, String> {\n\n &self.headers\n\n }\n\n\n\n fn tracing(&self) -> &Tracing {\n\n &self.tracing\n\n }\n\n}\n\n\n\n#[derive(Debug, Getters, MutGetters, new)]\n\npub(crate) struct CallResponse {\n\n /// flags:1\n\n #[get = \"pub\"]\n\n pub flags: Flags,\n", "file_path": "src/frames/payloads.rs", "rank": 95, "score": 29789.762262793938 }, { "content": " dst.put_u8(self.trace_flags.bits());\n\n Ok(()) // TODO Ok?\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(Tracing::new(\n\n src.get_u64(),\n\n src.get_u64(),\n\n src.get_u64(),\n\n decode_bitflag(src.get_u8(), TraceFlags::from_bits)?,\n\n ))\n\n }\n\n}\n\n\n\n#[derive(Debug, Getters, new)]\n\npub struct Init {\n\n #[get = \"pub\"]\n\n version: u16,\n\n #[get = \"pub\"]\n\n headers: HashMap<String, String>,\n", "file_path": "src/frames/payloads.rs", "rank": 96, "score": 29789.561668713744 }, { "content": " #[get = \"pub\"]\n\n /// tracing:25\n\n tracing: Tracing,\n\n}\n\n\n\nimpl Codec for Claim {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n dst.put_u32(self.ttl);\n\n self.tracing.encode(dst)?;\n\n Ok(())\n\n }\n\n\n\n fn decode(src: &mut Bytes) -> CodecResult<Self> {\n\n Ok(Claim::new(src.get_u32(), Tracing::decode(src)?))\n\n 
}\n\n}\n\n\n\n// pub struct PingRequest {} // no body\n\n\n\n// pub struct PingResponse {} // no body\n", "file_path": "src/frames/payloads.rs", "rank": 97, "score": 29789.256630187676 }, { "content": "//TODO convert to builder and verify args length?\n\n#[derive(Debug, Getters, MutGetters, new)]\n\npub struct CallArgs {\n\n #[get = \"pub\"]\n\n /// csumtype:1\n\n checksum_type: ChecksumType,\n\n #[get = \"pub\"]\n\n /// (csum:4){0,1}\n\n checksum: Option<u32>,\n\n #[get = \"pub\"]\n\n #[get_mut = \"pub\"]\n\n /// arg1~2 arg2~2 arg3~2\n\n pub args: VecDeque<Option<Bytes>>, //TODO consider using references\n\n}\n\n\n\nimpl Codec for CallArgs {\n\n fn encode(self, dst: &mut BytesMut) -> CodecResult<()> {\n\n encode_checksum(self.checksum_type, self.checksum, dst)?;\n\n encode_args(self.args, dst)?;\n\n Ok(())\n", "file_path": "src/frames/payloads.rs", "rank": 98, "score": 29788.76517228509 } ]
Rust
src/shell/history/entry.rs
doy/nbsh
4151ab7aab939a12721a0f4207c87b5c09ace339
use crate::shell::prelude::*; pub struct Entry { cmdline: String, env: Env, pty: super::pty::Pty, fullscreen: Option<bool>, start_instant: std::time::Instant, start_time: time::OffsetDateTime, state: State, } impl Entry { pub fn new( cmdline: String, env: Env, size: (u16, u16), event_w: crate::shell::event::Writer, ) -> Result<Self> { let start_instant = std::time::Instant::now(); let start_time = time::OffsetDateTime::now_utc(); let (pty, pts) = super::pty::Pty::new(size, event_w.clone()).unwrap(); let (child, fh) = Self::spawn_command(&cmdline, &env, &pts)?; tokio::spawn(Self::task(child, fh, env.idx(), event_w)); Ok(Self { cmdline, env, pty, fullscreen: None, start_instant, start_time, state: State::Running((0, 0)), }) } pub fn render( &self, out: &mut impl textmode::Textmode, entry_count: usize, vt: &mut super::pty::Vt, focused: bool, scrolling: bool, offset: time::UtcOffset, ) { let idx = self.env.idx(); let size = out.screen().size(); let time = self.state.exit_info().map_or_else( || { format!( "[{}]", crate::format::time(self.start_time.to_offset(offset)) ) }, |info| { format!( "({}) [{}]", crate::format::duration( info.instant - self.start_instant ), crate::format::time(self.start_time.to_offset(offset)), ) }, ); if vt.bell(focused) { out.write(b"\x07"); } Self::set_bgcolor(out, idx, focused); out.set_fgcolor(textmode::color::YELLOW); let entry_count_width = format!("{}", entry_count + 1).len(); let idx_str = format!("{}", idx + 1); out.write_str(&" ".repeat(entry_count_width - idx_str.len())); out.write_str(&idx_str); out.write_str(" "); out.reset_attributes(); Self::set_bgcolor(out, idx, focused); if let Some(info) = self.state.exit_info() { if info.status.signal().is_some() { out.set_fgcolor(textmode::color::MAGENTA); } else if info.status.success() { out.set_fgcolor(textmode::color::DARKGREY); } else { out.set_fgcolor(textmode::color::RED); } out.write_str(&crate::format::exit_status(info.status)); } else { out.write_str(" "); } out.reset_attributes(); 
if vt.is_bell() { out.set_bgcolor(textmode::Color::Rgb(64, 16, 16)); } else { Self::set_bgcolor(out, idx, focused); } out.write_str("$ "); Self::set_bgcolor(out, idx, focused); let start = usize::from(out.screen().cursor_position().1); let end = usize::from(size.1) - time.len() - 2; let max_len = end - start; let cmd = if self.cmd().len() > max_len { &self.cmd()[..(max_len - 4)] } else { self.cmd() }; if let State::Running(span) = self.state { let span = (span.0.min(cmd.len()), span.1.min(cmd.len())); if !cmd[..span.0].is_empty() { out.write_str(&cmd[..span.0]); } if !cmd[span.0..span.1].is_empty() { out.set_bgcolor(textmode::Color::Rgb(16, 64, 16)); out.write_str(&cmd[span.0..span.1]); Self::set_bgcolor(out, idx, focused); } if !cmd[span.1..].is_empty() { out.write_str(&cmd[span.1..]); } } else { out.write_str(cmd); } if self.cmd().len() > max_len { if let State::Running(span) = self.state { if span.0 < cmd.len() && span.1 > cmd.len() { out.set_bgcolor(textmode::Color::Rgb(16, 64, 16)); } } out.write_str(" "); if let State::Running(span) = self.state { if span.1 > cmd.len() { out.set_bgcolor(textmode::Color::Rgb(16, 64, 16)); } } out.set_fgcolor(textmode::color::BLUE); out.write_str("..."); } out.reset_attributes(); Self::set_bgcolor(out, idx, focused); let cur_pos = out.screen().cursor_position(); out.write_str(&" ".repeat( usize::from(size.1) - time.len() - 1 - usize::from(cur_pos.1), )); out.write_str(&time); out.write_str(" "); out.reset_attributes(); if vt.binary() { let msg = "This appears to be binary data. 
Fullscreen this entry to view anyway."; let len: u16 = msg.len().try_into().unwrap(); out.move_to( out.screen().cursor_position().0 + 1, (size.1 - len) / 2, ); out.set_fgcolor(textmode::color::RED); out.write_str(msg); out.hide_cursor(true); } else { let last_row = vt.output_lines(focused && !scrolling, self.state.running()); let mut max_lines = self.max_lines(entry_count); if last_row > max_lines { out.write(b"\r\n"); out.set_fgcolor(textmode::color::BLUE); out.write_str("..."); out.reset_attributes(); max_lines -= 1; } let mut out_row = out.screen().cursor_position().0 + 1; let screen = vt.screen(); let pos = screen.cursor_position(); let mut wrapped = false; let mut cursor_found = None; for (idx, row) in screen .rows_formatted(0, size.1) .enumerate() .take(last_row) .skip(last_row.saturating_sub(max_lines)) { let idx: u16 = idx.try_into().unwrap(); out.reset_attributes(); if !wrapped { out.move_to(out_row, 0); } out.write(&row); wrapped = screen.row_wrapped(idx); if pos.0 == idx { cursor_found = Some(out_row); } out_row += 1; } if focused && !scrolling { if let Some(row) = cursor_found { out.hide_cursor(screen.hide_cursor()); out.move_to(row, pos.1); } else { out.hide_cursor(true); } } } out.reset_attributes(); } pub fn render_fullscreen(&self, out: &mut impl textmode::Textmode) { self.pty.with_vt_mut(|vt| { out.write(&vt.screen().state_formatted()); if vt.bell(true) { out.write(b"\x07"); } out.reset_attributes(); }); } pub fn input(&self, bytes: Vec<u8>) { self.pty.input(bytes); } pub fn resize(&self, size: (u16, u16)) { self.pty.resize(size); } pub fn cmd(&self) -> &str { &self.cmdline } pub fn start_time(&self) -> time::OffsetDateTime { self.start_time } pub fn toggle_fullscreen(&mut self) { if let Some(fullscreen) = self.fullscreen { self.fullscreen = Some(!fullscreen); } else { self.fullscreen = Some(!self.pty.fullscreen()); } } pub fn set_fullscreen(&mut self, fullscreen: bool) { self.fullscreen = Some(fullscreen); } pub fn running(&self) -> bool { 
self.state.running() } pub fn exited(&mut self, exit_info: ExitInfo) { self.state = State::Exited(exit_info); } pub fn lines(&self, entry_count: usize, focused: bool) -> usize { let running = self.running(); 1 + std::cmp::min( self.pty.with_vt(|vt| vt.output_lines(focused, running)), self.max_lines(entry_count), ) } pub fn should_fullscreen(&self) -> bool { self.fullscreen.unwrap_or_else(|| self.pty.fullscreen()) } pub fn lock_vt(&self) -> std::sync::MutexGuard<super::pty::Vt> { self.pty.lock_vt() } pub fn set_span(&mut self, new_span: (usize, usize)) { if let State::Running(ref mut span) = self.state { *span = new_span; } } fn max_lines(&self, entry_count: usize) -> usize { if self.env.idx() == entry_count - 1 { 15 } else { 5 } } fn set_bgcolor( out: &mut impl textmode::Textmode, idx: usize, focus: bool, ) { if focus { out.set_bgcolor(textmode::Color::Rgb(0x56, 0x1b, 0x8b)); } else if idx % 2 == 0 { out.set_bgcolor(textmode::Color::Rgb(0x24, 0x21, 0x00)); } else { out.set_bgcolor(textmode::Color::Rgb(0x20, 0x20, 0x20)); } } fn spawn_command( cmdline: &str, env: &Env, pts: &pty_process::Pts, ) -> Result<(tokio::process::Child, std::fs::File)> { let mut cmd = pty_process::Command::new(crate::info::current_exe()?); cmd.args(&["-c", cmdline, "--status-fd", "3"]); env.apply(&mut cmd); let (from_r, from_w) = nix::unistd::pipe2(nix::fcntl::OFlag::O_CLOEXEC)?; let fh = unsafe { std::fs::File::from_raw_fd(from_r) }; unsafe { cmd.pre_exec(move || { nix::unistd::dup2(from_w, 3)?; Ok(()) }); } let child = cmd.spawn(pts)?; nix::unistd::close(from_w)?; Ok((child, fh)) } async fn task( mut child: tokio::process::Child, fh: std::fs::File, idx: usize, event_w: crate::shell::event::Writer, ) { enum Res { Read(crate::runner::Event), Exit(std::io::Result<std::process::ExitStatus>), } let (read_w, read_r) = tokio::sync::mpsc::unbounded_channel(); tokio::task::spawn_blocking(move || loop { let event = bincode::deserialize_from(&fh); match event { Ok(event) => { 
read_w.send(event).unwrap(); } Err(e) => { match &*e { bincode::ErrorKind::Io(io_e) => { assert!( io_e.kind() == std::io::ErrorKind::UnexpectedEof ); } e => { panic!("{}", e); } } break; } } }); let mut stream: futures_util::stream::SelectAll<_> = [ tokio_stream::wrappers::UnboundedReceiverStream::new(read_r) .map(Res::Read) .boxed(), futures_util::stream::once(child.wait()) .map(Res::Exit) .boxed(), ] .into_iter() .collect(); let mut exit_status = None; let mut new_env = None; while let Some(res) = stream.next().await { match res { Res::Read(event) => match event { crate::runner::Event::RunPipeline(new_span) => { event_w.send(Event::ChildRunPipeline(idx, new_span)); } crate::runner::Event::Suspend => { event_w.send(Event::ChildSuspend(idx)); } crate::runner::Event::Exit(env) => { new_env = Some(env); } }, Res::Exit(status) => { exit_status = Some(status.unwrap()); } } } event_w.send(Event::ChildExit( idx, ExitInfo::new(exit_status.unwrap()), new_env, )); } } enum State { Running((usize, usize)), Exited(ExitInfo), } impl State { fn exit_info(&self) -> Option<&ExitInfo> { match self { Self::Running(_) => None, Self::Exited(exit_info) => Some(exit_info), } } fn running(&self) -> bool { self.exit_info().is_none() } } #[derive(Debug)] pub struct ExitInfo { status: std::process::ExitStatus, instant: std::time::Instant, } impl ExitInfo { fn new(status: std::process::ExitStatus) -> Self { Self { status, instant: std::time::Instant::now(), } } }
use crate::shell::prelude::*; pub struct Entry { cmdline: String, env: Env, pty: super::pty::Pty, fullscreen: Option<bool>, start_instant: std::time::Instant, start_time: time::OffsetDateTime, state: State, } impl Entry { pub fn new( cmdline: String, env: Env, size: (u16, u16), event_w: crate::shell::event::Writer, ) -> Result<Self> { let start_instant = std::time::Instant::now(); let start_time = time::OffsetDateTime::now_utc(); let (pty, pts) = super::pty::Pty::new(size, event_w.clone()).unwrap(); let (child, fh) = Self::spawn_command(&cmdline, &env, &pts)?; tokio::spawn(Self::task(child, fh, env.idx(), event_w)); Ok(Self { cmdline, env, pty, fullscreen: None, start_instant, start_time, state: State::Running((0, 0)), }) } pub fn render( &self, out: &mut impl textmode::Textmode, entry_count: usize, vt: &mut super::pty::Vt, focused: bool, scrolling: bool, offset: time::UtcOffset, ) { let idx = self.env.idx(); let size = out.screen().size(); let time = self.state.exit_info().map_or_else( || { format!( "[{}]", crate::format::time(self.start_time.to_offset(offset)) ) }, |info| { format!( "({}) [{}]", crate::format::duration( info.instant - self.start_instant ), crate::format::time(self.start_time.to_offset(offset)), ) }, ); if vt.bell(focused) { out.write(b"\x07"); } Self::set_bgcolor(out, idx, focused); out.set_fgcolor(textmode::color::YELLOW); let entry_count_width = format!("{}", entry_count + 1).len(); let idx_str = format!("{}", idx + 1); out.write_str(&" ".repeat(entry_count_width - idx_str.len())); out.write_str(&idx_str); out.write_str(" "); out.reset_attributes(); Self::set_bgcolor(out, idx, focused); if let Some(info) = self.state.exit_info() { if info.status.signal().is_some() { out.set_fgcolor(textmode::color::MAGENTA); } else if info.status.success() { out.set_fgcolor(textmode::color::DARKGREY); } else { out.set_fgcolor(textmode::color::RED); } out.write_str(&crate::format::exit_status(info.status)); } else { out.write_str(" "); } out.reset_attributes(); 
if vt.is_bell() { out.set_bgcolor(textmode::Color::Rgb(64, 16, 16)); } else { Self::set_bgcolor(out, idx, focused); } out.write_str("$ "); Self::set_bgcolor(out, idx, focused); let start = usize::from(out.screen().cursor_position().1); let end = usize::from(size.1) - time.len() - 2; let max_len = end - start; let cmd = if self.cmd().len() > max_len { &self.cmd()[..(max_len - 4)] } else { self.cmd() }; if let State::Running(span) = self.state { let span = (span.0.min(cmd.len()), span.1.min(cmd.len())); if !cmd[..span.0].is_empty() { out.write_str(&cmd[..span.0]); } if !cmd[span.0..span.1].is_empty() { out.set_bgcolor(textmode::Color::Rgb(16, 64, 16)); out.write_str(&cmd[span.0..span.1]); Self::set_bgcolor(out, idx, focused); } if !cmd[span.1..].is_empty() { out.write_str(&cmd[span.1..]); } } else { out.write_str(cmd); } if self.cmd().len() > max_len { if let State::Running(span) = self.state { if span.0 < cmd.len() && span.1 > cmd.len() { out.set_bgcolor(textmode::Color::Rgb(16, 64, 16)); } } out.write_str(" "); if let State::Running(span) = self.state { if span.1 > cmd.len() { out.set_bgcolor(textmode::Color::Rgb(16, 64, 16)); } } out.set_fgcolor(textmode::color::BLUE); out.write_str("..."); } out.reset_attributes(); Self::set_bgcolor(out, idx, focused); let cur_pos = out.screen().cursor_position(); out.write_str(&" ".repeat( usize::from(size.1) - time.len() - 1 - usize::from(cur_pos.1), )); out.write_str(&time); out.write_str(" "); out.reset_attributes(); if vt.binary() { let msg = "This appears to be binary data. 
Fullscreen this entry to view anyway."; let len: u16 = msg.len().try_into().unwrap(); out.move_to( out.screen().cursor_position().0 + 1, (size.1 - len) / 2, ); out.set_fgcolor(textmode::color::RED); out.write_str(msg); out.hide_cursor(true); } else { let last_row = vt.output_lines(focused && !scrolling, self.state.running()); let mut max_lines = self.max_lines(entry_count); if last_row > max_lines { out.write(b"\r\n"); out.set_fgcolor(textmode::color::BLUE); out.write_str("..."); out.reset_attributes(); max_lines -= 1; } let mut out_row = out.screen().cursor_position().0 + 1; let screen = vt.screen(); let pos = screen.cursor_position(); let mut wrapped = false; let mut cursor_found = None; for (idx, row) in screen .rows_formatted(0, size.1) .enumerate() .take(last_row) .skip(last_row.saturating_sub(max_lines)) { let idx: u16 = idx.try_into().unwrap(); out.reset_attributes(); if !wrapped { out.move_to(out_row, 0); } out.write(&row); wrapped = screen.row_wrapped(idx); if pos.0 == idx { cursor_found = Some(out_row); } out_row += 1; } if focused && !scrolling { if let Some(row) = cursor_found { out.hide_cursor(screen.hide_cursor()); out.move_to(row, pos.1); } else { out.hide_cursor(true); } } } out.reset_attributes(); } pub fn render_fullscreen(&self, out: &mut impl textmode::Textmode) { self.pty.with_vt_mut(|vt| { out.write(&vt.screen().state_formatted()); if vt.bell(true) { out.write(b"\x07"); } out.reset_attributes(); }); } pub fn input(&self, bytes: Vec<u8>) { self.pty.input(bytes); } pub fn resize(&self, size: (u16, u16)) { self.pty.resize(size); } pub fn cmd(&self) -> &str { &self.cmdline } pub fn start_time(&self) -> time::OffsetDateTime { self.start_time } pub fn toggle_fullscreen(&mut self) { if let Some(fullscreen) = self.fullscreen { self.fullscreen = Some(!fullscreen); } else { self.fullscreen = Some(!self.pty.fullscreen()); } } pub fn set_fullscreen(&mut self, fullscreen: bool) { self.fullscreen = Some(fullscreen); } pub fn running(&self) -> bool { 
self.state.running() } pub fn exited(&mut self, exit_info: ExitInfo) { self.state = State::Exited(exit_info); }
pub fn should_fullscreen(&self) -> bool { self.fullscreen.unwrap_or_else(|| self.pty.fullscreen()) } pub fn lock_vt(&self) -> std::sync::MutexGuard<super::pty::Vt> { self.pty.lock_vt() } pub fn set_span(&mut self, new_span: (usize, usize)) { if let State::Running(ref mut span) = self.state { *span = new_span; } } fn max_lines(&self, entry_count: usize) -> usize { if self.env.idx() == entry_count - 1 { 15 } else { 5 } } fn set_bgcolor( out: &mut impl textmode::Textmode, idx: usize, focus: bool, ) { if focus { out.set_bgcolor(textmode::Color::Rgb(0x56, 0x1b, 0x8b)); } else if idx % 2 == 0 { out.set_bgcolor(textmode::Color::Rgb(0x24, 0x21, 0x00)); } else { out.set_bgcolor(textmode::Color::Rgb(0x20, 0x20, 0x20)); } } fn spawn_command( cmdline: &str, env: &Env, pts: &pty_process::Pts, ) -> Result<(tokio::process::Child, std::fs::File)> { let mut cmd = pty_process::Command::new(crate::info::current_exe()?); cmd.args(&["-c", cmdline, "--status-fd", "3"]); env.apply(&mut cmd); let (from_r, from_w) = nix::unistd::pipe2(nix::fcntl::OFlag::O_CLOEXEC)?; let fh = unsafe { std::fs::File::from_raw_fd(from_r) }; unsafe { cmd.pre_exec(move || { nix::unistd::dup2(from_w, 3)?; Ok(()) }); } let child = cmd.spawn(pts)?; nix::unistd::close(from_w)?; Ok((child, fh)) } async fn task( mut child: tokio::process::Child, fh: std::fs::File, idx: usize, event_w: crate::shell::event::Writer, ) { enum Res { Read(crate::runner::Event), Exit(std::io::Result<std::process::ExitStatus>), } let (read_w, read_r) = tokio::sync::mpsc::unbounded_channel(); tokio::task::spawn_blocking(move || loop { let event = bincode::deserialize_from(&fh); match event { Ok(event) => { read_w.send(event).unwrap(); } Err(e) => { match &*e { bincode::ErrorKind::Io(io_e) => { assert!( io_e.kind() == std::io::ErrorKind::UnexpectedEof ); } e => { panic!("{}", e); } } break; } } }); let mut stream: futures_util::stream::SelectAll<_> = [ tokio_stream::wrappers::UnboundedReceiverStream::new(read_r) .map(Res::Read) .boxed(), 
futures_util::stream::once(child.wait()) .map(Res::Exit) .boxed(), ] .into_iter() .collect(); let mut exit_status = None; let mut new_env = None; while let Some(res) = stream.next().await { match res { Res::Read(event) => match event { crate::runner::Event::RunPipeline(new_span) => { event_w.send(Event::ChildRunPipeline(idx, new_span)); } crate::runner::Event::Suspend => { event_w.send(Event::ChildSuspend(idx)); } crate::runner::Event::Exit(env) => { new_env = Some(env); } }, Res::Exit(status) => { exit_status = Some(status.unwrap()); } } } event_w.send(Event::ChildExit( idx, ExitInfo::new(exit_status.unwrap()), new_env, )); } } enum State { Running((usize, usize)), Exited(ExitInfo), } impl State { fn exit_info(&self) -> Option<&ExitInfo> { match self { Self::Running(_) => None, Self::Exited(exit_info) => Some(exit_info), } } fn running(&self) -> bool { self.exit_info().is_none() } } #[derive(Debug)] pub struct ExitInfo { status: std::process::ExitStatus, instant: std::time::Instant, } impl ExitInfo { fn new(status: std::process::ExitStatus) -> Self { Self { status, instant: std::time::Instant::now(), } } }
pub fn lines(&self, entry_count: usize, focused: bool) -> usize { let running = self.running(); 1 + std::cmp::min( self.pty.with_vt(|vt| vt.output_lines(focused, running)), self.max_lines(entry_count), ) }
function_block-full_function
[ { "content": "#[allow(clippy::unnecessary_wraps)]\n\npub fn time(offset: time::UtcOffset) -> Result<String> {\n\n Ok(crate::format::time(\n\n time::OffsetDateTime::now_utc().to_offset(offset),\n\n ))\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 0, "score": 243241.07392747345 }, { "content": "pub fn time(time: time::OffsetDateTime) -> String {\n\n let format = if time::OffsetDateTime::now_utc() - time\n\n > std::time::Duration::from_secs(60 * 60 * 24)\n\n {\n\n time::format_description::parse(\n\n \"[year]-[month]-[day] [hour]:[minute]:[second]\",\n\n )\n\n .unwrap()\n\n } else {\n\n time::format_description::parse(\"[hour]:[minute]:[second]\").unwrap()\n\n };\n\n time.format(&format).unwrap()\n\n}\n\n\n", "file_path": "src/format.rs", "rank": 1, "score": 235745.95889306182 }, { "content": "// the time crate is currently unable to get the local offset on unix due to\n\n// soundness concerns, so we have to do it manually/:\n\n//\n\n// https://github.com/time-rs/time/issues/380\n\npub fn get_offset() -> time::UtcOffset {\n\n let offset_str =\n\n std::process::Command::new(\"date\").args(&[\"+%:z\"]).output();\n\n if let Ok(offset_str) = offset_str {\n\n let offset_str = String::from_utf8(offset_str.stdout).unwrap();\n\n time::UtcOffset::parse(\n\n offset_str.trim(),\n\n &time::format_description::parse(\"[offset_hour]:[offset_minute]\")\n\n .unwrap(),\n\n )\n\n .unwrap_or(time::UtcOffset::UTC)\n\n } else {\n\n time::UtcOffset::UTC\n\n }\n\n}\n", "file_path": "src/info.rs", "rank": 2, "score": 192776.3076246552 }, { "content": "pub fn duration(dur: std::time::Duration) -> String {\n\n let secs = dur.as_secs();\n\n let nanos = dur.subsec_nanos();\n\n if secs > 60 {\n\n let mins = secs / 60;\n\n let secs = secs - mins * 60;\n\n format!(\"{}m{}s\", mins, secs)\n\n } else if secs > 9 {\n\n format!(\"{}.{:02}s\", secs, nanos / 10_000_000)\n\n } else if secs > 0 {\n\n format!(\"{}.{:03}s\", secs, nanos / 1_000_000)\n\n } else if nanos >= 1_000_000 {\n\n 
format!(\"{}ms\", nanos / 1_000_000)\n\n } else if nanos >= 1_000 {\n\n format!(\"{}us\", nanos / 1_000)\n\n } else {\n\n format!(\"{}ns\", nanos)\n\n }\n\n}\n\n\n", "file_path": "src/format.rs", "rank": 3, "score": 181491.44460482738 }, { "content": "pub fn pid() -> String {\n\n nix::unistd::getpid().to_string()\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 4, "score": 174992.1880842878 }, { "content": "pub fn hostname() -> Result<String> {\n\n let mut hostname = hostname::get()?.to_string_lossy().into_owned();\n\n if let Some(idx) = hostname.find('.') {\n\n hostname.truncate(idx);\n\n }\n\n Ok(hostname)\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 5, "score": 164882.38248054334 }, { "content": "pub fn user() -> Result<String> {\n\n Ok(users::get_current_username()\n\n .ok_or_else(|| anyhow!(\"couldn't get username\"))?\n\n .to_string_lossy()\n\n .into_owned())\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 6, "score": 164882.38248054334 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\npub fn prompt_char() -> Result<String> {\n\n if users::get_current_uid() == 0 {\n\n Ok(\"#\".into())\n\n } else {\n\n Ok(\"$\".into())\n\n }\n\n}\n\n\n", "file_path": "src/info.rs", "rank": 7, "score": 160226.71558381835 }, { "content": "pub fn path(path: &std::path::Path) -> String {\n\n let mut path = path.display().to_string();\n\n if let Ok(home) = std::env::var(\"HOME\") {\n\n if path.starts_with(&home) {\n\n path.replace_range(..home.len(), \"~\");\n\n }\n\n }\n\n path\n\n}\n\n\n", "file_path": "src/format.rs", "rank": 8, "score": 141171.2981627884 }, { "content": "pub fn io_error(e: &std::io::Error) -> String {\n\n let mut s = format!(\"{}\", e);\n\n if e.raw_os_error().is_some() {\n\n let i = s.rfind('(').unwrap();\n\n s.truncate(i - 1);\n\n }\n\n s\n\n}\n", "file_path": "src/format.rs", "rank": 9, "score": 141171.2981627884 }, { "content": "fn strip_escape(s: &str) -> String {\n\n let mut new = String::new();\n\n let mut escape = false;\n\n for c in 
s.chars() {\n\n if escape {\n\n new.push(c);\n\n escape = false;\n\n } else {\n\n match c {\n\n '\\\\' => escape = true,\n\n _ => new.push(c),\n\n }\n\n }\n\n }\n\n new\n\n}\n\n\n", "file_path": "src/parse/ast.rs", "rank": 10, "score": 134108.22060866724 }, { "content": "pub fn exit_status(status: std::process::ExitStatus) -> String {\n\n status.signal().map_or_else(\n\n || format!(\"{:03} \", status.code().unwrap()),\n\n |sig| {\n\n nix::sys::signal::Signal::try_from(sig).map_or_else(\n\n |_| format!(\"SIG{} \", sig),\n\n |sig| format!(\"{:4} \", &sig.as_str()[3..]),\n\n )\n\n },\n\n )\n\n}\n\n\n", "file_path": "src/format.rs", "rank": 11, "score": 134070.893814002 }, { "content": "fn strip_basic_escape(s: &str) -> String {\n\n let mut new = String::new();\n\n let mut escape = false;\n\n for c in s.chars() {\n\n if escape {\n\n match c {\n\n '\\\\' | '\\'' => {}\n\n _ => new.push('\\\\'),\n\n }\n\n new.push(c);\n\n escape = false;\n\n } else {\n\n match c {\n\n '\\\\' => escape = true,\n\n _ => new.push(c),\n\n }\n\n }\n\n }\n\n new\n\n}\n\n\n", "file_path": "src/parse/ast.rs", "rank": 12, "score": 130887.03759456145 }, { "content": "fn expand_home(dir: &str) -> Result<String> {\n\n if dir.starts_with('~') {\n\n let path: std::path::PathBuf = dir.into();\n\n if let std::path::Component::Normal(prefix) =\n\n path.components().next().unwrap()\n\n {\n\n let prefix_bytes = prefix.as_bytes();\n\n let name = if prefix_bytes == b\"~\" {\n\n None\n\n } else {\n\n Some(std::ffi::OsStr::from_bytes(&prefix_bytes[1..]))\n\n };\n\n if let Some(home) = home(name) {\n\n Ok(home\n\n .join(path.strip_prefix(prefix).unwrap())\n\n .to_str()\n\n .unwrap()\n\n .to_string())\n\n } else {\n\n anyhow::bail!(\n", "file_path": "src/parse/ast.rs", "rank": 13, "score": 123612.21551583085 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[allow(clippy::unnecessary_wraps)]\n\npub fn current_exe() -> Result<std::path::PathBuf> {\n\n Ok(\"/proc/self/exe\".into())\n\n}\n\n\n", "file_path": 
"src/info.rs", "rank": 15, "score": 100552.7096043407 }, { "content": "pub fn setpgid_parent(\n\n pid: nix::unistd::Pid,\n\n pg: Option<nix::unistd::Pid>,\n\n) -> Result<()> {\n\n nix::unistd::setpgid(pid, pg.unwrap_or(PID0))\n\n // the child already called exec, so it must have already called\n\n // setpgid itself\n\n .allow(nix::errno::Errno::EACCES)\n\n // the child already exited, so we don't care\n\n .allow(nix::errno::Errno::ESRCH)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 16, "score": 96433.56383534288 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum Focus {\n\n Readline,\n\n History(usize),\n\n Scrolling(Option<usize>),\n\n}\n\n\n", "file_path": "src/shell/mod.rs", "rank": 17, "score": 88885.05001548617 }, { "content": "pub fn channel() -> (Writer, Reader) {\n\n let (event_w, event_r) = tokio::sync::mpsc::unbounded_channel();\n\n (Writer::new(event_w), Reader::new(event_r))\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Writer(tokio::sync::mpsc::UnboundedSender<Event>);\n\n\n\nimpl Writer {\n\n pub fn new(event_w: tokio::sync::mpsc::UnboundedSender<Event>) -> Self {\n\n Self(event_w)\n\n }\n\n\n\n pub fn send(&self, event: Event) {\n\n // the only time this should ever error is when the application is\n\n // shutting down, at which point we don't actually care about any\n\n // further dropped messages\n\n #[allow(clippy::let_underscore_drop)]\n\n let _ = self.0.send(event);\n\n }\n", "file_path": "src/shell/event.rs", "rank": 18, "score": 86946.53959291396 }, { "content": "pub fn setpgid_child(pg: Option<nix::unistd::Pid>) -> std::io::Result<()> {\n\n nix::unistd::setpgid(PID0, pg.unwrap_or(PID0))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 19, "score": 84900.94543845124 }, { "content": "pub fn history_file() -> std::path::PathBuf {\n\n data_dir().join(\"history\")\n\n}\n\n\n", "file_path": "src/dirs.rs", "rank": 20, "score": 80148.15857058254 }, { "content": "pub fn config_file() -> std::path::PathBuf 
{\n\n config_dir().join(\"config.toml\")\n\n}\n\n\n", "file_path": "src/dirs.rs", "rank": 21, "score": 80148.15857058254 }, { "content": "struct VisibleEntries<'a> {\n\n entries: std::collections::VecDeque<(\n\n usize,\n\n usize,\n\n std::sync::MutexGuard<'a, pty::Vt>,\n\n )>,\n\n}\n\n\n\nimpl<'a> VisibleEntries<'a> {\n\n fn new() -> Self {\n\n Self {\n\n entries: std::collections::VecDeque::new(),\n\n }\n\n }\n\n\n\n fn add(\n\n &mut self,\n\n idx: usize,\n\n offset: usize,\n\n vt: std::sync::MutexGuard<'a, pty::Vt>,\n", "file_path": "src/shell/history/mod.rs", "rank": 22, "score": 79792.60576356793 }, { "content": "pub fn id_to_pid(id: u32) -> nix::unistd::Pid {\n\n nix::unistd::Pid::from_raw(id.try_into().unwrap())\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 23, "score": 72368.13127793616 }, { "content": "pub fn set_foreground_pg(pg: nix::unistd::Pid) -> Result<()> {\n\n let pty = nix::fcntl::open(\n\n \"/dev/tty\",\n\n nix::fcntl::OFlag::empty(),\n\n nix::sys::stat::Mode::empty(),\n\n )?;\n\n\n\n // if a background process calls tcsetpgrp, the kernel will send it\n\n // SIGTTOU which suspends it. if that background process is the session\n\n // leader and doesn't have SIGTTOU blocked, the kernel will instead just\n\n // return ENOTTY from the tcsetpgrp call rather than sending a signal to\n\n // avoid deadlocking the process. 
therefore, we need to ensure that\n\n // SIGTTOU is blocked here.\n\n\n\n // Safety: setting a signal handler to SigIgn is always safe\n\n unsafe {\n\n nix::sys::signal::signal(\n\n nix::sys::signal::Signal::SIGTTOU,\n\n nix::sys::signal::SigHandler::SigIgn,\n\n )?;\n", "file_path": "src/runner/sys.rs", "rank": 24, "score": 70788.5098523994 }, { "content": "fn parse_fd(s: &str) -> std::os::unix::io::RawFd {\n\n match s {\n\n \"in\" => 0,\n\n \"out\" => 1,\n\n \"err\" => 2,\n\n _ => s.parse().unwrap(),\n\n }\n\n}\n\n\n", "file_path": "src/parse/ast.rs", "rank": 25, "score": 70150.69442984069 }, { "content": "pub fn pipe() -> Result<(std::fs::File, std::fs::File)> {\n\n let (r, w) = nix::unistd::pipe2(nix::fcntl::OFlag::O_CLOEXEC)?;\n\n // Safety: these file descriptors were just returned by pipe2 above, and\n\n // are only available in this function, so nothing else can be accessing\n\n // them\n\n Ok((unsafe { std::fs::File::from_raw_fd(r) }, unsafe {\n\n std::fs::File::from_raw_fd(w)\n\n }))\n\n}\n\n\n", "file_path": "src/runner/sys.rs", "rank": 26, "score": 67581.31704247482 }, { "content": "fn data_dir() -> std::path::PathBuf {\n\n PROJECT_DIRS.data_dir().to_path_buf()\n\n}\n", "file_path": "src/dirs.rs", "rank": 27, "score": 67516.10504245665 }, { "content": "pub fn neg_pid(pid: nix::unistd::Pid) -> nix::unistd::Pid {\n\n nix::unistd::Pid::from_raw(-pid.as_raw())\n\n}\n", "file_path": "src/runner/sys.rs", "rank": 28, "score": 66105.60380548141 }, { "content": "#[derive(Debug)]\n\nenum Request {\n\n Input(Vec<u8>),\n\n Resize(u16, u16),\n\n}\n\n\n\npub struct Pty {\n\n vt: std::sync::Arc<std::sync::Mutex<Vt>>,\n\n request_w: tokio::sync::mpsc::UnboundedSender<Request>,\n\n}\n\n\n\nimpl Pty {\n\n pub fn new(\n\n size: (u16, u16),\n\n event_w: crate::shell::event::Writer,\n\n ) -> Result<(Self, pty_process::Pts)> {\n\n let (request_w, request_r) = tokio::sync::mpsc::unbounded_channel();\n\n\n\n let pty = pty_process::Pty::new()?;\n\n 
pty.resize(pty_process::Size::new(size.0, size.1))?;\n\n let pts = pty.pts()?;\n", "file_path": "src/shell/history/pty.rs", "rank": 29, "score": 59994.99221453116 }, { "content": "#[derive(clap::Parser)]\n\n#[clap(about = \"NoteBook SHell\")]\n\nstruct Opt {\n\n #[clap(short = 'c')]\n\n command: Option<String>,\n\n\n\n #[clap(long)]\n\n status_fd: Option<std::os::unix::io::RawFd>,\n\n}\n\n\n\n#[tokio::main]\n\nasync fn async_main(opt: Opt) -> Result<i32> {\n\n if let Some(command) = opt.command {\n\n let mut shell_write = opt.status_fd.and_then(|fd| {\n\n nix::sys::stat::fstat(fd).ok().map(|_| {\n\n // Safety: we don't create File instances for or read/write\n\n // data on this fd anywhere else\n\n unsafe { tokio::fs::File::from_raw_fd(fd) }\n\n })\n\n });\n\n\n\n return runner::main(command, &mut shell_write).await;\n\n }\n\n\n\n #[cfg(nbsh_tokio_console)]\n\n console_subscriber::init();\n\n\n\n shell::main().await\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 30, "score": 58329.55639076253 }, { "content": "#[allow(clippy::option_option)]\n\n#[derive(Default)]\n\nstruct Pending {\n\n key: std::collections::VecDeque<textmode::Key>,\n\n size: Option<(u16, u16)>,\n\n pty_output: bool,\n\n child_run_pipeline: std::collections::VecDeque<(usize, (usize, usize))>,\n\n child_suspend: std::collections::VecDeque<usize>,\n\n child_exit: Option<(usize, super::history::ExitInfo, Option<Env>)>,\n\n git_info: Option<Option<super::inputs::GitInfo>>,\n\n clock_timer: bool,\n\n done: bool,\n\n}\n\n\n\nimpl Pending {\n\n fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n fn get_event(&mut self) -> Option<Option<Event>> {\n\n if self.done {\n\n return Some(None);\n", "file_path": "src/shell/event.rs", "rank": 31, "score": 56790.2305916849 }, { "content": "struct Stack {\n\n frames: Vec<Frame>,\n\n}\n\n\n\nimpl Stack {\n\n fn new() -> Self {\n\n Self { frames: vec![] }\n\n }\n\n\n\n fn push(&mut self, frame: Frame) {\n\n self.frames.push(frame);\n\n }\n\n\n\n fn pop(&mut self) -> 
Frame {\n\n self.frames.pop().unwrap()\n\n }\n\n\n\n fn top(&self) -> Option<&Frame> {\n\n self.frames.last()\n\n }\n", "file_path": "src/runner/mod.rs", "rank": 32, "score": 56790.2305916849 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq)]\n\nstruct Redirect {\n\n from: std::os::unix::io::RawFd,\n\n to: Word,\n\n dir: super::Direction,\n\n}\n\n\n\nimpl Redirect {\n\n fn build_ast(pair: pest::iterators::Pair<Rule>) -> Self {\n\n assert!(matches!(pair.as_rule(), Rule::redirect));\n\n let mut iter = pair.into_inner();\n\n\n\n let prefix = iter.next().unwrap().as_str();\n\n let (from, dir) = prefix.strip_suffix(\">>\").map_or_else(\n\n || {\n\n prefix.strip_suffix('>').map_or_else(\n\n || {\n\n (\n\n prefix.strip_suffix('<').unwrap(),\n\n super::Direction::In,\n\n )\n", "file_path": "src/parse/ast.rs", "rank": 33, "score": 56790.2305916849 }, { "content": "#[derive(pest_derive::Parser)]\n\n#[grammar = \"shell.pest\"]\n\nstruct Shell;\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct Commands {\n\n commands: Vec<Command>,\n\n}\n\n\n\nimpl Commands {\n\n pub fn parse(full_cmd: &str) -> Result<Self, super::Error> {\n\n Ok(Self::build_ast(\n\n Shell::parse(Rule::line, full_cmd)\n\n .map_err(|e| super::Error::new(full_cmd.to_string(), e))?\n\n .next()\n\n .unwrap()\n\n .into_inner()\n\n .next()\n\n .unwrap(),\n\n ))\n\n }\n\n\n", "file_path": "src/parse/ast.rs", "rank": 34, "score": 56790.2305916849 }, { "content": "struct InnerReader {\n\n pending: std::sync::Mutex<Pending>,\n\n cvar: tokio::sync::Notify,\n\n}\n\n\n\nimpl InnerReader {\n\n fn new() -> Self {\n\n Self {\n\n pending: std::sync::Mutex::new(Pending::new()),\n\n cvar: tokio::sync::Notify::new(),\n\n }\n\n }\n\n\n\n async fn recv(&self) -> Option<Event> {\n\n loop {\n\n if let Some(event) = self.pending.lock().unwrap().get_event() {\n\n return event;\n\n }\n\n self.cvar.notified().await;\n\n }\n\n }\n\n\n\n fn new_event(&self, event: Option<Event>) {\n\n 
self.pending.lock().unwrap().new_event(event);\n\n self.cvar.notify_one();\n\n }\n\n}\n\n\n", "file_path": "src/shell/event.rs", "rank": 35, "score": 55388.31897823922 }, { "content": "#[derive(pest_derive::Parser)]\n\n#[grammar = \"history.pest\"]\n\nstruct HistoryLine;\n\n\n\npub struct History {\n\n entries: std::sync::Arc<std::sync::Mutex<Vec<Entry>>>,\n\n}\n\n\n\nimpl History {\n\n pub fn new() -> Self {\n\n let entries = std::sync::Arc::new(std::sync::Mutex::new(vec![]));\n\n tokio::spawn(Self::task(std::sync::Arc::clone(&entries)));\n\n Self { entries }\n\n }\n\n\n\n pub fn entry_count(&self) -> usize {\n\n self.entries.lock().unwrap().len()\n\n }\n\n\n\n async fn task(entries: std::sync::Arc<std::sync::Mutex<Vec<Entry>>>) {\n\n // TODO: we should actually read this in reverse order, because we\n\n // want to populate the most recent entries first\n", "file_path": "src/shell/old_history.rs", "rank": 36, "score": 54106.20749056259 }, { "content": "fn main() {\n\n match async_main(Opt::parse()) {\n\n Ok(code) => {\n\n std::process::exit(code);\n\n }\n\n Err(e) => {\n\n eprintln!(\"nbsh: {}\", e);\n\n std::process::exit(1);\n\n }\n\n };\n\n}\n", "file_path": "src/main.rs", "rank": 37, "score": 52832.71219828648 }, { "content": " pub trait Result {\n\n type T;\n\n type E;\n\n\n\n fn allow(self, allow_e: Self::E) -> Self;\n\n fn allow_with(self, allow_e: Self::E, default_t: Self::T) -> Self;\n\n }\n\n\n\n impl<T, E> Result for std::result::Result<T, E>\n\n where\n\n T: std::default::Default,\n\n E: std::cmp::PartialEq,\n\n {\n\n type T = T;\n\n type E = E;\n\n\n\n fn allow(self, allow_e: Self::E) -> Self {\n\n self.or_else(|e| {\n\n if e == allow_e {\n\n Ok(std::default::Default::default())\n", "file_path": "src/prelude.rs", "rank": 38, "score": 52635.26406661101 }, { "content": "fn and(\n\n mut exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n exe.shift();\n\n if env.latest_status().success() {\n\n let mut cmd = 
crate::runner::Command::new(exe, cfg.io().clone());\n\n cfg.setup_command(&mut cmd);\n\n Ok(command::Child::new_wrapped(cmd.spawn(env)?))\n\n } else {\n\n let status = env.latest_status();\n\n Ok(command::Child::new_task(move || status))\n\n }\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 39, "score": 51336.81277274103 }, { "content": "fn or(\n\n mut exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n exe.shift();\n\n if env.latest_status().success() {\n\n let status = env.latest_status();\n\n Ok(command::Child::new_task(move || status))\n\n } else {\n\n let mut cmd = crate::runner::Command::new(exe, cfg.io().clone());\n\n cfg.setup_command(&mut cmd);\n\n Ok(command::Child::new_wrapped(cmd.spawn(env)?))\n\n }\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 40, "score": 51336.81277274103 }, { "content": "fn home(user: Option<&std::ffi::OsStr>) -> Option<std::path::PathBuf> {\n\n let user = user.map_or_else(\n\n || users::get_user_by_uid(users::get_current_uid()),\n\n users::get_user_by_name,\n\n );\n\n user.map(|user| user.home_dir().to_path_buf())\n\n}\n\n\n\n#[cfg(test)]\n\n#[path = \"test_ast.rs\"]\n\nmod test;\n", "file_path": "src/parse/ast.rs", "rank": 41, "score": 50846.09432819966 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn unset(\n\n exe: crate::parse::Exe,\n\n _env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n Ok(command::Child::new_task(move || {\n\n let k = if let Some(k) = exe.args().get(0).map(String::as_str) {\n\n k\n\n } else {\n\n bail!(cfg, exe, \"usage: unset key\");\n\n };\n\n\n\n std::env::remove_var(k);\n\n std::process::ExitStatus::from_raw(0)\n\n }))\n\n}\n\n\n\n// clippy can't tell that the type is necessary\n\n#[allow(clippy::unnecessary_wraps)]\n", "file_path": "src/runner/builtins/mod.rs", "rank": 42, "score": 49978.39726770975 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn read(\n\n exe: crate::parse::Exe,\n\n 
_env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n Ok(command::Child::new_task(move || {\n\n let var = if let Some(var) = exe.args().get(0).map(String::as_str) {\n\n var\n\n } else {\n\n bail!(cfg, exe, \"usage: read var\");\n\n };\n\n\n\n let (val, done) = match cfg.io().read_line_stdin() {\n\n Ok((line, done)) => (line, done),\n\n Err(e) => {\n\n bail!(cfg, exe, e);\n\n }\n\n };\n\n\n\n std::env::set_var(var, val);\n\n std::process::ExitStatus::from_raw(if done { 1 << 8 } else { 0 })\n\n }))\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 43, "score": 49978.39726770975 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn cd(\n\n exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n let prev_pwd = env.prev_pwd();\n\n let home = env.var(\"HOME\");\n\n Ok(command::Child::new_task(move || {\n\n let dir = if let Some(dir) = exe.args().get(0) {\n\n if dir.is_empty() {\n\n \".\".to_string().into()\n\n } else if dir == \"-\" {\n\n prev_pwd\n\n } else {\n\n dir.into()\n\n }\n\n } else {\n\n let dir = home;\n\n if let Some(dir) = dir {\n\n dir.into()\n", "file_path": "src/runner/builtins/mod.rs", "rank": 44, "score": 49978.39726770975 }, { "content": "#[allow(clippy::unnecessary_wraps)]\n\nfn set(\n\n exe: crate::parse::Exe,\n\n _env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n Ok(command::Child::new_task(move || {\n\n let k = if let Some(k) = exe.args().get(0).map(String::as_str) {\n\n k\n\n } else {\n\n bail!(cfg, exe, \"usage: set key value\");\n\n };\n\n let v = if let Some(v) = exe.args().get(1).map(String::as_str) {\n\n v\n\n } else {\n\n bail!(cfg, exe, \"usage: set key value\");\n\n };\n\n\n\n std::env::set_var(k, v);\n\n std::process::ExitStatus::from_raw(0)\n\n }))\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 45, "score": 49978.39726770975 }, { "content": "// mostly just for testing and ensuring that builtins work, i'll likely 
remove\n\n// this later, since the binary seems totally fine\n\nfn echo(\n\n exe: crate::parse::Exe,\n\n _env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n Ok(command::Child::new_task(move || {\n\n macro_rules! write_stdout {\n\n ($bytes:expr) => {\n\n if let Err(e) = cfg.io().write_stdout($bytes) {\n\n cfg.io()\n\n .write_stderr(format!(\"echo: {}\", e).as_bytes())\n\n .unwrap();\n\n return std::process::ExitStatus::from_raw(1 << 8);\n\n }\n\n };\n\n }\n\n let count = exe.args().len();\n\n for (i, arg) in exe.args().iter().enumerate() {\n\n write_stdout!(arg.as_bytes());\n\n if i == count - 1 {\n\n write_stdout!(b\"\\n\");\n\n } else {\n\n write_stdout!(b\" \");\n\n }\n\n }\n\n\n\n std::process::ExitStatus::from_raw(0)\n\n }))\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 46, "score": 49977.773992797855 }, { "content": "fn spawn_children(\n\n mut cmds: Vec<Command>,\n\n env: &Env,\n\n interactive: bool,\n\n) -> Result<(Vec<Child>, Option<nix::unistd::Pid>)> {\n\n for i in 0..(cmds.len() - 1) {\n\n let (r, w) = sys::pipe()?;\n\n cmds[i].stdout(w);\n\n cmds[i + 1].stdin(r);\n\n }\n\n\n\n let mut children = vec![];\n\n let mut pg_pid = None;\n\n for mut cmd in cmds {\n\n // Safety: setpgid is an async-signal-safe function\n\n unsafe {\n\n cmd.pre_exec(move || {\n\n sys::setpgid_child(pg_pid)?;\n\n Ok(())\n\n });\n", "file_path": "src/runner/mod.rs", "rank": 47, "score": 49974.450900760785 }, { "content": "fn command(\n\n mut exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n exe.shift();\n\n let mut cmd = crate::runner::Command::new_binary(&exe);\n\n cfg.setup_command(&mut cmd);\n\n Ok(command::Child::new_wrapped(cmd.spawn(env)?))\n\n}\n\n\n", "file_path": "src/runner/builtins/mod.rs", "rank": 48, "score": 49974.450900760785 }, { "content": "fn builtin(\n\n mut exe: crate::parse::Exe,\n\n env: &Env,\n\n cfg: command::Cfg,\n\n) -> Result<command::Child> {\n\n exe.shift();\n\n let 
mut cmd = crate::runner::Command::new_builtin(exe, cfg.io().clone());\n\n cfg.setup_command(&mut cmd);\n\n Ok(command::Child::new_wrapped(cmd.spawn(env)?))\n\n}\n", "file_path": "src/runner/builtins/mod.rs", "rank": 49, "score": 49974.450900760785 }, { "content": "fn apply_redirects(\n\n redirects: &[crate::parse::Redirect],\n\n) -> std::io::Result<()> {\n\n for redirect in redirects {\n\n match &redirect.to {\n\n crate::parse::RedirectTarget::Fd(fd) => {\n\n nix::unistd::dup2(*fd, redirect.from)?;\n\n }\n\n crate::parse::RedirectTarget::File(path) => {\n\n let fd = redirect.dir.open(path)?;\n\n if fd != redirect.from {\n\n nix::unistd::dup2(fd, redirect.from)?;\n\n nix::unistd::close(fd)?;\n\n }\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/runner/command.rs", "rank": 50, "score": 49974.450900760785 }, { "content": "#[test]\n\nfn test_parse() {\n\n let entry: Entry =\n\n \": 1646779848:1234.56;vim ~/.zsh_history\".parse().unwrap();\n\n assert_eq!(entry.cmdline, \"vim ~/.zsh_history\");\n\n assert_eq!(\n\n entry.duration,\n\n Some(std::time::Duration::from_nanos(1_234_560_000_000))\n\n );\n\n assert_eq!(\n\n entry.start_time,\n\n Some(time::macros::datetime!(2022-03-08 22:50:48).assume_utc())\n\n );\n\n\n\n let entry: Entry = \": 1646779848:1;vim ~/.zsh_history\".parse().unwrap();\n\n assert_eq!(entry.cmdline, \"vim ~/.zsh_history\");\n\n assert_eq!(entry.duration, Some(std::time::Duration::from_secs(1)));\n\n assert_eq!(\n\n entry.start_time,\n\n Some(time::macros::datetime!(2022-03-08 22:50:48).assume_utc())\n\n );\n\n\n\n let entry: Entry = \"vim ~/.zsh_history\".parse().unwrap();\n\n assert_eq!(entry.cmdline, \"vim ~/.zsh_history\");\n\n assert_eq!(entry.duration, None);\n\n assert_eq!(entry.start_time, None);\n\n}\n", "file_path": "src/shell/old_history.rs", "rank": 51, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_whitespace() {\n\n parse_eq!(\" foo \", cs!(p!((3, 6), e!(w!(\"foo\")))));\n\n parse_eq!(\n\n \" foo # this is a 
comment\",\n\n cs!(p!((3, 6), e!(w!(\"foo\"))))\n\n );\n\n parse_eq!(\"foo#comment\", cs!(p!((0, 3), e!(w!(\"foo\")))));\n\n parse_eq!(\n\n \"foo;bar|baz;quux#comment\",\n\n cs!(\n\n p!((0, 3), e!(w!(\"foo\"))),\n\n p!((4, 11), e!(w!(\"bar\")), e!(w!(\"baz\"))),\n\n p!((12, 16), e!(w!(\"quux\")))\n\n )\n\n );\n\n parse_eq!(\n\n \"foo | bar \",\n\n cs!(p!((0, 12), e!(w!(\"foo\")), e!(w!(\"bar\"))))\n\n );\n\n parse_eq!(\n", "file_path": "src/parse/test_ast.rs", "rank": 52, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_redirect() {\n\n parse_eq!(\n\n \"foo > bar\",\n\n cs!(p!((0, 9), e!(w!(\"foo\") ; r!(1, w!(\"bar\"), Out))))\n\n );\n\n parse_eq!(\n\n \"foo <bar\",\n\n cs!(p!((0, 8), e!(w!(\"foo\") ; r!(0, w!(\"bar\"), In))))\n\n );\n\n parse_eq!(\n\n \"foo > /dev/null 2>&1\",\n\n cs!(p!(\n\n (0, 20),\n\n e!(\n\n w!(\"foo\") ;\n\n r!(1, w!(\"/dev/null\"), Out), r!(2, w!(\"&1\"), Out)\n\n )\n\n ))\n\n );\n\n parse_eq!(\n", "file_path": "src/parse/test_ast.rs", "rank": 53, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_basic() {\n\n parse_eq!(\"foo\", cs!(p!((0, 3), e!(w!(\"foo\")))));\n\n parse_eq!(\"foo bar\", cs!(p!((0, 7), e!(w!(\"foo\"), w!(\"bar\")))));\n\n parse_eq!(\n\n \"foo bar baz\",\n\n cs!(p!((0, 11), e!(w!(\"foo\"), w!(\"bar\"), w!(\"baz\"))))\n\n );\n\n parse_eq!(\"foo | bar\", cs!(p!((0, 9), e!(w!(\"foo\")), e!(w!(\"bar\")))));\n\n parse_eq!(\n\n \"command ls; perl -E 'say foo' | tr a-z A-Z; builtin echo bar\",\n\n cs!(\n\n p!((0, 10), e!(w!(\"command\"), w!(\"ls\"))),\n\n p!(\n\n (12, 42),\n\n e!(w!(\"perl\"), w!(\"-E\"), w!(wps!(\"say foo\"))),\n\n e!(w!(\"tr\"), w!(\"a-z\"), w!(\"A-Z\"))\n\n ),\n\n p!((44, 60), e!(w!(\"builtin\"), w!(\"echo\"), w!(\"bar\")))\n\n )\n\n );\n", "file_path": "src/parse/test_ast.rs", "rank": 54, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_deserialize() {\n\n deserialize_eq!(\"foo\", e!(w!(\"foo\")));\n\n deserialize_eq!(\"foo bar baz\", e!(w!(\"foo\"), w!(\"bar\"), 
w!(\"baz\")));\n\n}\n", "file_path": "src/parse/test_ast.rs", "rank": 55, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_alternation() {\n\n parse_eq!(\n\n \"echo {foo,bar}\",\n\n cs!(p!((0, 14), e!(w!(\"echo\"), w!(wpa!(w!(\"foo\"), w!(\"bar\"))))))\n\n );\n\n parse_eq!(\n\n \"echo {foo,bar}.rs\",\n\n cs!(p!(\n\n (0, 17),\n\n e!(w!(\"echo\"), w!(wpa!(w!(\"foo\"), w!(\"bar\")), wpb!(\".rs\")))\n\n ))\n\n );\n\n parse_eq!(\n\n \"echo {foo,bar,baz}.rs\",\n\n cs!(p!(\n\n (0, 21),\n\n e!(\n\n w!(\"echo\"),\n\n w!(wpa!(w!(\"foo\"), w!(\"bar\"), w!(\"baz\")), wpb!(\".rs\"))\n\n )\n", "file_path": "src/parse/test_ast.rs", "rank": 56, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_escape() {\n\n parse_eq!(\"foo\\\\ bar\", cs!(p!((0, 8), e!(w!(\"foo bar\")))));\n\n parse_eq!(\"'foo\\\\ bar'\", cs!(p!((0, 10), e!(w!(wps!(\"foo\\\\ bar\"))))));\n\n parse_eq!(\"\\\"foo\\\\ bar\\\"\", cs!(p!((0, 10), e!(w!(wpd!(\"foo bar\"))))));\n\n parse_eq!(\"\\\"foo\\\\\\\"bar\\\"\", cs!(p!((0, 10), e!(w!(wpd!(\"foo\\\"bar\"))))));\n\n parse_eq!(\n\n \"'foo\\\\'bar\\\\\\\\'\",\n\n cs!(p!((0, 12), e!(w!(wps!(\"foo'bar\\\\\")))))\n\n );\n\n parse_eq!(\n\n \"foo > bar\\\\ baz\",\n\n cs!(p!((0, 14), e!(w!(\"foo\") ; r!(1, w!(\"bar baz\"), Out))))\n\n );\n\n}\n\n\n", "file_path": "src/parse/test_ast.rs", "rank": 57, "score": 48728.50943791392 }, { "content": "#[test]\n\nfn test_parts() {\n\n parse_eq!(\n\n \"echo \\\"$HOME/bin\\\"\",\n\n cs!(p!((0, 16), e!(w!(\"echo\"), w!(wpv!(\"HOME\"), wpd!(\"/bin\")))))\n\n );\n\n parse_eq!(\n\n \"echo \\\"dir: $HOME/bin\\\"\",\n\n cs!(p!(\n\n (0, 21),\n\n e!(w!(\"echo\"), w!(wpd!(\"dir: \"), wpv!(\"HOME\"), wpd!(\"/bin\")))\n\n ))\n\n );\n\n parse_eq!(\n\n \"echo $HOME/bin\",\n\n cs!(p!((0, 14), e!(w!(\"echo\"), w!(wpv!(\"HOME\"), wpb!(\"/bin\")))))\n\n );\n\n parse_eq!(\n\n \"echo '$HOME/bin'\",\n\n cs!(p!((0, 16), e!(w!(\"echo\"), w!(wps!(\"$HOME/bin\")))))\n\n );\n", "file_path": "src/parse/test_ast.rs", "rank": 58, 
"score": 48728.50943791392 }, { "content": "fn resize_event() -> Event {\n\n Event::Resize(terminal_size::terminal_size().map_or(\n\n (24, 80),\n\n |(terminal_size::Width(w), terminal_size::Height(h))| (h, w),\n\n ))\n\n}\n", "file_path": "src/shell/inputs/signals.rs", "rank": 59, "score": 45313.39467652333 }, { "content": "fn config_dir() -> std::path::PathBuf {\n\n PROJECT_DIRS.config_dir().to_path_buf()\n\n}\n\n\n", "file_path": "src/dirs.rs", "rank": 60, "score": 40937.72138189035 }, { "content": "type Builtin = &'static (dyn for<'a> Fn(\n\n crate::parse::Exe,\n\n &'a Env,\n\n command::Cfg,\n\n) -> Result<command::Child>\n\n + Sync\n\n + Send);\n\n\n\n#[allow(clippy::as_conversions)]\n\nstatic BUILTINS: once_cell::sync::Lazy<\n\n std::collections::HashMap<&'static str, Builtin>,\n\n> = once_cell::sync::Lazy::new(|| {\n\n let mut builtins = std::collections::HashMap::new();\n\n builtins.insert(\"cd\", &cd as Builtin);\n\n builtins.insert(\"set\", &set);\n\n builtins.insert(\"unset\", &unset);\n\n builtins.insert(\"echo\", &echo);\n\n builtins.insert(\"read\", &read);\n\n builtins.insert(\"and\", &and);\n\n builtins.insert(\"or\", &or);\n", "file_path": "src/runner/builtins/mod.rs", "rank": 61, "score": 38625.210231049976 }, { "content": "use crate::prelude::*;\n\n\n", "file_path": "src/format.rs", "rank": 62, "score": 34692.130930308864 }, { "content": " self.set_var(__NBSH_PREV_PWD, prev_pwd);\n\n }\n\n\n\n pub fn apply(&self, cmd: &mut pty_process::Command) {\n\n match self {\n\n Self::V0(env) => {\n\n cmd.current_dir(&env.pwd);\n\n cmd.env_clear();\n\n cmd.envs(env.vars.iter());\n\n }\n\n }\n\n }\n\n\n\n pub fn update(&mut self) -> Result<()> {\n\n let idx = self.idx();\n\n let status = self.latest_status();\n\n let prev_pwd = self.prev_pwd();\n\n *self = Self::new()?;\n\n self.set_idx(idx);\n\n self.set_status(status);\n", "file_path": "src/env.rs", "rank": 63, "score": 34670.878677063825 }, { "content": " match self {\n\n Self::V0(env) => &env.pwd,\n\n 
}\n\n }\n\n\n\n pub fn var(&self, k: &str) -> Option<String> {\n\n match self {\n\n Self::V0(env) => self.special_var(k).or_else(|| {\n\n env.vars\n\n .get(std::ffi::OsStr::new(k))\n\n .map(|v| v.to_str().unwrap().to_string())\n\n }),\n\n }\n\n }\n\n\n\n pub fn set_var<\n\n K: Into<std::ffi::OsString>,\n\n V: Into<std::ffi::OsString>,\n\n >(\n\n &mut self,\n", "file_path": "src/env.rs", "rank": 64, "score": 34668.52944863409 }, { "content": "use crate::prelude::*;\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\npub enum Env {\n\n V0(V0),\n\n}\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\npub struct V0 {\n\n pwd: std::path::PathBuf,\n\n vars: std::collections::HashMap<std::ffi::OsString, std::ffi::OsString>,\n\n}\n\n\n\nconst __NBSH_IDX: &str = \"__NBSH_IDX\";\n\nconst __NBSH_LATEST_STATUS: &str = \"__NBSH_LATEST_STATUS\";\n\nconst __NBSH_PREV_PWD: &str = \"__NBSH_PREV_PWD\";\n\n\n\nimpl Env {\n\n pub fn new() -> Result<Self> {\n\n let pwd = std::env::current_dir()?;\n", "file_path": "src/env.rs", "rank": 65, "score": 34668.07006977346 }, { "content": " k: K,\n\n v: V,\n\n ) {\n\n match self {\n\n Self::V0(env) => {\n\n env.vars.insert(k.into(), v.into());\n\n }\n\n }\n\n }\n\n\n\n pub fn idx(&self) -> usize {\n\n self.var(__NBSH_IDX).unwrap().parse().unwrap()\n\n }\n\n\n\n pub fn set_idx(&mut self, idx: usize) {\n\n self.set_var(__NBSH_IDX, format!(\"{}\", idx));\n\n }\n\n\n\n pub fn latest_status(&self) -> std::process::ExitStatus {\n\n std::process::ExitStatus::from_raw(\n", "file_path": "src/env.rs", "rank": 66, "score": 34665.14811696575 }, { "content": " Ok(Self::V0(V0 {\n\n pwd: pwd.clone(),\n\n vars: std::env::vars_os()\n\n .chain(Self::defaults(pwd).into_iter())\n\n .collect(),\n\n }))\n\n }\n\n\n\n pub fn new_from_env() -> Result<Self> {\n\n let pwd = std::env::current_dir()?;\n\n Ok(Self::V0(V0 {\n\n pwd: pwd.clone(),\n\n vars: Self::defaults(pwd)\n\n .into_iter()\n\n .chain(std::env::vars_os())\n\n 
.collect(),\n\n }))\n\n }\n\n\n\n pub fn pwd(&self) -> &std::path::Path {\n", "file_path": "src/env.rs", "rank": 67, "score": 34659.13516866979 }, { "content": " self.set_prev_pwd(prev_pwd);\n\n Ok(())\n\n }\n\n\n\n fn special_var(&self, k: &str) -> Option<String> {\n\n Some(match k {\n\n \"$\" => crate::info::pid(),\n\n \"?\" => {\n\n let status = self.latest_status();\n\n status\n\n .signal()\n\n .map_or_else(\n\n || status.code().unwrap(),\n\n |signal| signal + 128,\n\n )\n\n .to_string()\n\n }\n\n _ => return None,\n\n })\n\n }\n", "file_path": "src/env.rs", "rank": 68, "score": 34657.74260918248 }, { "content": "\n\n fn defaults(\n\n pwd: std::path::PathBuf,\n\n ) -> [(std::ffi::OsString, std::ffi::OsString); 3] {\n\n [\n\n (__NBSH_IDX.into(), \"0\".into()),\n\n (__NBSH_LATEST_STATUS.into(), \"0\".into()),\n\n (__NBSH_PREV_PWD.into(), pwd.into()),\n\n ]\n\n }\n\n}\n", "file_path": "src/env.rs", "rank": 69, "score": 34652.65701269525 }, { "content": " self.var(__NBSH_LATEST_STATUS).unwrap().parse().unwrap(),\n\n )\n\n }\n\n\n\n pub fn set_status(&mut self, status: std::process::ExitStatus) {\n\n self.set_var(\n\n __NBSH_LATEST_STATUS,\n\n format!(\n\n \"{}\",\n\n (status.code().unwrap_or(0) << 8)\n\n | status.signal().unwrap_or(0)\n\n ),\n\n );\n\n }\n\n\n\n pub fn prev_pwd(&self) -> std::path::PathBuf {\n\n std::path::PathBuf::from(self.var(__NBSH_PREV_PWD).unwrap())\n\n }\n\n\n\n pub fn set_prev_pwd(&mut self, prev_pwd: std::path::PathBuf) {\n", "file_path": "src/env.rs", "rank": 70, "score": 34652.162659363064 }, { "content": "use crate::prelude::*;\n\n\n", "file_path": "src/info.rs", "rank": 71, "score": 34628.78883879625 }, { "content": "enum Frame {\n\n If(bool, bool),\n\n While(bool, usize),\n\n For(bool, usize, Vec<String>),\n\n}\n\n\n\npub async fn main(\n\n commands: String,\n\n shell_write: &mut Option<tokio::fs::File>,\n\n) -> Result<i32> {\n\n let mut env = Env::new_from_env()?;\n\n let config = crate::config::Config::load()?;\n\n 
run_commands(commands, &mut env, &config, shell_write).await?;\n\n let status = env.latest_status();\n\n write_event(shell_write, Event::Exit(env)).await?;\n\n\n\n if let Some(signal) = status.signal() {\n\n nix::sys::signal::raise(signal.try_into().unwrap())?;\n\n }\n\n Ok(status.code().unwrap())\n", "file_path": "src/runner/mod.rs", "rank": 72, "score": 31446.92677015463 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum Scene {\n\n Readline,\n\n Fullscreen,\n\n}\n\n\n\npub enum Action {\n\n Refresh,\n\n HardRefresh,\n\n Resize(u16, u16),\n\n Quit,\n\n}\n\n\n\npub struct Shell {\n\n readline: readline::Readline,\n\n history: history::History,\n\n old_history: old_history::History,\n\n env: Env,\n\n git: Option<inputs::GitInfo>,\n\n focus: Focus,\n\n scene: Scene,\n", "file_path": "src/shell/mod.rs", "rank": 73, "score": 31446.92677015463 }, { "content": " self.real_bell_pending = false;\n\n }\n\n if focused {\n\n self.bell = false;\n\n }\n\n should\n\n }\n\n\n\n pub fn binary(&self) -> bool {\n\n self.vt.screen().errors() > 5\n\n }\n\n\n\n pub fn output_lines(&self, focused: bool, running: bool) -> usize {\n\n if self.binary() {\n\n return 1;\n\n }\n\n\n\n let screen = self.vt.screen();\n\n let mut last_row = 0;\n\n for (idx, row) in screen.rows(0, screen.size().1).enumerate() {\n", "file_path": "src/shell/history/pty.rs", "rank": 74, "score": 31387.10637639309 }, { "content": "impl Vt {\n\n pub fn new(size: (u16, u16)) -> Self {\n\n Self {\n\n vt: vt100::Parser::new(size.0, size.1, 0),\n\n bell_state: 0,\n\n bell: false,\n\n real_bell_pending: false,\n\n }\n\n }\n\n\n\n pub fn process(&mut self, bytes: &[u8]) {\n\n self.vt.process(bytes);\n\n let screen = self.vt.screen();\n\n\n\n let new_bell_state = screen.audible_bell_count();\n\n if new_bell_state != self.bell_state {\n\n self.bell = true;\n\n self.real_bell_pending = true;\n\n self.bell_state = new_bell_state;\n\n }\n", "file_path": "src/shell/history/pty.rs", "rank": 75, "score": 31385.710672923047 }, 
{ "content": " },\n\n Res::Request(Request::Input(bytes)) => {\n\n pty_w.write(&bytes).await.unwrap();\n\n }\n\n Res::Request(Request::Resize(row, col)) => {\n\n pty_w.resize(pty_process::Size::new(row, col)).unwrap();\n\n vt.lock().unwrap().set_size((row, col));\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct Vt {\n\n vt: vt100::Parser,\n\n bell_state: usize,\n\n bell: bool,\n\n real_bell_pending: bool,\n\n}\n\n\n", "file_path": "src/shell/history/pty.rs", "rank": 76, "score": 31380.777280980205 }, { "content": " }\n\n\n\n pub fn screen(&self) -> &vt100::Screen {\n\n self.vt.screen()\n\n }\n\n\n\n pub fn set_size(&mut self, size: (u16, u16)) {\n\n self.vt.set_size(size.0, size.1);\n\n }\n\n\n\n pub fn is_bell(&self) -> bool {\n\n self.bell\n\n }\n\n\n\n pub fn bell(&mut self, focused: bool) -> bool {\n\n let mut should = false;\n\n if self.real_bell_pending {\n\n if self.bell {\n\n should = true;\n\n }\n", "file_path": "src/shell/history/pty.rs", "rank": 77, "score": 31380.541878835567 }, { "content": " f(&mut *vt)\n\n }\n\n\n\n pub fn lock_vt(&self) -> std::sync::MutexGuard<Vt> {\n\n self.vt.lock().unwrap()\n\n }\n\n\n\n pub fn fullscreen(&self) -> bool {\n\n self.with_vt(|vt| vt.screen().alternate_screen())\n\n }\n\n\n\n pub fn input(&self, bytes: Vec<u8>) {\n\n #[allow(clippy::let_underscore_drop)]\n\n let _ = self.request_w.send(Request::Input(bytes));\n\n }\n\n\n\n pub fn resize(&self, size: (u16, u16)) {\n\n #[allow(clippy::let_underscore_drop)]\n\n let _ = self.request_w.send(Request::Resize(size.0, size.1));\n\n }\n", "file_path": "src/shell/history/pty.rs", "rank": 78, "score": 31378.55086808254 }, { "content": "\n\n let vt = std::sync::Arc::new(std::sync::Mutex::new(Vt::new(size)));\n\n\n\n tokio::spawn(Self::task(\n\n pty,\n\n std::sync::Arc::clone(&vt),\n\n request_r,\n\n event_w,\n\n ));\n\n\n\n Ok((Self { vt, request_w }, pts))\n\n }\n\n\n\n pub fn with_vt<T>(&self, f: impl FnOnce(&Vt) -> T) -> T {\n\n let vt = self.vt.lock().unwrap();\n\n f(&*vt)\n\n 
}\n\n\n\n pub fn with_vt_mut<T>(&self, f: impl FnOnce(&mut Vt) -> T) -> T {\n\n let mut vt = self.vt.lock().unwrap();\n", "file_path": "src/shell/history/pty.rs", "rank": 79, "score": 31378.16954278057 }, { "content": " if !row.is_empty() {\n\n last_row = idx + 1;\n\n }\n\n }\n\n if focused && running {\n\n last_row = std::cmp::max(\n\n last_row,\n\n usize::from(screen.cursor_position().0) + 1,\n\n );\n\n }\n\n last_row\n\n }\n\n}\n", "file_path": "src/shell/history/pty.rs", "rank": 80, "score": 31367.983162648183 }, { "content": "\n\n async fn task(\n\n pty: pty_process::Pty,\n\n vt: std::sync::Arc<std::sync::Mutex<Vt>>,\n\n request_r: tokio::sync::mpsc::UnboundedReceiver<Request>,\n\n event_w: crate::shell::event::Writer,\n\n ) {\n\n enum Res {\n\n Read(Result<bytes::Bytes, std::io::Error>),\n\n Request(Request),\n\n }\n\n\n\n let (pty_r, mut pty_w) = pty.into_split();\n\n let mut stream: futures_util::stream::SelectAll<_> = [\n\n tokio_util::io::ReaderStream::new(pty_r)\n\n .map(Res::Read)\n\n .boxed(),\n\n tokio_stream::wrappers::UnboundedReceiverStream::new(request_r)\n\n .map(Res::Request)\n\n .boxed(),\n", "file_path": "src/shell/history/pty.rs", "rank": 81, "score": 31365.96924832588 }, { "content": " ]\n\n .into_iter()\n\n .collect();\n\n while let Some(res) = stream.next().await {\n\n match res {\n\n Res::Read(res) => match res {\n\n Ok(bytes) => {\n\n vt.lock().unwrap().process(&bytes);\n\n event_w.send(Event::PtyOutput);\n\n }\n\n Err(e) => {\n\n // this means that there are no longer any open pts\n\n // fds. 
we could alternately signal this through an\n\n // explicit channel at ChildExit time, but this seems\n\n // reliable enough.\n\n if e.raw_os_error() == Some(libc::EIO) {\n\n return;\n\n }\n\n panic!(\"pty read failed: {:?}\", e);\n\n }\n", "file_path": "src/shell/history/pty.rs", "rank": 82, "score": 31363.85786939356 }, { "content": "use crate::shell::prelude::*;\n\n\n\n#[derive(Debug)]\n", "file_path": "src/shell/history/pty.rs", "rank": 83, "score": 31354.776257825408 }, { "content": "\n\n pub fn render(\n\n &self,\n\n out: &mut impl textmode::Textmode,\n\n repl_lines: usize,\n\n focus: Option<usize>,\n\n scrolling: bool,\n\n offset: time::UtcOffset,\n\n ) {\n\n let mut cursor = None;\n\n for (idx, used_lines, mut vt) in\n\n self.visible(repl_lines, focus, scrolling).rev()\n\n {\n\n let focused = focus.map_or(false, |focus| idx == focus);\n\n out.move_to(\n\n (usize::from(self.size.0) - used_lines).try_into().unwrap(),\n\n 0,\n\n );\n\n self.entries[idx].render(\n\n out,\n", "file_path": "src/shell/history/mod.rs", "rank": 99, "score": 45.2793595725506 } ]
Rust
code_analysis/rust_parser/src/generics.rs
LucaCappelletti94/EnsmallenGraph
572532b6d3f4352bf58f9ccca955376acd95fd89
use super::*; #[derive(Debug, Clone, PartialEq)] pub enum GenericValue{ Type(Type), Lifetime(Lifetime), TypeAssignement(Type, Type), TypeInheritance(Type, Type), } impl Parse for GenericValue { fn parse(mut data: &[u8]) -> (&[u8], Self) { if data.starts_with(b"'") { let res = GenericValue::Lifetime(parse!(data, Lifetime)); return (data, res); } let start = parse!(data, Type); if data.starts_with(b":") { data = &data[1..]; let end = parse!(data, Type); return (data, GenericValue::TypeInheritance(start, end)); } if data.starts_with(b"=") { data = &data[1..]; let end = parse!(data, Type); return (data, GenericValue::TypeAssignement(start, end)); } (data, GenericValue::Type(start)) } } impl CmpWithoutModifiers for GenericValue { fn cmp_without_modifiers(&self, other: &GenericValue) -> bool { match (self, other) { (GenericValue::Type(t1), GenericValue::Type(t2)) => t1.cmp_without_modifiers(t2), (GenericValue::TypeAssignement(t1, t2), GenericValue::TypeAssignement(o1, o2)) => { t1.cmp_without_modifiers(o1) && t2.cmp_without_modifiers(o2) } _ => false, } } } impl PartialEq<&str> for GenericValue { fn eq(&self, other:&&str) -> bool { match self { GenericValue::Type(t) => t == other, GenericValue::TypeAssignement(t1, t2) => { match other.split_once("=") { Some((v1, v2)) => { t1 == v1 && t2 == v2 } None => false } } _ => false, } } } impl From<GenericValue> for String { fn from(x: GenericValue) -> String { match x { GenericValue::Lifetime(lt) => { format!("'{}", lt.0) } GenericValue::Type(t) => { String::from(t) } GenericValue::TypeAssignement(t1, t2) => { format!("{} = {}", String::from(t1), String::from(t2)) } GenericValue::TypeInheritance(t1, t2) => { format!("{} : {}", String::from(t1), String::from(t2)) } } } } #[derive(Debug, Clone, PartialEq)] pub struct Generics(pub Vec<GenericValue>); impl std::ops::Index<usize> for Generics { type Output = GenericValue; fn index(&self, index: usize) -> &Self::Output { &self.0[index] } } impl CanParse for Generics { fn can_parse(mut 
data: &[u8]) -> bool { data = skip_whitespace(data); data.starts_with(b"<") } } impl Parse for Generics { fn parse(mut data: &[u8]) -> (&[u8], Self) { let mut generics = Vec::new(); let (remainder, mut matching) = get_next_matching(data, b'<', b'>'); data = remainder; while !matching.is_empty() { generics.push(parse!(matching, GenericValue)); if matching.starts_with(b",") { matching = &matching[1..]; } } (data, Generics(generics)) } } impl Default for Generics { fn default() -> Self { Generics(Vec::new()) } } impl From<Generics> for String { fn from(x: Generics) -> String { if x.0.is_empty(){ return String::new(); } let mut result = "<".to_string(); for gen_val in x.0 { result.push_str(&String::from(gen_val)); result.push_str(", "); } result = result.trim_end_matches(&", ").to_string(); result.push('>'); result } } impl CmpWithoutModifiers for Generics { fn cmp_without_modifiers(&self, other:&Generics) -> bool { if self.0.len() != other.0.len() { return false; } for i in 0..self.0.len(){ if !self.0[i].cmp_without_modifiers(&other.0[i]){ return false; } } true } }
use super::*; #[derive(Debug, Clone, PartialEq)] pub enum GenericValue{ Type(Type), Lifetime(Lifetime), TypeAssignement(Type, Type), TypeInheritance(Type, Type), } impl Parse for GenericValue { fn parse(mut data: &[u8]) -> (&[u8], Self) { if data.starts_with(b"'") { let res = GenericValue::Lifetime(parse!(data, Lifetime)); return (data, res); } let start = parse!(data, Type); if data.starts_with(b":") { data = &data[1..]; let end = parse!(data, Type); return (data, GenericValue::TypeInheritance(start, end)); } if data.starts_with(b"=") { data = &data[1..]; let end = parse!(data, Type); return (data, GenericValue::TypeAssignement(start, end)); } (data, GenericValue::Type(start)) } } impl CmpWithoutModifiers for GenericValue { fn cmp_without_modifiers(&self, other: &GenericValue) -> bool { match (self, other) { (GenericValue::Type(t1), GenericValue::Type(t2)) => t1.cmp_without_modifiers(t2), (GenericValue::TypeAssignement(t1, t2), GenericValue::TypeAssignement(o1, o2)) => { t1.cmp_without_modifiers(o1) && t2.cmp_without_modifiers(o2) } _ => false, } } } impl PartialEq<&str> for GenericValue { fn eq(&self, other:&&str) -> bool { match self { GenericValue::Type(t) => t == other, GenericValue::TypeAssignement(t1, t2) => { match other.split_once("=") { Some((v1, v2)) => { t1 == v1 && t2 == v2 } None => false } } _ => false, } } } impl From<GenericValue> for String {
} #[derive(Debug, Clone, PartialEq)] pub struct Generics(pub Vec<GenericValue>); impl std::ops::Index<usize> for Generics { type Output = GenericValue; fn index(&self, index: usize) -> &Self::Output { &self.0[index] } } impl CanParse for Generics { fn can_parse(mut data: &[u8]) -> bool { data = skip_whitespace(data); data.starts_with(b"<") } } impl Parse for Generics { fn parse(mut data: &[u8]) -> (&[u8], Self) { let mut generics = Vec::new(); let (remainder, mut matching) = get_next_matching(data, b'<', b'>'); data = remainder; while !matching.is_empty() { generics.push(parse!(matching, GenericValue)); if matching.starts_with(b",") { matching = &matching[1..]; } } (data, Generics(generics)) } } impl Default for Generics { fn default() -> Self { Generics(Vec::new()) } } impl From<Generics> for String { fn from(x: Generics) -> String { if x.0.is_empty(){ return String::new(); } let mut result = "<".to_string(); for gen_val in x.0 { result.push_str(&String::from(gen_val)); result.push_str(", "); } result = result.trim_end_matches(&", ").to_string(); result.push('>'); result } } impl CmpWithoutModifiers for Generics { fn cmp_without_modifiers(&self, other:&Generics) -> bool { if self.0.len() != other.0.len() { return false; } for i in 0..self.0.len(){ if !self.0[i].cmp_without_modifiers(&other.0[i]){ return false; } } true } }
fn from(x: GenericValue) -> String { match x { GenericValue::Lifetime(lt) => { format!("'{}", lt.0) } GenericValue::Type(t) => { String::from(t) } GenericValue::TypeAssignement(t1, t2) => { format!("{} = {}", String::from(t1), String::from(t2)) } GenericValue::TypeInheritance(t1, t2) => { format!("{} : {}", String::from(t1), String::from(t2)) } } }
function_block-full_function
[]
Rust
crates/shell/build.rs
BSFishy/carton
0edcde473e381d526eecb6777d20349ff751b7d3
#![allow(clippy::if_same_then_else)] use std::env; use std::path::PathBuf; macro_rules! cargo { ($value:expr) => { println!("cargo:{}", $value) } } macro_rules! warning { ($message:expr) => { cargo!(format!("warning={}", $message)); }; ($message:expr $(, $extra:expr)*) => { warning!(format!($message $(, $extra)*)); } } macro_rules! config { ($key:expr) => { cargo!(format!("rustc-cfg={}", $key)); }; ($key:expr => $value:expr) => { config!(format!("{}=\"{}\"", $key, $value)); } } #[derive(Debug, Eq, PartialEq, Copy, Clone)] enum Platform { Windows, Mac, X11, Wayland, OpenGL, Vulkan, } fn main() { let mut platforms: Vec<Platform> = Vec::new(); add_platforms(&mut platforms); validate_platforms(&mut platforms); let platform = finalize_platform(platforms); link_with_platform(&platform); config!("platform" => match platform { Platform::Windows => "windows", Platform::Mac => "macos", Platform::X11 => "x11", Platform::Wayland => "wayland", Platform::OpenGL => "opengl", Platform::Vulkan => "vulkan", }); println!("cargo:rerun-if-changed=wrapper.h"); let mut bindings = bindgen::Builder::default() .header("wrapper.h") .clang_arg(format!("-D{}", get_clang_definition(&platform))) .use_core() .default_enum_style(bindgen::EnumVariation::Rust { non_exhaustive: true }) .parse_callbacks(Box::new(bindgen::CargoCallbacks)); if platform == Platform::Wayland { bindings = bindings .blacklist_item("FP_NAN") .blacklist_item("FP_INFINITE") .blacklist_item("FP_ZERO") .blacklist_item("FP_SUBNORMAL") .blacklist_item("FP_NORMAL"); } let bindings = bindings .generate() .expect("Unable to generate bindings"); let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); bindings .write_to_file(out_path.join("bindings.rs")) .expect("Couldn't write bindings!"); } fn add_platforms(platforms: &mut Vec<Platform>) { if cfg!(feature = "windows") { platforms.push(Platform::Windows) } if cfg!(feature = "macos") { platforms.push(Platform::Mac) } if cfg!(feature = "x11") { platforms.push(Platform::X11) } if 
cfg!(feature = "wayland") { platforms.push(Platform::Wayland) } if cfg!(feature = "opengl") { platforms.push(Platform::OpenGL) } if cfg!(feature = "vulkan") { platforms.push(Platform::Vulkan) } } fn validate_platforms(platforms: &mut Vec<Platform>) { if platforms.contains(&Platform::X11) && platforms.contains(&Platform::Wayland) { panic!("X11 and Wayland are mutually exclusive") } } fn finalize_platform(platforms: Vec<Platform>) -> Platform { let mut platform: Option<Platform> = None; if platforms.contains(&Platform::Vulkan) { platform = Some(Platform::Vulkan) } if platforms.contains(&Platform::OpenGL) { platform = Some(Platform::OpenGL); } if platform.is_none() { if cfg!(target_os = "windows") { if platforms.contains(&Platform::Windows) { platform = Some(Platform::Windows) } } else if cfg!(target_os = "macos") { if platforms.contains(&Platform::Mac) { platform = Some(Platform::Mac) } } else if cfg!(target_os = "linux") { if platforms.contains(&Platform::X11) { platform = Some(Platform::X11) } if platforms.contains(&Platform::Wayland) { platform = Some(Platform::Wayland) } } } if platform.is_none() { if cfg!(target_os = "windows") { platform = Some(Platform::Windows) } else if cfg!(target_os = "macos") { platform = Some(Platform::Mac) } else if cfg!(target_os = "linux") { platform = Some(Platform::X11) } if let Some(platform) = platform { warning!("No suitable platform specified. 
Defaulting to {:?}.", platform) } } platform.expect("Could not find a suitable platform") } fn link_with_platform(platform: &Platform) { match platform { Platform::X11 => { pkg_config::Config::new() .cargo_metadata(true) .atleast_version("1.14") .probe("xcb") .expect("Unable to find XCB"); } Platform::Wayland => { pkg_config::Config::new() .cargo_metadata(true) .atleast_version("1.18") .probe("wayland-client") .expect("Unable to find Wayland Client library"); } Platform::OpenGL => { if cfg!(target_os = "linux") { pkg_config::Config::new() .cargo_metadata(true) .atleast_version("4.5") .probe("opengl") .expect("Unable to find OpenGL"); } else { unimplemented!() } } _ => unimplemented!(), } } fn get_clang_definition(platform: &Platform) -> &'static str { match platform { Platform::Windows => "USE_WINDOWS", Platform::Mac => "USE_MACOS", Platform::X11 => "USE_XCB", Platform::Wayland => "USE_WAYLAND", Platform::OpenGL => "USE_OPENGL", Platform::Vulkan => "USE_VULKAN", } }
#![allow(clippy::if_same_then_else)] use std::env; use std::path::PathBuf; macro_rules! cargo { ($value:expr) => { println!("cargo:{}", $value) } } macro_rules! warning { ($message:expr) => { cargo!(format!("warning={}", $message)); }; ($message:expr $(, $extra:expr)*) => { warning!(format!($message $(, $extra)*)); } } macro_rules! config { ($key:expr) => { cargo!(format!("rustc-cfg={}", $key)); }; ($key:expr => $value:expr) => { config!(format!("{}=\"{}\"", $key, $value)); } } #[derive(Debug, Eq, PartialEq, Copy, Clone)] enum Platform { Windows, Mac, X11, Wayland, OpenGL, Vulkan, } fn main() { let mut platforms: Vec<Platform> = Vec::new(); add_platforms(&mut platforms); validate_platforms(&mut platforms); let platform = finalize_platform(platforms); link_with_platform(&platform); config!("platform" => match platform { Platform::Windows => "windows", Platform::Mac => "macos", Platform::X11 => "x11", Platform::Wayland => "wayland", Platform::OpenGL => "opengl", Platform::Vulkan => "vulkan", }); println!("cargo:rerun-if-changed=wrapper.h"); let mut bindings = bindgen::Builder::default() .header("wrapper.h") .clang_arg(format!("-D{}", get_clang_definition(&platform))) .use_core() .default_enum_style(bindgen::EnumVariation::Rust { non_exhaustive: true }) .parse_callbacks(Box::new(bindgen::CargoCallbacks));
let bindings = bindings .generate() .expect("Unable to generate bindings"); let out_path = PathBuf::from(env::var("OUT_DIR").unwrap()); bindings .write_to_file(out_path.join("bindings.rs")) .expect("Couldn't write bindings!"); } fn add_platforms(platforms: &mut Vec<Platform>) { if cfg!(feature = "windows") { platforms.push(Platform::Windows) } if cfg!(feature = "macos") { platforms.push(Platform::Mac) } if cfg!(feature = "x11") { platforms.push(Platform::X11) } if cfg!(feature = "wayland") { platforms.push(Platform::Wayland) } if cfg!(feature = "opengl") { platforms.push(Platform::OpenGL) } if cfg!(feature = "vulkan") { platforms.push(Platform::Vulkan) } } fn validate_platforms(platforms: &mut Vec<Platform>) { if platforms.contains(&Platform::X11) && platforms.contains(&Platform::Wayland) { panic!("X11 and Wayland are mutually exclusive") } } fn finalize_platform(platforms: Vec<Platform>) -> Platform { let mut platform: Option<Platform> = None; if platforms.contains(&Platform::Vulkan) { platform = Some(Platform::Vulkan) } if platforms.contains(&Platform::OpenGL) { platform = Some(Platform::OpenGL); } if platform.is_none() { if cfg!(target_os = "windows") { if platforms.contains(&Platform::Windows) { platform = Some(Platform::Windows) } } else if cfg!(target_os = "macos") { if platforms.contains(&Platform::Mac) { platform = Some(Platform::Mac) } } else if cfg!(target_os = "linux") { if platforms.contains(&Platform::X11) { platform = Some(Platform::X11) } if platforms.contains(&Platform::Wayland) { platform = Some(Platform::Wayland) } } } if platform.is_none() { if cfg!(target_os = "windows") { platform = Some(Platform::Windows) } else if cfg!(target_os = "macos") { platform = Some(Platform::Mac) } else if cfg!(target_os = "linux") { platform = Some(Platform::X11) } if let Some(platform) = platform { warning!("No suitable platform specified. 
Defaulting to {:?}.", platform) } } platform.expect("Could not find a suitable platform") } fn link_with_platform(platform: &Platform) { match platform { Platform::X11 => { pkg_config::Config::new() .cargo_metadata(true) .atleast_version("1.14") .probe("xcb") .expect("Unable to find XCB"); } Platform::Wayland => { pkg_config::Config::new() .cargo_metadata(true) .atleast_version("1.18") .probe("wayland-client") .expect("Unable to find Wayland Client library"); } Platform::OpenGL => { if cfg!(target_os = "linux") { pkg_config::Config::new() .cargo_metadata(true) .atleast_version("4.5") .probe("opengl") .expect("Unable to find OpenGL"); } else { unimplemented!() } } _ => unimplemented!(), } } fn get_clang_definition(platform: &Platform) -> &'static str { match platform { Platform::Windows => "USE_WINDOWS", Platform::Mac => "USE_MACOS", Platform::X11 => "USE_XCB", Platform::Wayland => "USE_WAYLAND", Platform::OpenGL => "USE_OPENGL", Platform::Vulkan => "USE_VULKAN", } }
if platform == Platform::Wayland { bindings = bindings .blacklist_item("FP_NAN") .blacklist_item("FP_INFINITE") .blacklist_item("FP_ZERO") .blacklist_item("FP_SUBNORMAL") .blacklist_item("FP_NORMAL"); }
if_condition
[]
Rust
src/data.rs
Tenebryo/file-system-stats
9a8d02e8782bb0a3be4d7bfbb2b81ba642986050
/* Copyright 2021 Sam Blazes Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use std::{path::PathBuf, sync::{Arc, atomic::AtomicBool}}; use std::time::Duration; use std::sync::atomic::{AtomicUsize, Ordering}; use tokio::fs::*; use tokio::sync::Mutex; use async_channel::*; use tokio::sync::broadcast::channel; #[derive(Debug, Clone)] pub struct Entry { pub parent : usize, pub size : u64, pub path : PathBuf, pub children : Vec<usize>, pub num_files : u64, pub is_file : bool, pub expanded : bool, } #[derive(Debug)] pub struct ProgressState { pub count : AtomicUsize, pub done : AtomicUsize, pub complete : AtomicBool, } impl ProgressState { pub fn new() -> Self { Self { count : AtomicUsize::new(0), done : AtomicUsize::new(0), complete : AtomicBool::new(false), } } } #[derive(Debug, Clone)] pub struct DirectoryTreeData { pub entries : Arc<Mutex<Vec<Entry>>>, pub file_types : Arc<Mutex<std::collections::HashMap<Option<String>, u64>>>, } impl DirectoryTreeData { pub fn new() -> DirectoryTreeData { DirectoryTreeData { entries : empty_entry_list(), file_types : Arc::new(Mutex::new(std::collections::HashMap::new())), } } pub async fn from_root_path(path : PathBuf) -> DirectoryTreeData { DirectoryTreeData { entries : directory_entry_list(path).await, file_types : Arc::new(Mutex::new(std::collections::HashMap::new())), } } pub async fn recreate_from_root_path(&self, path : PathBuf) { recreate_directory_entry_list(path, self.entries.clone()).await; self.file_types.lock().await.clear(); } } const NTASKS : usize = 8; 
pub fn empty_entry_list() -> Arc<Mutex<Vec<Entry>>> { Arc::new(Mutex::new(vec![])) } pub async fn directory_entry_list(path : PathBuf) -> Arc<Mutex<Vec<Entry>>> { let meta = metadata(path.clone()).await.unwrap(); let is_file = meta.is_file(); Arc::new(Mutex::new(vec![Entry{ parent : usize::MAX, size : if is_file {meta.len()} else {0}, num_files : if is_file {1} else {0}, children : vec![], path, is_file, expanded : true, }])) } pub async fn recreate_directory_entry_list(path : PathBuf, entries : Arc<Mutex<Vec<Entry>>>) { let meta = metadata(path.clone()).await.unwrap(); let is_file = meta.is_file(); let mut lock = entries.lock().await; lock.clear(); lock.push(Entry{ parent : usize::MAX, size : if is_file {meta.len()} else {0}, num_files : if is_file {1} else {0}, children : vec![], path, is_file, expanded : true, }); } pub async fn scan_directory(root : usize, DirectoryTreeData{entries, file_types} : DirectoryTreeData, progress : Arc<ProgressState>) { let (tx, rx) : (Sender<Entry>, Receiver<Entry>)= unbounded(); let (done_tx, _) = channel(1); let waiting = Arc::new(AtomicUsize::new(0)); let mut handles = vec![]; for _ in 0..NTASKS { let rx = rx.clone(); let tx = tx.clone(); let mut done_rx = done_tx.subscribe(); let paths = entries.clone(); let progress = progress.clone(); let waiting = waiting.clone(); let file_types = file_types.clone(); let h = tokio::task::spawn(async move { done_rx.recv().await.unwrap(); loop { waiting.fetch_add(1, Ordering::SeqCst); tokio::select! 
{ _ = done_rx.recv() => { break; } Ok(entry) = rx.recv() => { waiting.fetch_sub(1, Ordering::SeqCst); let i = { let mut locked = paths.lock().await; locked.push(entry.clone()); locked.len() - 1 }; if entry.is_file { let mut locked = paths.lock().await; let mut j = locked[i].parent; let n = locked.len(); let file_type = entry.path.extension().map(|ext| String::from(ext.to_string_lossy()).to_uppercase()); file_types.lock().await.entry(file_type) .and_modify(|v| { *v += entry.size; }) .or_insert(entry.size); locked[j].children.push(i); while j < n { locked[j].size += entry.size; locked[j].num_files += 1; j = locked[j].parent; } } else { { let mut locked = paths.lock().await; let j = locked[i].parent; locked[j].children.push(i); } if let Err(e) = process_directory(i, entry.path.clone(), &tx, progress.clone()).await { eprintln!("{:?}: {:?}", e, entry.path); } } progress.done.fetch_add(1, Ordering::SeqCst); } } } }); handles.push(h); } { let locked = entries.lock().await; let root_path = locked[root].path.clone(); if let Err(e) = process_directory(root, root_path, &tx, progress.clone()).await { eprintln!("{:?}", e); } done_tx.send(()).unwrap(); } let mut clock = tokio::time::interval(Duration::from_millis(10)); loop { clock.tick().await; if rx.is_empty() && waiting.load(Ordering::SeqCst) as usize == NTASKS { break; } } done_tx.send(()).unwrap(); for handle in &mut handles { handle.await.unwrap(); } progress.complete.store(true, Ordering::SeqCst); } async fn process_directory(i : usize, path : PathBuf, queue : &Sender<Entry>, progress : Arc<ProgressState>) -> std::io::Result<()> { let mut dir = read_dir(path).await?; loop { match dir.next_entry().await { Ok(Some(subdir)) => { let meta = subdir.metadata().await; if meta.is_err() { continue; } let meta = meta.unwrap(); let size = meta.len(); progress.count.fetch_add(1, Ordering::SeqCst); if meta.is_dir() { queue.send(Entry{ parent : i, size, num_files : 0, path : subdir.path().into(), children : vec![], is_file : false, 
expanded : false, }).await.unwrap(); } else if meta.is_file() { queue.send(Entry{ parent : i, size, num_files : 1, path : subdir.path().into(), children : vec![], is_file : true, expanded : false, }).await.unwrap(); } } Err(_) => {} _ => break } } Ok(()) } pub fn recalculate_data(entries : &mut [Entry]) { for e in entries.iter_mut() { if e.is_file { e.size = 0; e.num_files = 0; } e.children.clear(); } for i in 1..(entries.len()) { let p = entries[i].parent; entries[p].children.push(i); } fn r_help(entries : &mut [Entry], i : usize) -> (u64, u64) { let mut size = 0; let mut files = 0; for c in entries[i].children.clone() { let (s, f) = r_help(entries, c); size += s; files += f; } entries[i].size = size; entries[i].num_files = files; (size, files) } r_help(entries, 0); } pub fn delete_subtree(entries : &mut Vec<Entry>, subtree : usize) { let mut new_idx = entries.iter().map(|_| usize::MAX).collect::<Vec<_>>(); let mut next_i = 0; for i in 0..(entries.len()) { let parent = entries[i].parent; if parent == subtree || i == subtree { entries[i].parent = subtree; } else { new_idx[i] = next_i; next_i += 1; } } for (i, ni) in new_idx.iter().cloned().enumerate() { if i == ni || ni == usize::MAX { continue; } entries.swap(i, ni); } entries.truncate(next_i); recalculate_data(entries) } pub fn sort_subtree_by_size(i : usize, entries : &mut [Entry]) { for c in entries[i].children.clone() { sort_subtree_by_size(c, entries); } let mut j = i; while entries[j].parent < entries.len() { let mut children = entries[j].children.clone(); children.sort_by_cached_key(|&e| entries[e].size); entries[j].children = children; j = entries[j].parent; } } pub fn find_path_index(path : PathBuf, entries : &[Entry]) -> Option<usize> { let mut i = 0; 'outer: loop { for &c in entries[i].children.iter() { let p = &entries[c].path; if path == *p { return Some(c); } if path.starts_with(p) { i = c; continue 'outer; } } break } None } #[test] fn test_perf() { use std::time::Instant; let root = 
PathBuf::from("C:\\"); let progress = Arc::new(ProgressState::new()); let start = Instant::now(); tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .unwrap() .block_on(async { let entries = directory_entry_list(root).await; let file_types = Arc::new(Mutex::new(std::collections::HashMap::new())); scan_directory(0, DirectoryTreeData{entries, file_types}, progress).await }); let elapsed = start.elapsed(); eprintln!("Elapsed: {:?}", elapsed); }
/* Copyright 2021 Sam Blazes Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ use std::{path::PathBuf, sync::{Arc, atomic::AtomicBool}}; use std::time::Duration; use std::sync::atomic::{AtomicUsize, Ordering}; use tokio::fs::*; use tokio::sync::Mutex; use async_channel::*; use tokio::sync::broadcast::channel; #[derive(Debug, Clone)] pub struct Entry { pub parent : usize, pub size : u64, pub path : PathBuf, pub children : Vec<usize>, pub num_files : u64, pub is_file : bool, pub expanded : bool, } #[derive(Debug)] pub struct ProgressState { pub count : AtomicUsize, pub done : AtomicUsize, pub complete : AtomicBool, } impl ProgressState { pub fn new() -> Self { Self { count : AtomicUsize::new(0), done : AtomicUsize::new(0), complete : AtomicBool::new(false), } } } #[derive(Debug, Clone)] pub struct DirectoryTreeData { pub entries : Arc<Mutex<Vec<Entry>>>, pub file_types : Arc<Mutex<std::collections::HashMap<Option<String>, u64>>>, } impl DirectoryTreeData { pub fn new() -> DirectoryTreeData { DirectoryTreeData { entries : empty_entry_list(), file_types : Arc::new(Mutex::new(std::collections::HashMap::new())), } } pub async fn from_root_path(path : PathBuf) -> DirectoryTreeData { DirectoryTreeData { entries : directory_entry_list(path).await, file_types : Arc::new(Mutex::new(std::collections::HashMap::new())), } } pub async fn recreate_from_root_path(&self, path : PathBuf) { recreate_directory_entry_list(path, self.entries.clone()).await; self.file_types.lock().await.clear(); } } const NTASKS : usize = 8; 
pub fn empty_entry_list() -> Arc<Mutex<Vec<Entry>>> { Arc::new(Mutex::new(vec![])) } pub async fn directory_entry_list(path : PathBuf) -> Arc<Mutex<Vec<Entry>>> { let meta = metadata(path.clone()).await.unwrap(); let is_file = meta.is_file(); Arc::new(Mutex::new(vec![Entry{ parent : usize::MAX, size : if is_file {meta.len()} else {0}, num_files : if is_file {1} else {0}, children : vec![], path, is_file, expanded : true, }])) } pub async fn recreate_directory_entry_list(path : PathBuf, entries : Arc<Mutex<Vec<Entry>>>) { let meta = metadata(path.clone()).await.unwrap(); let is_file = meta.is_file(); let mut lock = entries.lock().await; lock.clear(); lock.push(Entry{ parent : usize::MAX, size : if is_file {meta.len()} else {0}, num_files : if is_file {1} else {0}, children : vec![], path, is_file, expanded : true, }); } pub async fn scan_directory(root : usize, DirectoryTreeData{entries, file_types} : DirectoryTreeData, progress : Arc<ProgressState>) { let (tx, rx) : (Sender<Entry>, Receiver<Entry>)= unbounded(); let (done_tx, _) = channel(1); let waiting = Arc::new(AtomicUsize::new(0)); let mut handles = vec![]; for _ in 0..NTASKS { let rx = rx.clone(); let tx = tx.clone(); let mut done_rx = done_tx.subscribe(); let paths = entries.clone(); let progress = progress.clone(); let waiting = waiting.clone(); let file_types = file_types.clone(); let h = tokio::task::spawn(async move { done_rx.recv().await.unwrap(); loop { waiting.fetch_add(1, Ordering::SeqCst); tokio::select! 
{ _ = done_rx.recv() => { break; } Ok(entry) = rx.recv() => { waiting.fetch_sub(1, Ordering::SeqCst); let i = { let mut locked = paths.lock().await; locked.push(entry.clone()); locked.len() - 1 }; if entry.is_file { let mut locked = paths.lock().await; let mut j = locked[i].parent; let n = locked.len(); let file_type = entry.path.extension().map(|ext| String::from(ext.to_string_lossy()).to_uppercase()); file_types.lock().await.entry(file_type) .and_modify(|v| { *v += entry.size; }) .or_insert(entry.size); locked[j].children.push(i); while j < n { locked[j].size += entry.size; locked[j].num_files += 1; j = locked[j].parent; } } else { { let mut locked = paths.lock().await; let j = locked[i].parent; locked[j].children.push(i); } if let Err(e) = process_directory(i, entry.path.clone(), &tx, progress.clone()).await { eprintln!("{:?}: {:?}", e, entry.path); } } progress.done.fetch_add(1, Ordering::SeqCst); } } } }); handles.push(h); } { let locked = entries.lock().await; let root_path = locked[root].path.clone(); if let Err(e) = process_directory(root, root_path, &tx, progress.clone()).await { eprintln!("{:?}", e); } done_tx.send(()).unwrap(); } let mut clock = tokio::time::interval(Duration::from_millis(10)); loop { clock.tick().await; if rx.is_empty() && waiting.load(Ordering::SeqCst) as usize == NTASKS { break; } } done_tx.send(()).unwrap(); for handle in &mut handles { handle.await.unwrap(); } progress.complete.store(true, Ordering::SeqCst); } async fn process_directory(i : usize, path : PathBuf, queue : &Sender<Entry>, progress : Arc<ProgressState>) -> std::io::Result<()> { let mut dir = read_dir(path).await?; loop { match dir.next_entry().await { Ok(Some(subdir)) => { let meta = subdir.metadata().await; if meta.is_err() { continue; } let meta = meta.unwrap(); let size = meta.len(); progress.count.fetch_add(1, Ordering::SeqCst); if meta.is_dir() { queue.send(Entry{ parent : i, size, num_files : 0, path : subdir.path().into(), children : vec![], is_file : false, 
expanded : false, }).await.unwrap(); } else if meta.is_file() { queue.send(Entry{ parent : i, size, num_files : 1, path : subdir.path().into(), children : vec![], is_file : true, expanded : false, }).await.unwrap(); } } Err(_) => {} _ => break } } Ok(()) } pub fn recalculate_data(entries : &mut [Entry]) { for e in entries.iter_mut() { if e.is_file { e.size = 0; e.num_files = 0; } e.children.clear(); } for i in 1..(entries.len()) { let p = entries[i].parent; entries[p].children.push(i); } fn r_help(entries : &mut [Entry], i : usize) -> (u64, u64) { let mut size = 0; let mut files = 0; for c in entries[i].children.clone() { let (s, f) = r_help(entries, c); size += s; files += f; } entries[i].size = size; entries[i].num_files = files; (size, files) } r_help(entries, 0); }
pub fn sort_subtree_by_size(i : usize, entries : &mut [Entry]) { for c in entries[i].children.clone() { sort_subtree_by_size(c, entries); } let mut j = i; while entries[j].parent < entries.len() { let mut children = entries[j].children.clone(); children.sort_by_cached_key(|&e| entries[e].size); entries[j].children = children; j = entries[j].parent; } } pub fn find_path_index(path : PathBuf, entries : &[Entry]) -> Option<usize> { let mut i = 0; 'outer: loop { for &c in entries[i].children.iter() { let p = &entries[c].path; if path == *p { return Some(c); } if path.starts_with(p) { i = c; continue 'outer; } } break } None } #[test] fn test_perf() { use std::time::Instant; let root = PathBuf::from("C:\\"); let progress = Arc::new(ProgressState::new()); let start = Instant::now(); tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .unwrap() .block_on(async { let entries = directory_entry_list(root).await; let file_types = Arc::new(Mutex::new(std::collections::HashMap::new())); scan_directory(0, DirectoryTreeData{entries, file_types}, progress).await }); let elapsed = start.elapsed(); eprintln!("Elapsed: {:?}", elapsed); }
pub fn delete_subtree(entries : &mut Vec<Entry>, subtree : usize) { let mut new_idx = entries.iter().map(|_| usize::MAX).collect::<Vec<_>>(); let mut next_i = 0; for i in 0..(entries.len()) { let parent = entries[i].parent; if parent == subtree || i == subtree { entries[i].parent = subtree; } else { new_idx[i] = next_i; next_i += 1; } } for (i, ni) in new_idx.iter().cloned().enumerate() { if i == ni || ni == usize::MAX { continue; } entries.swap(i, ni); } entries.truncate(next_i); recalculate_data(entries) }
function_block-full_function
[ { "content": "/// Build ui elements\n\nfn display_file_tree(display_entries : &mut Vec<Entry>, ui : &Ui) {\n\n \n\n let cw = ui.window_content_region_width();\n\n\n\n #[derive(Debug, Clone, Copy, Default)]\n\n struct DrawParams {\n\n psize : u64,\n\n content_width : f32,\n\n }\n\n\n\n let size = display_entries[0].size;\n\n r_help(0, display_entries, ui, DrawParams{psize : size, content_width: cw});\n\n fn r_help(i : usize, entries : &mut [Entry], ui : &Ui, dp : DrawParams) {\n\n\n\n let name = ImString::from(\n\n format!(\"{:80}\",\n\n if i == 0 {\n\n entries[i].path.to_str().unwrap()\n\n } else {\n\n entries[i].path.components().last().unwrap().as_os_str().to_str().unwrap()\n", "file_path": "src/main.rs", "rank": 5, "score": 118118.95742589721 }, { "content": "struct ByteSize(u64);\n\n\n\nimpl std::fmt::Display for ByteSize {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n const SUFFIXES : [&str; 6] = [\"B\", \"KB\", \"MB\", \"GB\", \"TB\", \"PB\"];\n\n\n\n let mut size = self.0 as f32;\n\n let mut i = 0;\n\n for _ in SUFFIXES.iter() {\n\n if size > 1000.0 {\n\n size /= 1000.0;\n\n i += 1;\n\n }\n\n }\n\n\n\n write!(f, \"{:.1}{}\", size, SUFFIXES[i])\n\n }\n\n}", "file_path": "src/main.rs", "rank": 6, "score": 85798.22596421184 }, { "content": "pub fn init(title: &str) -> System {\n\n let title = match title.rfind('/') {\n\n Some(idx) => title.split_at(idx + 1).1,\n\n None => title,\n\n };\n\n let events_loop = EventsLoop::new();\n\n let builder = WindowBuilder::new()\n\n .with_title(title.to_owned())\n\n .with_inner_size(LogicalSize::new(1024f64, 768f64));\n\n\n\n let mut imgui = Context::create();\n\n imgui.set_ini_filename(None);\n\n\n\n let mut platform = WinitPlatform::init(&mut imgui);\n\n\n\n let hidpi_factor = platform.hidpi_factor();\n\n let font_size = (13.0 * hidpi_factor) as f32;\n\n imgui.fonts().add_font(&[\n\n FontSource::DefaultFontData {\n\n config: Some(FontConfig {\n", "file_path": "src/renderer.rs", "rank": 7, 
"score": 52064.22846741766 }, { "content": "#[derive(Debug)]\n\nstruct Task {\n\n name : String,\n\n id : u64,\n\n start : Instant,\n\n end : Option<Instant>,\n\n progress : Arc<ProgressState>,\n\n handle : JoinHandle<()>,\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 8, "score": 37258.674056947086 }, { "content": "struct AppState {\n\n async_runtime : tokio::runtime::Runtime,\n\n entry_list : Arc<tokio::sync::Mutex<Vec<Entry>>>,\n\n task_handles : Vec<tokio::task::JoinHandle<()>>,\n\n}\n\n\n\nmacro_rules! cloned_scope {\n\n ($(name:ident)*, $block:stmt) => {\n\n {\n\n $(let $name = $name.clone();)*\n\n\n\n $block\n\n }\n\n };\n\n}\n\n\n\nmacro_rules! build_layout {\n\n ($name:ident [$width:expr, $height:expr] {$($args:tt)*}) => {\n\n let mut $name = stretch::node::Stretch::new();\n\n build_layout!(($name) $($args)*);\n\n let $name = $name.compute_layout();\n\n };\n\n (($name:ident) $($rest:tt)*) => {\n\n\n\n build_layout!(($name) $($rest)*)\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 35897.32646114815 }, { "content": "fn main() {\n\n let mut _clipboard_ctx: ClipboardContext = ClipboardProvider::new().unwrap();\n\n\n\n let filesystem_data = DirectoryTreeData::new();\n\n\n\n let async_runtime = tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n\n\n let system = renderer::init(\"File System Stats\");\n\n\n\n let root_path = Arc::new(std::sync::Mutex::new(ImString::new(String::new())));\n\n\n\n let choosing_path = Arc::new(AtomicBool::new(false));\n\n\n\n let tree_map : Arc<tokio::sync::Mutex<Option<TreeMap>>> = Arc::new(tokio::sync::Mutex::new(None));\n\n\n\n let mut tasks = vec![];\n\n\n", "file_path": "src/main.rs", "rank": 10, "score": 33261.0736669814 }, { "content": "// hack to use newer winit version\n\ntype EventsLoop = EventLoop<()>;\n\n\n\npub struct System {\n\n pub events_loop: EventsLoop,\n\n pub imgui: Context,\n\n pub platform: WinitPlatform,\n\n pub render_sys: RenderSystem,\n\n pub 
font_size: f32,\n\n}\n\n\n", "file_path": "src/renderer.rs", "rank": 12, "score": 24530.376500890045 }, { "content": "# File System Stats\n\n\n\nFile System Stats is a simple tool to diagnose where large amounts of storage\n\nare being used, inspired by WinDirStat. The goal of this project was to make\n\na faster version of WinDirStat with perhaps not all the features. The current\n\nversion scans 350k files on my 1TB SSD in under 45 seconds.\n\n\n\n## Installation\n\n\n\n * Install nightly Rust via [rustup.rs](https://rustup.rs/)\n\n * Build with the command `cargo +nightly build --release`\n\n * Executable will be in the `target/release/` directory\n\n\n\n## Implemented Features\n\n\n\n - [x] Directory walking and scanning\n\n - [x] Simple UI with Dear IMGUI\n\n - [x] Directory tree orderered by size of contents\n\n - [x] Total file size by file extension\n\n - [x] Directory scan progress\n\n - [ ] File size treemap (planned but not complete)\n", "file_path": "README.md", "rank": 13, "score": 11808.203638691952 }, { "content": "/*\n\n\n\n Copyright 2021 Sam Blazes\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n\n\n*/\n\n\n\nuse crate::data::*;\n\n\n", "file_path": "src/treemap.rs", "rank": 15, "score": 49.68434303312315 }, { "content": "/*\n\n\n\n Copyright 2021 Sam Blazes\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the 
License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n\n\n\n*/\n\n\n\n#![feature(drain_filter)]\n\n#![allow(dead_code, unused_macros)]\n", "file_path": "src/main.rs", "rank": 16, "score": 46.82256907886414 }, { "content": " .framed(true)\n\n .leaf(entries[i].is_file)\n\n .opened(entries[i].expanded, Condition::Once)\n\n .build(ui, || {\n\n\n\n entry_display!(size, size as f32 / dp.psize as f32, progress_bar_start, file_size_location);\n\n\n\n if entries[i].expanded {\n\n for &c in entries[i].children.clone().iter() {\n\n // if entries[c].is_file {\n\n // ui.text(entries[c].path.file_name().unwrap().to_str().unwrap());\n\n // entry_display!(entries[c].size, entries[c].size as f32 / size as f32);\n\n // } else {\n\n r_help(c, entries, ui, DrawParams{psize : size, ..dp});\n\n // }\n\n }\n\n }\n\n expanded = true;\n\n });\n\n \n", "file_path": "src/main.rs", "rank": 27, "score": 21.481519730477714 }, { "content": " if !display_entries.is_empty() {\n\n\n\n display_file_tree(&mut display_entries, ui);\n\n\n\n let mut elock = async_runtime.block_on(filesystem_data.entries.lock());\n\n \n\n for e in display_entries.iter_mut() {\n\n if e.parent < elock.len() {\n\n elock[e.parent].expanded = e.expanded;\n\n }\n\n }\n\n }\n\n\n\n if ui.button(im_str!(\"Clear\"), [64.0, 16.0]) {\n\n let mut elock = async_runtime.block_on(filesystem_data.entries.lock());\n\n elock.clear();\n\n }\n\n });\n\n // this window shows the progress of tasks such as the \n\n Window::new(im_str!(\"File Type Statistics\"))\n", "file_path": "src/main.rs", "rank": 29, "score": 20.184011154213295 }, { "content": " Window::new(im_str!(\"Directory Tree\"))\n\n .size([w as f32 
- 256.0, h as f32 - root_window_height - tree_map_height], Condition::Always)\n\n .position([0.0, root_window_height], Condition::Always)\n\n .collapsible(false)\n\n .resizable(false)\n\n .no_nav()\n\n .build(ui, || {\n\n\n\n let mut display_entries = vec![];\n\n {\n\n let elock = async_runtime.block_on(filesystem_data.entries.lock());\n\n\n\n if !elock.is_empty() {\n\n\n\n let mut i = 0;\n\n display_entries.push(elock[0].clone());\n\n display_entries[0].parent = 0;\n\n\n\n while i < display_entries.len() {\n\n if display_entries[i].expanded {\n", "file_path": "src/main.rs", "rank": 31, "score": 19.719576932707675 }, { "content": " self.nodes.clear();\n\n self.nodes.extend(std::iter::repeat(TreeMapNode::default()).take(nlen));\n\n\n\n self.nodes[0].max_x = region.0;\n\n self.nodes[0].max_y = region.1;\n\n\n\n progress.count.store(nlen, Ordering::SeqCst);\n\n\n\n r_help(0, entries, &mut self.nodes, &mut self.leaf, progress);\n\n fn r_help(i : usize, entries : &[Entry], nodes : &mut [TreeMapNode], leaf : &mut [bool], progress: &ProgressState) {\n\n let mut c = entries[i].children.clone();\n\n leaf[i] = entries[i].is_file;\n\n\n\n c.sort_by_key(|&j| entries[j].size);\n\n\n\n let cnode = nodes[i];\n\n\n\n let w = cnode.max_x - cnode.min_x;\n\n let h = cnode.max_y - cnode.min_y;\n\n\n", "file_path": "src/treemap.rs", "rank": 32, "score": 19.631380563918 }, { "content": "use std::sync::atomic::Ordering;\n\n\n\n#[derive(Debug, Clone, Copy, Default)]\n\npub struct TreeMapNode {\n\n pub min_x : f32,\n\n pub min_y : f32,\n\n pub max_x : f32,\n\n pub max_y : f32,\n\n}\n\n\n\npub struct TreeMap {\n\n pub nodes : Vec<TreeMapNode>,\n\n pub leaf : Vec<bool>,\n\n}\n\n\n\nimpl TreeMap {\n\n pub fn new() -> TreeMap {\n\n TreeMap {\n\n nodes : vec![],\n\n leaf : vec![],\n", "file_path": "src/treemap.rs", "rank": 33, "score": 15.902297176336706 }, { "content": "\n\n let n = display_entries[i].children.len();\n\n\n\n display_entries[i].children.sort_by_cached_key(|&c| -(elock[c].size 
as i64));\n\n\n\n for j in 0..n {\n\n let k = display_entries[i].children[j];\n\n display_entries.push(elock[k].clone());\n\n display_entries.last_mut().unwrap().parent = k;\n\n display_entries[i].children[j] = display_entries.len() - 1;\n\n }\n\n } else {\n\n\n\n }\n\n\n\n i += 1;\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 34, "score": 15.728930268824527 }, { "content": "\n\n ui.same_line(ui.window_content_region_width() - 2.0*72.0);\n\n\n\n if ui.button(im_str!(\"Browse\"), [64.0, 24.0]) && !choosing_path.load(Ordering::SeqCst) {\n\n choosing_path.store(true, Ordering::SeqCst);\n\n\n\n let choosing_path = choosing_path.clone();\n\n let root_path = root_path.clone();\n\n async_runtime.spawn_blocking(move || {\n\n\n\n match nfd::open_pick_folder(None) {\n\n Ok(nfd::Response::Okay(file_path)) => {\n\n println!(\"File path = {:?}\", file_path);\n\n *root_path.lock().unwrap() = file_path.into();\n\n },\n\n Ok(nfd::Response::Cancel) => println!(\"User canceled\"),\n\n _ => {}\n\n }\n\n\n\n choosing_path.store(false, Ordering::SeqCst);\n", "file_path": "src/main.rs", "rank": 35, "score": 15.549463160112612 }, { "content": " .no_nav()\n\n .build(ui, || {\n\n let tree_map_built = async_runtime.block_on(tree_map.lock()).is_none();\n\n\n\n let w_min = ui.window_content_region_min();\n\n let w_max = ui.window_content_region_max();\n\n\n\n if tree_map_built {\n\n if ui.button(im_str!(\"Create Treemap\"), [128.0, 24.0]) {\n\n let tree_map = tree_map.clone();\n\n let entries = filesystem_data.entries.clone();\n\n let start = Instant::now();\n\n let id = gen_id.next().unwrap();\n\n let progress = Arc::new(ProgressState::new());\n\n let pg = progress.clone();\n\n\n\n let jh = async_runtime.spawn(async move {\n\n let mut treemap = TreeMap::new();\n\n\n\n treemap.rebuild_tree_map((w_max[0] - w_min[0], w_max[1] - w_min[1]), &*entries.lock().await, pg.as_ref());\n", "file_path": "src/main.rs", "rank": 36, "score": 15.517974017424194 }, { "content": " }\n\n ));\n\n 
let mut expanded = false;\n\n\n\n let progress_bar_start = dp.content_width - 96.0;\n\n let file_size_location = progress_bar_start - 96.0;\n\n\n\n macro_rules! entry_display {\n\n ($size:expr, $prog:expr, $pbs:expr, $fsl:expr) => {\n\n ui.same_line($fsl);\n\n ui.text(format!(\"{: >9}\", format!(\"{}\", ByteSize($size))));\n\n ui.same_line($pbs);\n\n ProgressBar::new($prog).build(ui);\n\n }\n\n }\n\n\n\n let size = entries[i].size;\n\n\n\n TreeNode::new(&name)\n\n .allow_item_overlap(true)\n", "file_path": "src/main.rs", "rank": 38, "score": 14.630175336754775 }, { "content": " progress.done.fetch_add(1, Ordering::SeqCst);\n\n\n\n let size = entries[i].size as f32;\n\n\n\n if w > h {\n\n let mut x = cnode.min_x;\n\n for j in c {\n\n let r = w * entries[j].size as f32 / size;\n\n nodes[j] = TreeMapNode{min_x : x, max_x : x + r, ..cnode};\n\n x += r;\n\n r_help(j, entries, nodes, leaf, progress);\n\n }\n\n } else {\n\n let mut y = cnode.min_y;\n\n for j in c {\n\n let r = h * entries[j].size as f32 / size;\n\n nodes[j] = TreeMapNode{min_y : y, max_y : y + r, ..cnode};\n\n y += r;\n\n r_help(j, entries, nodes, leaf, progress);\n\n }\n", "file_path": "src/treemap.rs", "rank": 39, "score": 13.856731452988985 }, { "content": " pub fn main_loop<F: FnMut(&mut bool, &mut Ui, &winit::window::Window)>(self, mut run_ui: F) {\n\n let System {\n\n mut events_loop,\n\n mut imgui,\n\n mut platform,\n\n mut render_sys,\n\n ..\n\n } = self;\n\n let mut encoder: gfx::Encoder<_, _> = render_sys.factory.create_command_buffer().into();\n\n\n\n let mut last_frame = Instant::now();\n\n let mut run = true;\n\n\n\n while run {\n\n events_loop.run_return(|event, _, control_flow| {\n\n platform.handle_event(imgui.io_mut(), render_sys.window(), &event);\n\n if let Event::WindowEvent { event, .. 
} = event {\n\n match event {\n\n WindowEvent::Resized(_) => render_sys.update_views(),\n\n WindowEvent::CloseRequested => {\n", "file_path": "src/renderer.rs", "rank": 40, "score": 13.506334492906724 }, { "content": " });\n\n }\n\n \n\n ui.same_line(ui.window_content_region_width() - 72.0);\n\n\n\n if ui.button(im_str!(\"Scan\"), [64.0, 24.0]) {\n\n\n\n let id = gen_id.next().unwrap();\n\n\n\n let progress = Arc::new(ProgressState::new());\n\n\n\n let root_path = root_path.lock().unwrap().to_string();\n\n\n\n let start = Instant::now();\n\n // tx.send(AsyncMessage::Scan(id, root_path.clone(), progress.clone())).unwrap();\n\n\n\n println!(\"start scan of {:?}\", root_path);\n\n \n\n let filesystem_data = filesystem_data.clone();\n\n let path = root_path.clone();\n", "file_path": "src/main.rs", "rank": 41, "score": 13.388533440468061 }, { "content": " if !expanded {\n\n entry_display!(size, size as f32 / dp.psize as f32, progress_bar_start, file_size_location);\n\n }\n\n \n\n entries[i].expanded = expanded;\n\n }\n\n\n\n\n\n\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 42, "score": 13.02319925880358 }, { "content": " ui.text(n);\n\n ui.same_line(ui.window_content_region_width() - 20.0);\n\n\n\n if ui.button(im_str!(\"X\"), [16.0, 16.0]) {\n\n p.complete.store(true, Ordering::SeqCst);\n\n // tx.send(AsyncMessage::AbortTask(*id)).unwrap();\n\n handle.abort();\n\n }\n\n\n\n ui.text(format!(\"{} / {}\", done, count));\n\n\n\n let overlay = ImString::from(format!(\"{:.1}s\", rem));\n\n ProgressBar::new(frac)\n\n .overlay_text(&overlay)\n\n .build(&ui);\n\n }\n\n\n\n ui.separator();\n\n\n\n for Task{progress: p, name : n, start, end, ..} in tasks.iter().rev() {\n", "file_path": "src/main.rs", "rank": 43, "score": 12.643829720339003 }, { "content": " .size([256.0, h as f32 - root_window_height - 128.0], Condition::Always)\n\n .position([w as f32 - 256.0, root_window_height], Condition::Always)\n\n .collapsible(false)\n\n .resizable(false)\n\n .no_nav()\n\n .build(ui, 
|| {\n\n \n\n ui.text(\"Extension\");\n\n ui.same_line(ui.window_content_region_width() - 96.0);\n\n ui.text(\"Total Space\");\n\n ui.separator();\n\n\n\n let mut data = {\n\n let ft_lock = async_runtime.block_on(filesystem_data.file_types.lock());\n\n \n\n ft_lock.iter().map(|(k,v)| (k.clone(), v.clone())).collect::<Vec<_>>()\n\n };\n\n\n\n data.sort_by_key(|(_,v)| u64::MAX - v);\n\n\n", "file_path": "src/main.rs", "rank": 44, "score": 12.418898075619008 }, { "content": " .collapsible(false)\n\n .resizable(false)\n\n .no_nav()\n\n .build(ui, || {\n\n\n\n for Task{progress: p, start : s, name : n, handle, end, ..} in tasks.iter_mut() {\n\n\n\n if p.complete.load(Ordering::SeqCst) {\n\n if end.is_none() {\n\n *end = Some(Instant::now());\n\n }\n\n continue;\n\n }\n\n\n\n let count = p.count.load(Ordering::SeqCst) as f32;\n\n let done = p.done.load(Ordering::SeqCst) as f32;\n\n let frac = done/count;\n\n let time = s.elapsed().as_secs_f32();\n\n let rem = time * (1.0 / frac - 1.0);\n\n\n", "file_path": "src/main.rs", "rank": 45, "score": 12.263674253258605 }, { "content": " let pg = progress.clone();\n\n\n\n let h = async_runtime.spawn(async move {\n\n filesystem_data.recreate_from_root_path(path.into()).await;\n\n\n\n scan_directory(0, filesystem_data, pg).await;\n\n });\n\n \n\n tasks.push( Task {\n\n id,\n\n name : format!(\"Scan [{}]\", root_path),\n\n start,\n\n end : None,\n\n progress,\n\n handle : h,\n\n });\n\n }\n\n });\n\n\n\n // this window shows the directory tree\n", "file_path": "src/main.rs", "rank": 46, "score": 11.919604147061687 }, { "content": " let mut gen_id = 0..;\n\n\n\n system.main_loop(|_, ui, win| {\n\n\n\n let w = win.inner_size().width;\n\n let h = win.inner_size().height;\n\n\n\n let root_window_height = 64.0;\n\n let tree_map_height = 256.0;\n\n\n\n Window::new(im_str!(\"Root Selection\"))\n\n .size([w as f32, root_window_height], Condition::Always)\n\n .position([0.0, 0.0], Condition::Always)\n\n .collapsible(false)\n\n 
.resizable(false)\n\n .no_nav()\n\n .build(ui, || {\n\n\n\n ui.input_text(im_str!(\"\"), &mut *root_path.lock().unwrap())\n\n .build();\n", "file_path": "src/main.rs", "rank": 47, "score": 11.516853851898903 }, { "content": " \n\n *tree_map.lock().await = Some(treemap);\n\n\n\n pg.complete.store(true, Ordering::SeqCst);\n\n });\n\n\n\n \n\n tasks.push( Task {\n\n id,\n\n name : format!(\"Treemap Layout\"),\n\n start,\n\n end : None,\n\n progress,\n\n handle : jh,\n\n });\n\n }\n\n } else {\n\n }\n\n });\n\n });\n\n\n\n println!(\"Exiting...\");\n\n\n\n for Task{handle,..} in tasks {\n\n handle.abort();\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 48, "score": 11.483848663841119 }, { "content": "\n\n if !p.complete.load(Ordering::SeqCst) {\n\n continue;\n\n }\n\n\n\n \n\n ui.text(format!(\"{} - Complete\", n));\n\n if let Some(end) = *end {\n\n ui.text(format!(\" {:4.2}s\", end.duration_since(*start).as_secs_f32()));\n\n }\n\n }\n\n });\n\n\n\n \n\n // this window shows the treemap of file sizes\n\n Window::new(im_str!(\"Treemap\"))\n\n .size([w as f32 - 256.0, tree_map_height], Condition::Always)\n\n .position([0.0, h as f32 - tree_map_height], Condition::Always)\n\n .collapsible(false)\n\n .resizable(false)\n", "file_path": "src/main.rs", "rank": 49, "score": 10.975937147735918 }, { "content": " }\n\n }\n\n\n\n pub fn rebuild_tree_map(&mut self, region : (f32, f32), entries : &[Entry], progress: &ProgressState) {\n\n self.nodes.clear();\n\n \n\n let nlen = entries.len();\n\n let olen1 = self.nodes.len();\n\n let olen2 = self.leaf.len();\n\n if nlen > olen1 {\n\n self.nodes.reserve(nlen - olen1);\n\n }\n\n\n\n if nlen > olen2 {\n\n self.leaf.reserve(nlen - olen2);\n\n }\n\n\n\n self.nodes.clear();\n\n self.nodes.extend(std::iter::repeat(TreeMapNode::default()).take(nlen));\n\n\n", "file_path": "src/treemap.rs", "rank": 50, "score": 9.40366649103992 }, { "content": " }\n\n\n\n progress.done.fetch_add(1, Ordering::SeqCst);\n\n }\n\n }\n\n\n\n pub fn 
redraw_tree_map(&self, w : usize, h : usize, buf : &mut [u8]) {\n\n let TreeMapNode {min_x, min_y, max_x, max_y} = self.nodes[0];\n\n\n\n let dx = max_x - min_x;\n\n let dy = max_y - min_y;\n\n\n\n fn fill_pixels(col : [u8; 4], x0 : usize, y0 : usize, x1 : usize, y1 : usize, w : usize, h : usize, pix : &mut [u8]) {\n\n for y in y0..y1 {\n\n for x in x0..x1 {\n\n let i = 4 * (x + w * y);\n\n\n\n pix[i+0] = col[0];\n\n pix[i+1] = col[1];\n\n pix[i+2] = col[2];\n", "file_path": "src/treemap.rs", "rank": 51, "score": 8.855133878217906 }, { "content": " pub type Device = gfx_device_gl::Device;\n\n pub type Factory = gfx_device_gl::Factory;\n\n pub type Resources = gfx_device_gl::Resources;\n\n}\n\n\n\npub struct RenderSystem {\n\n pub renderer: Renderer<ColorFormat, types::Resources>,\n\n pub windowed_context: glutin::WindowedContext<glutin::PossiblyCurrent>,\n\n pub device: types::Device,\n\n pub factory: types::Factory,\n\n pub main_color: Option<gfx::handle::RenderTargetView<types::Resources, ColorFormat>>,\n\n pub main_depth: gfx::handle::DepthStencilView<types::Resources, DepthFormat>,\n\n}\n\n\n\nimpl RenderSystem {\n\n pub fn init(\n\n imgui: &mut Context,\n\n builder: WindowBuilder,\n\n events_loop: &EventsLoop,\n\n ) -> RenderSystem {\n", "file_path": "src/renderer.rs", "rank": 52, "score": 8.681611645848422 }, { "content": " size_pixels: font_size,\n\n ..FontConfig::default()\n\n }),\n\n },\n\n ]);\n\n\n\n imgui.io_mut().font_global_scale = (1.0 / hidpi_factor) as f32;\n\n\n\n let render_sys = RenderSystem::init(&mut imgui, builder, &events_loop);\n\n platform.attach_window(imgui.io_mut(), render_sys.window(), HiDpiMode::Rounded);\n\n System {\n\n events_loop,\n\n imgui,\n\n platform,\n\n render_sys,\n\n font_size,\n\n }\n\n}\n\n\n\nimpl System {\n", "file_path": "src/renderer.rs", "rank": 53, "score": 8.189842223734427 }, { "content": "\n\nuse std::sync::Arc;\n\nuse std::sync::atomic::Ordering;\n\nuse std::time::Instant;\n\n\n\nmod data;\n\nmod 
renderer;\n\nmod treemap;\n\nuse data::*;\n\nuse tokio::task::JoinHandle;\n\nuse treemap::TreeMap;\n\n\n\nuse std::sync::atomic::AtomicBool;\n\n\n\nuse imgui::*;\n\n\n\nuse clipboard::ClipboardProvider;\n\nuse clipboard::ClipboardContext;\n\n\n\n#[derive(Debug)]\n", "file_path": "src/main.rs", "rank": 54, "score": 7.767825070760402 }, { "content": " for (k, v) in data.into_iter() {\n\n\n\n if let Some(ext) = k {\n\n ui.text(ext);\n\n } else {\n\n ui.text(\"<none>\");\n\n }\n\n \n\n ui.same_line(ui.window_content_region_width() - 96.0);\n\n ui.text(format!(\"{}\", ByteSize(v)));\n\n\n\n }\n\n \n\n });\n\n\n\n \n\n // this window shows the progress of tasks such as the \n\n Window::new(im_str!(\"Tasks\"))\n\n .size([256.0, 128.0], Condition::Always)\n\n .position([w as f32 - 256.0, h as f32 - 128.0], Condition::Always)\n", "file_path": "src/main.rs", "rank": 55, "score": 7.659423706462275 }, { "content": "/*\n\n Adapted from the [`imgui-rs`](https://github.com/imgui-rs/imgui-rs) examples\n\n*/\n\n\n\nuse gfx::Device;\n\nuse glutin::{\n\n dpi::LogicalSize,\n\n event::{Event, WindowEvent},\n\n event_loop::{ControlFlow, EventLoop},\n\n // XXX for easier porting...\n\n platform::run_return::EventLoopExtRunReturn,\n\n window::WindowBuilder,\n\n};\n\nuse imgui::{Context, FontConfig, FontSource, Ui};\n\nuse imgui_gfx_renderer::{Renderer, Shaders};\n\nuse imgui_winit_support::{HiDpiMode, WinitPlatform};\n\nuse old_school_gfx_glutin_ext::*;\n\nuse std::time::Instant;\n\n\n", "file_path": "src/renderer.rs", "rank": 56, "score": 6.1604048459786025 }, { "content": " .build_windowed(builder, events_loop)\n\n .expect(\"Failed to initialize graphics\")\n\n .init_gfx::<ColorFormat, DepthFormat>();\n\n\n\n let shaders = {\n\n let version = device.get_info().shading_language;\n\n if version.is_embedded {\n\n if version.major >= 3 {\n\n Shaders::GlSlEs300\n\n } else {\n\n Shaders::GlSlEs100\n\n }\n\n } else if version.major >= 4 {\n\n Shaders::GlSl400\n\n } else if version.major >= 3 
{\n\n if version.minor >= 2 {\n\n Shaders::GlSl150\n\n } else {\n\n Shaders::GlSl130\n\n }\n", "file_path": "src/renderer.rs", "rank": 57, "score": 6.103612129483139 }, { "content": " } else {\n\n Shaders::GlSl110\n\n }\n\n };\n\n let renderer =\n\n Renderer::init(imgui, &mut factory, shaders).expect(\"Failed to initialize renderer\");\n\n RenderSystem {\n\n renderer,\n\n windowed_context,\n\n device,\n\n factory,\n\n main_color: Some(main_color),\n\n main_depth,\n\n }\n\n }\n\n pub fn window(&self) -> &glutin::window::Window {\n\n self.windowed_context.window()\n\n }\n\n pub fn update_views(&mut self) {\n\n if let Some(main_color) = self.main_color.as_mut() {\n\n self.windowed_context\n\n .update_gfx(main_color, &mut self.main_depth);\n\n }\n\n }\n\n pub fn swap_buffers(&mut self) {\n\n self.windowed_context.swap_buffers().unwrap();\n\n }\n\n}", "file_path": "src/renderer.rs", "rank": 58, "score": 4.790915201782161 }, { "content": " {\n\n // Fix incorrect colors with sRGB framebuffer\n\n fn imgui_gamma_to_linear(col: [f32; 4]) -> [f32; 4] {\n\n let x = col[0].powf(2.2);\n\n let y = col[1].powf(2.2);\n\n let z = col[2].powf(2.2);\n\n let w = 1.0 - (1.0 - col[3]).powf(2.2);\n\n [x, y, z, w]\n\n }\n\n\n\n let style = imgui.style_mut();\n\n for col in 0..style.colors.len() {\n\n style.colors[col] = imgui_gamma_to_linear(style.colors[col]);\n\n }\n\n }\n\n\n\n let (windowed_context, device, mut factory, main_color, main_depth) =\n\n glutin::ContextBuilder::new()\n\n .with_vsync(true)\n\n .with_gfx_color_depth::<ColorFormat, DepthFormat>()\n", "file_path": "src/renderer.rs", "rank": 59, "score": 4.468408686331243 }, { "content": " run = false;\n\n }\n\n _ => (),\n\n }\n\n }\n\n *control_flow = ControlFlow::Exit;\n\n });\n\n if !run {\n\n break;\n\n }\n\n\n\n let io = imgui.io_mut();\n\n platform\n\n .prepare_frame(io, render_sys.window())\n\n .expect(\"Failed to start frame\");\n\n let now = Instant::now();\n\n io.update_delta_time(now - last_frame);\n\n last_frame = 
now;\n\n let mut ui = imgui.frame();\n\n run_ui(&mut run, &mut ui, render_sys.windowed_context.window());\n", "file_path": "src/renderer.rs", "rank": 60, "score": 3.9023654688516967 }, { "content": " pix[i+3] = col[3];\n\n }\n\n }\n\n }\n\n\n\n let col = [0; 4];\n\n\n\n for (&TreeMapNode {min_x: cmin_x, min_y: cmin_y, max_x: cmax_x, max_y: cmax_y}, &l) in self.nodes.iter().zip(self.leaf.iter()) {\n\n if l {\n\n let x0 = ((cmin_x - min_x) / dx).floor() as usize;\n\n let x1 = ((cmax_x - min_x) / dx).floor() as usize;\n\n let y0 = ((cmin_y - min_y) / dy).floor() as usize;\n\n let y1 = ((cmax_y - min_y) / dy).floor() as usize;\n\n\n\n fill_pixels(col, x0, y0, x1, y1, w, h, buf)\n\n }\n\n }\n\n }\n\n}", "file_path": "src/treemap.rs", "rank": 61, "score": 2.7136932161874623 }, { "content": "\n\n if let Some(main_color) = render_sys.main_color.as_mut() {\n\n encoder.clear(main_color, [1.0, 1.0, 1.0, 1.0]);\n\n }\n\n platform.prepare_render(&ui, render_sys.window());\n\n let draw_data = ui.render();\n\n if let Some(main_color) = render_sys.main_color.as_mut() {\n\n render_sys\n\n .renderer\n\n .render(&mut render_sys.factory, &mut encoder, main_color, draw_data)\n\n .expect(\"Rendering failed\");\n\n }\n\n encoder.flush(&mut render_sys.device);\n\n render_sys.swap_buffers();\n\n render_sys.device.cleanup();\n\n }\n\n }\n\n}\n\n\n\nmod types {\n", "file_path": "src/renderer.rs", "rank": 62, "score": 2.485726283501706 } ]
Rust
src/connectivity/overnet/lib/core/src/fidl_tests/mod.rs
casey/fuchsia
2b965e9a1e8f2ea346db540f3611a5be16bb4d6b
#![cfg(test)] use crate::future_help::log_errors; use crate::link::Link; use crate::router::test_util::{run, test_router_options}; use crate::router::Router; use crate::runtime::spawn; use fidl::HandleBased; use fuchsia_zircon_status as zx_status; use futures::prelude::*; use std::rc::Rc; mod channel; mod socket; struct Service(futures::channel::mpsc::Sender<fidl::Channel>); impl fidl_fuchsia_overnet::ServiceProviderProxyInterface for Service { fn connect_to_service( &self, chan: fidl::Channel, _connection_info: fidl_fuchsia_overnet::ConnectionInfo, ) -> std::result::Result<(), fidl::Error> { let mut sender = self.0.clone(); spawn(log_errors( async move { sender.send(chan).await.map_err(Into::into) }, "failed to send incoming request handle", )); Ok(()) } } struct Fixture { dist_a_to_b: fidl::Channel, dist_b: fidl::Channel, dist_a_to_c: fidl::Channel, dist_c: fidl::Channel, tx_fin: Option<futures::channel::oneshot::Sender<()>>, } fn forward(sender: Rc<Link>, receiver: Rc<Link>) { spawn(log_errors( async move { let mut frame = [0u8; 2048]; while let Some(n) = sender.next_send(&mut frame).await? 
{ receiver.received_packet(&mut frame[..n]).await?; } Ok(()) }, "forwarder failed", )); } async fn link(a: &Rc<Router>, b: &Rc<Router>) { let ab = a.new_link(b.node_id()).await.unwrap(); let ba = b.new_link(a.node_id()).await.unwrap(); forward(ab.clone(), ba.clone()); forward(ba, ab); } #[derive(Clone, Copy, Debug)] enum Target { A, B, C, } impl Fixture { fn new() -> Fixture { let (tx_init, rx_init) = std::sync::mpsc::channel(); let (tx_fin, rx_fin) = futures::channel::oneshot::channel(); std::thread::spawn(move || { run(|| async move { let router1 = Router::new(test_router_options()).unwrap(); let router2 = Router::new(test_router_options()).unwrap(); let router3 = Router::new(test_router_options()).unwrap(); link(&router1, &router2).await; link(&router2, &router3).await; link(&router3, &router1).await; const SERVICE: &'static str = "distribute_handle"; let (send_handle, mut recv_handle) = futures::channel::mpsc::channel(1); router2 .service_map() .register_service(SERVICE.to_string(), Box::new(Service(send_handle.clone()))) .await; router3 .service_map() .register_service(SERVICE.to_string(), Box::new(Service(send_handle))) .await; let (dist_a_to_b, dist_b) = fidl::Channel::create().unwrap(); let (dist_a_to_c, dist_c) = fidl::Channel::create().unwrap(); router1.connect_to_service(router2.node_id(), SERVICE, dist_b).await.unwrap(); let dist_b = recv_handle.next().await.unwrap(); router1.connect_to_service(router3.node_id(), SERVICE, dist_c).await.unwrap(); let dist_c = recv_handle.next().await.unwrap(); tx_init.send((dist_a_to_b, dist_b)).unwrap(); tx_init.send((dist_a_to_c, dist_c)).unwrap(); rx_fin.await.unwrap(); }) }); let (dist_a_to_b, dist_b) = rx_init.recv().unwrap(); let (dist_a_to_c, dist_c) = rx_init.recv().unwrap(); Fixture { dist_a_to_b, dist_b, dist_a_to_c, dist_c, tx_fin: Some(tx_fin) } } fn distribute_handle<H: HandleBased>(&self, h: H, target: Target) -> H { let h = h.into_handle(); log::trace!("distribute_handle: make {:?} on {:?}", h, target); 
let (dist_local, dist_remote) = match target { Target::A => return H::from_handle(h), Target::B => (&self.dist_a_to_b, &self.dist_b), Target::C => (&self.dist_a_to_c, &self.dist_c), }; assert!(dist_local.write(&[], &mut vec![h]) == Ok(())); loop { let (mut bytes, mut handles) = (Vec::new(), Vec::new()); match dist_remote.read_split(&mut bytes, &mut handles) { Ok(()) => { assert_eq!(bytes, vec![]); assert_eq!(handles.len(), 1); let h = handles.into_iter().next().unwrap(); log::trace!("distribute_handle: remote is {:?}", h); return H::from_handle(h); } Err(zx_status::Status::SHOULD_WAIT) => { continue; } Err(e) => panic!("Unexpected error {:?}", e), } } } } impl Drop for Fixture { fn drop(&mut self) { self.tx_fin.take().unwrap().send(()).unwrap(); } }
#![cfg(test)] use crate::future_help::log_errors; use crate::link::Link; use crate::router::test_util::{run, test_router_options}; use crate::router::Router; use crate::runtime::spawn; use fidl::HandleBased; use fuchsia_zircon_status as zx_status; use futures::prelude::*; use std::rc::Rc; mod channel; mod socket; struct Service(futures::channel::mpsc::Sender<fidl::Channel>); impl fidl_fuchsia_overnet::ServiceProviderProxyInterface for Service { fn connect_to_service( &self, chan: fidl::Channel, _connection_info: fidl_fuchsia_overnet::ConnectionInfo, ) -> std::result::Result<(), fidl::Error> { let mut sender = self.0.clone(); spawn(log_errors( async move { sender.send(chan).await.map_err(Into::into) }, "failed to send incoming request handle", )); Ok(()) } } struct Fixture { dist_a_to_b: fidl::Channel, dist_b: fidl::Channel, dist_a_to_c: fidl::Channel, dist_c: fidl::Channel, tx_fin: Option<futures::channel::oneshot::Sender<()>>, } fn forward(sender: Rc<Link>, receiver: Rc<Link>) { spawn(log_errors( async move { let mut frame = [0u8; 2048]; while let Some(n) = sender.next_send(&mut frame).await? 
{ receiver.received_packet(&mut frame[..n]).await?; } Ok(()) }, "forwarder failed", )); } async fn link(a: &Rc<Router>, b: &Rc<Router>) { let ab = a.new_link(b.node_id()).await.unwrap(); let ba = b.new_link(a.node_id()).await.unwrap(); forward(ab.clone(), ba.clone()); forward(ba, ab); } #[derive(Clone, Copy, Debug)] enum Target { A, B, C, } impl Fixture { fn new() -> Fixture { let (tx_init, rx_init) = std::sync::mpsc::channel(); let (tx_fin, rx_fin) = futures::channel::oneshot::channel(); std::thread::spawn(move || { run(|| async move { let router1 = Router::new(test_router_options()).unwrap(); let router2 = Router::new(test_router_options()).unwrap(); let router3 = Router::new(test_router_options()).unwrap(); link(&router1, &router2).await; link(&router2, &router3).await; link(&router3, &router1).await; const SERVICE: &'static str = "distribute_handle"; let (send_handle, mut recv_handle) = futures::channel::mpsc::channel(1); router2 .service_map() .register_service(SERVICE.to_string(), Box::new(Service(send_handle.clone()))) .await; router3 .service_map() .register_service(SERVICE.to_string(), Box::new(Service(send_handle))) .await; let (dist_a_to_b, dist_b) = fidl::Channel::create().unwrap(); let (dist_a_to_c, dist_c) = fidl::Channel::create().unwrap(); router1.connect_to_service(router2.node_id(), SERVICE, dist_b).await.unwrap(); let dist_b = recv_handle.next().await.unwrap(); router1.connect_to_service(router3.node_id(), SERVICE, dist_c).await.unwrap(); let dist_c = recv_handle.next().await.unwrap(); tx_init.send((dist_a_to_b, dist_b)).unwrap(); tx_init.send((dist_a_to_c, dist_c)).unwrap(); rx_fin.await.unwrap(); }) }); let (dist_a_to_b, dist_b) = rx_init.recv().unwrap(); let (dist_a_to_c, dist_c) = rx_init.recv().unwrap(); Fixture { dist_a_to_b, dist_b, dist_a_to_c, dist_c, tx_fin: Some(tx_fin) } }
} impl Drop for Fixture { fn drop(&mut self) { self.tx_fin.take().unwrap().send(()).unwrap(); } }
fn distribute_handle<H: HandleBased>(&self, h: H, target: Target) -> H { let h = h.into_handle(); log::trace!("distribute_handle: make {:?} on {:?}", h, target); let (dist_local, dist_remote) = match target { Target::A => return H::from_handle(h), Target::B => (&self.dist_a_to_b, &self.dist_b), Target::C => (&self.dist_a_to_c, &self.dist_c), }; assert!(dist_local.write(&[], &mut vec![h]) == Ok(())); loop { let (mut bytes, mut handles) = (Vec::new(), Vec::new()); match dist_remote.read_split(&mut bytes, &mut handles) { Ok(()) => { assert_eq!(bytes, vec![]); assert_eq!(handles.len(), 1); let h = handles.into_iter().next().unwrap(); log::trace!("distribute_handle: remote is {:?}", h); return H::from_handle(h); } Err(zx_status::Status::SHOULD_WAIT) => { continue; } Err(e) => panic!("Unexpected error {:?}", e), } } }
function_block-full_function
[]
Rust
src/tasks/git.rs
jokeyrhyme/dotfiles-rs
8e723ee271675ba783e15e608164813a4a136efb
use std::{collections::HashMap, fs, str}; use serde_derive::Deserialize; use crate::{ lib::task::{self, Status, Task}, utils, }; const COMMAND_DELIMITERS: &[char] = &[';', '|', '&']; const ERROR_MSG: &str = "error: git"; pub fn task() -> Task { Task { name: String::from("git"), sync, ..Default::default() } } #[derive(Debug, Deserialize, PartialEq)] struct ComplexConfigEntry { value: String, #[serde(default)] when: String, } impl<S> From<S> for ComplexConfigEntry where S: AsRef<str>, { fn from(s: S) -> ComplexConfigEntry { ComplexConfigEntry { value: String::from(s.as_ref()), when: String::new(), } } } #[derive(Debug, Deserialize, PartialEq)] struct Config { config: HashMap<String, ConfigEntry>, } impl Config { fn new() -> Config { Config { config: HashMap::<String, ConfigEntry>::new(), } } } impl<S> From<S> for Config where S: AsRef<str>, { fn from(s: S) -> Config { match toml::from_str(&s.as_ref()) { Ok(c) => c, Err(error) => { println!("warning: git: unable to parse TOML, {}", error); Config::new() } } } } #[derive(Debug, Deserialize, PartialEq)] #[serde(untagged)] enum ConfigEntry { Basic(String), Complex(ComplexConfigEntry), } fn extract_commands<S>(s: S) -> Vec<String> where S: AsRef<str>, { s.as_ref() .split(|c: char| COMMAND_DELIMITERS.contains(&c)) .filter_map(|s| match s.trim().split(' ').next() { Some("") => None, Some(s) => Some(String::from(s)), None => None, }) .collect() } fn sync() -> task::Result { if !utils::git::has() { return Ok(Status::Skipped); } let cfg = Config::from(load_config()); for (key, ce) in cfg.config { let cce = match ce { ConfigEntry::Basic(s) => ComplexConfigEntry::from(s), ConfigEntry::Complex(c) => c, }; match cce.when.as_str() { "unset" => { let current = utils::process::command_output("git", &["config", "--global", "--get", &key])?; if str::from_utf8(&current.stdout) .unwrap_or_default() .trim() .is_empty() { utils::process::command_spawn_wait( "git", &["config", "--global", &key, &cce.value], )?; } } "which" => { if 
extract_commands(cce.value.clone()) .iter() .all(|v| which::which(v).is_ok()) { utils::process::command_spawn_wait( "git", &["config", "--global", &key, &cce.value], )?; } } _ => { utils::process::command_spawn_wait( "git", &["config", "--global", &key, &cce.value], )?; } }; } if !utils::nodejs::has_npx() { return Ok(Status::Skipped); } utils::process::command_spawn_wait("npx", &["-q", "npm-merge-driver", "install", "--global"]) .expect(ERROR_MSG); if utils::nodejs::has_yarn() { utils::process::command_spawn_wait( "npx", &[ "-q", "npm-merge-driver", "install", "--global", "--driver-name", "yarn-merge-driver", "--driver", "npx npm-merge-driver merge %A %O %B %P -c yarn", "--files", "yarn.lock", ], ) .expect(ERROR_MSG); } Ok(Status::Done) } fn load_config() -> String { let cfg_path = utils::env::home_dir().join(".dotfiles/config/git.toml"); match fs::read_to_string(&cfg_path) { Ok(s) => s, Err(error) => { println!("git: ignoring config: {}", error); String::new() } } } #[cfg(test)] mod test { use super::*; #[test] fn extract_commands_from_strings() { assert_eq!(extract_commands("gpg"), vec![String::from("gpg")]); assert_eq!( extract_commands("diff | less"), vec![String::from("diff"), String::from("less")] ); assert_eq!( extract_commands("diff foo bar; less"), vec![String::from("diff"), String::from("less")] ); assert_eq!( extract_commands("diff && less foo"), vec![String::from("diff"), String::from("less")] ); } #[test] fn parse_config_toml() { let input = r#" [config] "color.ui" = "foo.bar" "gpg.program" = { value = "gpg", when = "which" } "#; let mut want = HashMap::<String, ConfigEntry>::new(); want.insert( String::from("color.ui"), ConfigEntry::Basic(String::from("foo.bar")), ); want.insert( String::from("gpg.program"), ConfigEntry::Complex(ComplexConfigEntry { value: String::from("gpg"), when: String::from("which"), }), ); let cfg = Config::from(input); assert_eq!(want, cfg.config); } }
use std::{collections::HashMap, fs, str}; use serde_derive::Deserialize; use crate::{ lib::task::{self, Status, Task}, utils, }; const COMMAND_DELIMITERS: &[char] = &[';', '|', '&']; const ERROR_MSG: &str = "error: git"; pub fn task() -> Task { Task { name: String::from("git"), sync, ..Default::default() } } #[derive(Debug, Deserialize, PartialEq)] struct ComplexConfigEntry { value: String, #[serde(default)] when: String, } impl<S> From<S> for ComplexConfigEntry where S: AsRef<str>, { fn from(s: S) -> ComplexConfigEntry { ComplexConfigEntry { value: String::from(s.as_ref()), when: String::new(), } } } #[derive(Debug, Deserialize, PartialEq)] struct Config { config: HashMap<String, ConfigEntry>, } impl Config { fn new() -> Config { Config { config: HashMap::<String, ConfigEntry>::new(), } } } impl<S> From<S> for Config where S: AsRef<str>, { fn from(s: S) -> Config { match toml::from_str(&s.as_ref()) { Ok(c) => c, Err(error) => { println!("warning: git: unable to parse TOML, {}", error); Config::new() } } } } #[derive(Debug, Deserialize, PartialEq)] #[serde(untagged)] enum ConfigEntry { Basic(String), Complex(ComplexConfigEntry), }
fn sync() -> task::Result { if !utils::git::has() { return Ok(Status::Skipped); } let cfg = Config::from(load_config()); for (key, ce) in cfg.config { let cce = match ce { ConfigEntry::Basic(s) => ComplexConfigEntry::from(s), ConfigEntry::Complex(c) => c, }; match cce.when.as_str() { "unset" => { let current = utils::process::command_output("git", &["config", "--global", "--get", &key])?; if str::from_utf8(&current.stdout) .unwrap_or_default() .trim() .is_empty() { utils::process::command_spawn_wait( "git", &["config", "--global", &key, &cce.value], )?; } } "which" => { if extract_commands(cce.value.clone()) .iter() .all(|v| which::which(v).is_ok()) { utils::process::command_spawn_wait( "git", &["config", "--global", &key, &cce.value], )?; } } _ => { utils::process::command_spawn_wait( "git", &["config", "--global", &key, &cce.value], )?; } }; } if !utils::nodejs::has_npx() { return Ok(Status::Skipped); } utils::process::command_spawn_wait("npx", &["-q", "npm-merge-driver", "install", "--global"]) .expect(ERROR_MSG); if utils::nodejs::has_yarn() { utils::process::command_spawn_wait( "npx", &[ "-q", "npm-merge-driver", "install", "--global", "--driver-name", "yarn-merge-driver", "--driver", "npx npm-merge-driver merge %A %O %B %P -c yarn", "--files", "yarn.lock", ], ) .expect(ERROR_MSG); } Ok(Status::Done) } fn load_config() -> String { let cfg_path = utils::env::home_dir().join(".dotfiles/config/git.toml"); match fs::read_to_string(&cfg_path) { Ok(s) => s, Err(error) => { println!("git: ignoring config: {}", error); String::new() } } } #[cfg(test)] mod test { use super::*; #[test] fn extract_commands_from_strings() { assert_eq!(extract_commands("gpg"), vec![String::from("gpg")]); assert_eq!( extract_commands("diff | less"), vec![String::from("diff"), String::from("less")] ); assert_eq!( extract_commands("diff foo bar; less"), vec![String::from("diff"), String::from("less")] ); assert_eq!( extract_commands("diff && less foo"), vec![String::from("diff"), 
String::from("less")] ); } #[test] fn parse_config_toml() { let input = r#" [config] "color.ui" = "foo.bar" "gpg.program" = { value = "gpg", when = "which" } "#; let mut want = HashMap::<String, ConfigEntry>::new(); want.insert( String::from("color.ui"), ConfigEntry::Basic(String::from("foo.bar")), ); want.insert( String::from("gpg.program"), ConfigEntry::Complex(ComplexConfigEntry { value: String::from("gpg"), when: String::from("which"), }), ); let cfg = Config::from(input); assert_eq!(want, cfg.config); } }
fn extract_commands<S>(s: S) -> Vec<String> where S: AsRef<str>, { s.as_ref() .split(|c: char| COMMAND_DELIMITERS.contains(&c)) .filter_map(|s| match s.trim().split(' ').next() { Some("") => None, Some(s) => Some(String::from(s)), None => None, }) .collect() }
function_block-full_function
[ { "content": "pub fn latest_version() -> Result<String, task::Error> {\n\n let tags: Vec<utils::github::Tag> = utils::github::fetch_tags(\"golang\", \"go\")?;\n\n let release_tags: Vec<utils::github::Tag> = tags\n\n .into_iter()\n\n .filter(|t| {\n\n // release tags look like \"go1.10.2\"\n\n // other tags start with \"weekly.\", or \"release.\", etc\n\n t.id.starts_with(\"go\") && version::is_stable(t.id.as_str())\n\n })\n\n .collect();\n\n\n\n if release_tags.is_empty() {\n\n return Err(task::Error::NoTags {});\n\n }\n\n match release_tags.last() {\n\n Some(latest) => Ok(latest.clone().id),\n\n None => Err(task::Error::NoTags {}),\n\n }\n\n}\n\n\n", "file_path": "src/utils/golang.rs", "rank": 0, "score": 345791.6388557494 }, { "content": "fn format_strings(key: &str, value: &Option<Vec<String>>) -> String {\n\n match value {\n\n Some(v) => format!(\"{} {}\\n\", key, v.join(\",\")),\n\n None => String::from(\"\"),\n\n }\n\n}\n\n\n", "file_path": "src/utils/ssh.rs", "rank": 1, "score": 265481.052173029 }, { "content": "fn format_pathbuf(key: &str, value: &Option<PathBuf>) -> String {\n\n match value {\n\n Some(v) => format!(\"{} {}\\n\", key, v.display()),\n\n None => String::from(\"\"),\n\n }\n\n}\n\n\n", "file_path": "src/utils/ssh.rs", "rank": 4, "score": 244840.8823846156 }, { "content": "pub fn current_version() -> String {\n\n match utils::process::command_output(\"node\", &[\"--version\"]) {\n\n Ok(output) => String::from(\n\n std::str::from_utf8(&output.stdout)\n\n .unwrap_or_default()\n\n .trim(),\n\n ),\n\n Err(_error) => String::from(\"\"),\n\n }\n\n}\n\n\n", "file_path": "src/utils/nodejs.rs", "rank": 7, "score": 234838.4250005251 }, { "content": "pub fn current_version() -> String {\n\n let exe_path = bin_dir().join(if OS == \"windows\" { \"go.exe\" } else { \"go\" });\n\n match utils::process::command_output(exe_path.to_str().unwrap(), &[\"version\"]) {\n\n Ok(output) => {\n\n let stdout = 
str::from_utf8(&output.stdout).unwrap_or_default().trim();\n\n let trailer = format!(\" {}/{}\", os(), arch());\n\n let headless = str::replace(stdout, \"go version \", \"\");\n\n str::replace(&headless, &trailer, \"\")\n\n }\n\n Err(_error) => String::from(\"\"),\n\n }\n\n}\n\n\n", "file_path": "src/utils/golang.rs", "rank": 8, "score": 234838.4250005251 }, { "content": "pub fn os() -> &'static str {\n\n if OS == \"macos\" {\n\n \"darwin\"\n\n } else {\n\n OS\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn arch_is_not_x86_64() {\n\n assert_ne!(arch(), \"x86_64\");\n\n }\n\n\n\n #[test]\n\n fn latest_version_found() {\n\n assert!(!latest_version().unwrap().is_empty());\n\n }\n\n\n\n #[test]\n\n fn os_is_not_macos() {\n\n assert_ne!(os(), \"macos\");\n\n }\n\n}\n", "file_path": "src/utils/golang.rs", "rank": 9, "score": 232505.78814240335 }, { "content": "pub fn os() -> &'static str {\n\n match OS {\n\n \"macos\" => \"darwin\",\n\n \"windows\" => \"win\",\n\n _ => OS,\n\n }\n\n}\n\n\n", "file_path": "src/utils/nodejs.rs", "rank": 10, "score": 232505.78814240335 }, { "content": "pub fn arch() -> &'static str {\n\n if ARCH == \"x86_64\" {\n\n \"amd64\"\n\n } else {\n\n ARCH\n\n }\n\n}\n\n\n", "file_path": "src/utils/golang.rs", "rank": 11, "score": 232505.78814240335 }, { "content": "pub fn arch() -> &'static str {\n\n match ARCH {\n\n \"x86_64\" => \"x64\",\n\n _ => ARCH,\n\n }\n\n}\n\n\n", "file_path": "src/utils/nodejs.rs", "rank": 12, "score": 232505.78814240335 }, { "content": "pub fn arch() -> &'static str {\n\n if ARCH == \"x86_64\" {\n\n \"64-bit\"\n\n } else {\n\n ARCH\n\n }\n\n}\n\n\n", "file_path": "src/tasks/vale.rs", "rank": 13, "score": 229970.5914867731 }, { "content": "pub fn os() -> &'static str {\n\n match OS {\n\n \"linux\" => \"Linux\",\n\n \"macos\" => \"macOS\",\n\n \"windows\" => \"Windows\",\n\n _ => OS,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/vale.rs", "rank": 14, "score": 229970.5914867731 }, { 
"content": "// the OS information in the release JSON is a bit different again :shrug:\n\npub fn release_os() -> &'static str {\n\n match OS {\n\n \"macos\" => \"osx\",\n\n _ => os(),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn arch_is_not_x86_64() {\n\n assert_ne!(arch(), \"x86_64\");\n\n }\n\n\n\n #[test]\n\n fn latest_version_found() {\n\n let version = latest_version().expect(\"must fetch\");\n\n assert!(version.starts_with('v'));\n\n }\n\n\n\n #[test]\n\n fn os_is_not_macos_or_windows() {\n\n assert_ne!(os(), \"macos\");\n\n assert_ne!(os(), \"windows\");\n\n }\n\n}\n", "file_path": "src/utils/nodejs.rs", "rank": 15, "score": 229296.21184807486 }, { "content": "pub fn symbolic_link_if_exists<P>(src: P, dest: P) -> task::Result\n\nwhere\n\n P: AsRef<Path> + Debug,\n\n{\n\n let d = dest.as_ref();\n\n let s = src.as_ref();\n\n match std::fs::read_link(&d) {\n\n Ok(target) => {\n\n if s == target {\n\n return Ok(Status::NoChange(format!(\n\n \"already symlinked: {} -> {}\",\n\n d.display(),\n\n target.display(),\n\n )));\n\n }\n\n }\n\n Err(_error) => {\n\n // does not exist, or not a symlink\n\n }\n\n };\n", "file_path": "src/utils/fs.rs", "rank": 16, "score": 224825.23058989405 }, { "content": "pub fn latest_version() -> io::Result<String> {\n\n let req = utils::http::create_request(DIST_JSON_URL, None);\n\n let mut res = utils::http::fetch_request(req)?;\n\n let mut body = String::new();\n\n res.read_to_string(&mut body)?;\n\n let releases: Vec<Release> = serde_json::from_str(&body)?;\n\n\n\n let latest_release: &Release = releases\n\n .iter()\n\n .find(|r| {\n\n version::is_stable(r.version.as_str())\n\n && !r.files.is_empty()\n\n && r.files.iter().any(|f| {\n\n f.starts_with(&format!(\n\n \"{}-{}\",\n\n utils::nodejs::release_os(),\n\n utils::nodejs::arch()\n\n ))\n\n })\n\n })\n\n .unwrap();\n\n\n\n Ok(String::from(latest_release.version.as_str().trim()))\n\n}\n\n\n", "file_path": "src/utils/nodejs.rs", "rank": 17, 
"score": 221579.70792944392 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"vscodejson\"),\n\n sync,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/tasks/vscodejson.rs", "rank": 18, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"ssh\"),\n\n sync,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/tasks/ssh.rs", "rank": 19, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"npm\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/npm.rs", "rank": 20, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"hadolint\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"hadolint\",\n\n repo: (\"hadolint\", \"hadolint\"),\n\n trim_version,\n\n version_arg: \"--version\",\n\n};\n\n\n", "file_path": "src/tasks/hadolint.rs", "rank": 21, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"atlantis\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHRA_TASK: GHRATask = GHRATask {\n\n asset_filter,\n\n command: \"atlantis\",\n\n repo: (\"runatlantis\", \"atlantis\"),\n\n trim_version,\n\n version_arg: \"version\",\n\n};\n\n\n", "file_path": "src/tasks/atlantis.rs", "rank": 22, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"vale\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHRA_TASK: GHRATask = GHRATask {\n\n asset_filter,\n\n command: \"vale\",\n\n repo: (\"errata-ai\", \"vale\"),\n\n trim_version,\n\n version_arg: \"--version\",\n\n};\n\n\n", "file_path": "src/tasks/vale.rs", "rank": 23, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"brew\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/brew.rs", 
"rank": 24, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"bazel\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"bazel\",\n\n repo: (\"bazelbuild\", \"bazel\"),\n\n trim_version,\n\n version_arg: \"version\",\n\n};\n\n\n", "file_path": "src/tasks/bazel.rs", "rank": 25, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"macos\"),\n\n sync,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/tasks/macos.rs", "rank": 26, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"rustc\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/rustc.rs", "rank": 27, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"dotfiles\"),\n\n sync,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/tasks/dotfiles.rs", "rank": 28, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"minikube\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"minikube\",\n\n repo: (\"kubernetes\", \"minikube\"),\n\n trim_version,\n\n version_arg: \"version\",\n\n};\n\n\n", "file_path": "src/tasks/minikube.rs", "rank": 29, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"dep\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"dep\",\n\n repo: (\"golang\", \"dep\"),\n\n trim_version,\n\n version_arg: \"version\",\n\n};\n\n\n", "file_path": "src/tasks/dep.rs", "rank": 30, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"vim\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/vim.rs", "rank": 31, 
"score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"gitsizer\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHRA_TASK: GHRATask = GHRATask {\n\n asset_filter,\n\n command: \"git-sizer\",\n\n repo: (\"github\", \"git-sizer\"),\n\n trim_version,\n\n version_arg: \"--version\",\n\n};\n\n\n", "file_path": "src/tasks/gitsizer.rs", "rank": 32, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"tmux\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/tmux.rs", "rank": 33, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"skaffold\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"skaffold\",\n\n repo: (\"GoogleCloudPlatform\", \"skaffold\"),\n\n trim_version,\n\n version_arg: \"version\",\n\n};\n\n\n", "file_path": "src/tasks/skaffold.rs", "rank": 34, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"bash\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/bash.rs", "rank": 35, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"gitleaks\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"gitleaks\",\n\n repo: (\"zricethezav\", \"gitleaks\"),\n\n trim_version,\n\n version_arg: \"--version\",\n\n};\n\n\n", "file_path": "src/tasks/gitleaks.rs", "rank": 36, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"jq\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"jq\",\n\n repo: (\"stedolan\", \"jq\"),\n\n trim_version,\n\n version_arg: \"--version\",\n\n};\n\n\n", "file_path": "src/tasks/jq.rs", "rank": 37, "score": 215366.57216127403 }, { 
"content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"nodejs\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/nodejs.rs", "rank": 38, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"pip\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/pip.rs", "rank": 39, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"windows\"),\n\n sync,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/tasks/windows.rs", "rank": 40, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"yq\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"yq\",\n\n repo: (\"mikefarah\", \"yq\"),\n\n trim_version,\n\n version_arg: \"--version\",\n\n};\n\n\n", "file_path": "src/tasks/yq.rs", "rank": 41, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"goget\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/goget.rs", "rank": 42, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"rustup\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/rustup.rs", "rank": 43, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"shfmt\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n\nconst GHR_TASK: GHRTask = GHRTask {\n\n asset_filter,\n\n command: \"shfmt\",\n\n repo: (\"mvdan\", \"sh\"),\n\n trim_version,\n\n version_arg: \"--version\",\n\n};\n\n\n", "file_path": "src/tasks/shfmt.rs", "rank": 44, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"rust\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/rust.rs", "rank": 45, "score": 215366.57216127403 }, { "content": "pub fn 
task() -> Task {\n\n Task {\n\n name: String::from(\"googlecloudsdk\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/googlecloudsdk.rs", "rank": 46, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"zsh\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/zsh.rs", "rank": 47, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"golang\"),\n\n sync,\n\n update,\n\n }\n\n}\n\n\n", "file_path": "src/tasks/golang.rs", "rank": 48, "score": 215366.57216127403 }, { "content": "pub fn task() -> Task {\n\n Task {\n\n name: String::from(\"vscode\"),\n\n sync,\n\n ..Default::default()\n\n }\n\n}\n\n\n", "file_path": "src/tasks/vscode.rs", "rank": 49, "score": 215366.57216127403 }, { "content": "fn sync() -> task::Result {\n\n if !has_code() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let src = utils::env::home_dir().join(\".dotfiles/config/vscode.json\");\n\n\n\n let settings_path = match OS {\n\n \"macos\" => \"Library/Application Support/Code/User/settings.json\",\n\n \"windows\" => \"AppData/Roaming/Code/User/settings.json\",\n\n _ => \".config/Code/User/settings.json\",\n\n };\n\n let dest = utils::env::home_dir().join(Path::new(settings_path));\n\n\n\n utils::fs::symbolic_link_if_exists(&src, &dest)\n\n}\n", "file_path": "src/tasks/vscodejson.rs", "rank": 50, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHRA_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/atlantis.rs", "rank": 51, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/bazel.rs", "rank": 52, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !rust::has_rustup() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let toolchains = rust::rustup_output(&[\"toolchain\", \"list\"])?;\n\n\n\n for t in TOOLCHAINS {\n\n let re = 
regex::Regex::new(&format!(\"^{}-\", t)).unwrap();\n\n if !re.is_match(&toolchains) {\n\n rust::rustup(&[\"toolchain\", \"install\", t])?;\n\n }\n\n }\n\n\n\n Ok(Status::Done)\n\n}\n\n\n", "file_path": "src/tasks/rustc.rs", "rank": 53, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !utils::golang::is_installed() {\n\n return Ok(Status::Done);\n\n }\n\n\n\n let mut favs = read_config();\n\n Favourites::fill_and_status(&mut favs)?;\n\n Favourites::cull_and_status(&mut favs)?;\n\n\n\n Ok(Status::Done)\n\n}\n\n\n", "file_path": "src/tasks/goget.rs", "rank": 54, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !has_code() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let cfg_path = utils::env::home_dir().join(\".dotfiles/config/vscode.toml\");\n\n\n\n let contents = match fs::read_to_string(&cfg_path) {\n\n Ok(s) => s,\n\n Err(error) => {\n\n return Err(task::Error::Io(String::from(\"ignoring config\"), error));\n\n }\n\n };\n\n\n\n let config: Config = toml::from_str(&contents).expect(\"cannot parse .../vscode.toml\");\n\n\n\n let exts = exts_installed();\n\n\n\n for ext in config.install {\n\n if !exts.contains(&ext) {\n", "file_path": "src/tasks/vscode.rs", "rank": 55, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/hadolint.rs", "rank": 56, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/jq.rs", "rank": 57, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/gitleaks.rs", "rank": 58, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if utils::golang::is_installed() {\n\n Ok(Status::NoChange(String::from(\"present\")))\n\n } else {\n\n let latest_version = match utils::golang::latest_version() {\n\n Ok(v) => v,\n\n Err(error) => {\n\n 
return Err(error);\n\n }\n\n };\n\n\n\n install_golang(latest_version)\n\n }\n\n\n\n // TODO: cleanup GOPATH/pkg: https://github.com/golang/go/issues/4719\n\n}\n\n\n", "file_path": "src/tasks/golang.rs", "rank": 59, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if utils::nodejs::has_node() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let latest = utils::nodejs::latest_version()?;\n\n match install_nodejs(latest.clone()) {\n\n Ok(()) => Ok(Status::Changed(String::from(\"unknown\"), latest)),\n\n Err(error) => Err(task::Error::Io(\n\n String::from(\"unable to install Node.js\"),\n\n error,\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/tasks/nodejs.rs", "rank": 60, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/dep.rs", "rank": 61, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if rust::has_rustup() {\n\n Ok(Status::NoChange(rust::rustup_version()))\n\n } else {\n\n // TODO: automate installation from https://rustup.rs\n\n Ok(Status::Skipped)\n\n }\n\n}\n\n\n", "file_path": "src/tasks/rustup.rs", "rank": 62, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !utils::nodejs::has_node() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n match configure_npm() {\n\n Ok(_) => {}\n\n Err(error) => {\n\n println!(\"warning: npm: unable to configure npm: {}\", error);\n\n }\n\n };\n\n\n\n // these often are included with the Windows version,\n\n // and prevent `npm` from updating itself\n\n for filename in &[\"npm\", \"npm.cmd\", \"npx\", \"npx.cmd\"] {\n\n utils::fs::delete_if_exists(&utils::nodejs::bin_dir().join(Path::new(&filename)));\n\n }\n\n\n\n if !utils::nodejs::has_npm() {\n\n let npm_cli_path = utils::nodejs::lib_dir().join(\"node_modules/npm/bin/npm-cli.js\");\n", "file_path": "src/tasks/npm.rs", "rank": 63, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n 
GHRA_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/vale.rs", "rank": 64, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !pip::has() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let mut favs = read_config()?;\n\n\n\n Favourites::fill_and_status(&mut favs)?;\n\n Favourites::cull_and_status(&mut favs)?;\n\n\n\n Ok(Status::Done)\n\n}\n\n\n", "file_path": "src/tasks/pip.rs", "rank": 65, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !cargo::has_cargo() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let cfg_path = utils::env::home_dir().join(\".dotfiles/config/rust.toml\");\n\n\n\n let contents = match fs::read_to_string(&cfg_path) {\n\n Ok(s) => s,\n\n Err(error) => {\n\n return Err(task::Error::Io(String::from(\"ignoring config\"), error));\n\n }\n\n };\n\n\n\n let mut favs: CargoFavourites = toml::from_str(&contents).expect(\"cannot parse .../rust.toml\");\n\n Favourites::fill_and_status(&mut favs)?;\n\n Favourites::cull_and_status(&mut favs)?;\n\n\n\n match fix_cargo_fmt() {\n\n Ok(()) => {}\n\n Err(error) => println!(\"error: rust: unable to fix `cargo fmt`: {:?}\", error),\n\n };\n\n\n\n Ok(Status::Done)\n\n}\n\n\n", "file_path": "src/tasks/rust.rs", "rank": 66, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if OS == \"windows\" || !has_bash() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let it_path = utils::env::home_dir().join(\".bash_it\");\n\n if !utils::git::path_is_git_repository(&it_path) {\n\n utils::fs::delete_if_exists(&it_path);\n\n let it_url = \"https://github.com/Bash-it/bash-it.git\";\n\n match utils::git::shallow_clone(it_url, &it_path.to_string_lossy()) {\n\n Ok(()) => {}\n\n Err(error) => println!(\"bash: unable to install bash-it: {}\", error),\n\n }\n\n }\n\n\n\n match utils::process::command_spawn_wait(\n\n \"bash\",\n\n &[\n\n utils::env::home_dir()\n\n .join(\".bash_it/install.sh\")\n", "file_path": "src/tasks/bash.rs", "rank": 67, 
"score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/minikube.rs", "rank": 68, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if OS != \"windows\" {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n println!(\"windows: manually configure %PATH% to include:\");\n\n\n\n let bin_path = utils::env::home_dir().join(\".local\").join(\"bin\");\n\n println!(\"- {}\", bin_path.display());\n\n\n\n let go_bin_path = utils::env::home_dir().join(\".local\").join(\"go\").join(\"bin\");\n\n println!(\"- {}\", go_bin_path.display());\n\n\n\n let node_bin_path = utils::env::home_dir().join(\".local\").join(\"node\");\n\n println!(\"- {}\", node_bin_path.display());\n\n\n\n Ok(Status::Done)\n\n}\n", "file_path": "src/tasks/windows.rs", "rank": 69, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if OS == \"windows\" || !has_zsh() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let oh_path = utils::env::home_dir().join(\".oh-my-zsh\");\n\n if !utils::git::path_is_git_repository(&oh_path) {\n\n utils::fs::delete_if_exists(&oh_path);\n\n let oh_url = \"https://github.com/robbyrussell/oh-my-zsh.git\";\n\n match utils::git::shallow_clone(oh_url, &oh_path.to_string_lossy()) {\n\n Ok(()) => {}\n\n Err(error) => println!(\"zsh: unable to install oh-my-zsh: {}\", error),\n\n }\n\n }\n\n\n\n utils::fs::delete_if_exists(utils::env::home_dir().join(\".zsh-pure\"));\n\n utils::fs::delete_if_exists(utils::env::home_dir().join(\".oh-my-zsh/custom/pure.zsh-theme\"));\n\n utils::fs::delete_if_exists(utils::env::home_dir().join(\".oh-my-zsh/custom/async.zsh\"));\n\n\n\n Ok(Status::Done)\n\n}\n\n\n", "file_path": "src/tasks/zsh.rs", "rank": 70, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/shfmt.rs", "rank": 71, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n 
GHRA_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/gitsizer.rs", "rank": 72, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/skaffold.rs", "rank": 73, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if OS != \"macos\" {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n utils::process::command_spawn_wait(\"qlmanage\", &[\"-d\", \"1\", \"-r\", \"cache\"])?;\n\n\n\n Ok(Status::Done)\n\n}\n", "file_path": "src/tasks/macos.rs", "rank": 74, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n GHR_TASK.sync()\n\n}\n\n\n", "file_path": "src/tasks/yq.rs", "rank": 75, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !utils::ssh::has() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let source_path = utils::env::home_dir()\n\n .join(\".dotfiles\")\n\n .join(\"config\")\n\n .join(\"ssh\");\n\n let source = match fs::read_to_string(&source_path) {\n\n Ok(s) => s,\n\n Err(_error) => String::from(\"\"),\n\n };\n\n\n\n let target_path = utils::env::home_dir().join(\".ssh\").join(\"config\");\n\n let target = match fs::read_to_string(&target_path) {\n\n Ok(s) => s,\n\n Err(_error) => String::from(\"\"),\n\n };\n\n\n", "file_path": "src/tasks/ssh.rs", "rank": 76, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n let sdk_path = utils::env::home_dir()\n\n .join(\".local\")\n\n .join(\"google-cloud-sdk\");\n\n if utils::git::path_is_git_repository(&sdk_path) {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n utils::fs::delete_if_exists(&sdk_path);\n\n\n\n let sdk_url = \"https://github.com/google-cloud-sdk/google-cloud-sdk.git\";\n\n utils::git::shallow_clone(sdk_url, &sdk_path.to_string_lossy())?;\n\n\n\n Ok(Status::Done)\n\n}\n\n\n", "file_path": "src/tasks/googlecloudsdk.rs", "rank": 77, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !brew::has_brew() {\n\n return 
Ok(Status::Skipped);\n\n }\n\n\n\n brew::brew(&[\"cleanup\"])?;\n\n\n\n Ok(Status::Done)\n\n}\n\n\n", "file_path": "src/tasks/brew.rs", "rank": 78, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n let target = utils::env::home_dir().join(\".dotfiles\");\n\n\n\n if utils::git::has() && utils::git::path_is_git_repository(&target) {\n\n utils::git::pull(&target);\n\n }\n\n Ok(Status::Done)\n\n}\n", "file_path": "src/tasks/dotfiles.rs", "rank": 79, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n if !has_tmux() {\n\n return Ok(Status::Skipped);\n\n }\n\n\n\n let tpm_path = utils::env::home_dir().join(\".tmux/plugins/tpm\");\n\n if !utils::git::path_is_git_repository(&tpm_path) {\n\n utils::fs::delete_if_exists(&tpm_path);\n\n let tpm_url = \"https://github.com/tmux-plugins/tpm.git\";\n\n match utils::git::shallow_clone(tpm_url, &tpm_path.to_string_lossy()) {\n\n Ok(()) => {}\n\n Err(error) => println!(\"tmux: unable to install tpm: {}\", error),\n\n }\n\n }\n\n\n\n if utils::git::path_is_git_repository(&tpm_path) {\n\n let empty_args: &[&str] = &[];\n\n\n\n let tpm_install_path = tpm_path.join(\"bin/install_plugins\");\n\n utils::process::command_spawn_wait(\n", "file_path": "src/tasks/tmux.rs", "rank": 80, "score": 207807.94258333527 }, { "content": "fn sync() -> task::Result {\n\n let src = utils::env::home_dir().join(\".dotfiles/config/vimrc\");\n\n\n\n for vim in &VIMS {\n\n if !vim.exists() {\n\n continue;\n\n }\n\n\n\n utils::fs::symbolic_link_if_exists(&src, &vim.rc_path())?;\n\n\n\n if !vim.has_vim_plug() {\n\n match vim.install_vim_plug() {\n\n Ok(_) => {}\n\n Err(error) => {\n\n // warn, but continue\n\n println!(\n\n \"error: {}: unable to install vim-plug: {:?}\",\n\n &vim.command, error\n\n );\n\n }\n", "file_path": "src/tasks/vim.rs", "rank": 81, "score": 207807.94258333527 }, { "content": "pub fn has() -> bool {\n\n match utils::process::command_output(\"git\", &[\"--version\"]) {\n\n Ok(output) => 
output.status.success(),\n\n Err(_error) => false,\n\n }\n\n}\n\n\n", "file_path": "src/utils/git.rs", "rank": 82, "score": 206013.1218007125 }, { "content": "pub fn fetch_request(req: Request) -> io::Result<impl Read> {\n\n let url = req.url().clone();\n\n if let Ok(rm) = cache::load_response_metadata(&url) {\n\n let a_while_ago = Utc::now() - Duration::minutes(15);\n\n if rm.date > a_while_ago {\n\n return cache::load_response_body(&url);\n\n }\n\n }\n\n // proceed with fresh HTTP request\n\n\n\n let client = create_client();\n\n let res = match client.execute(req) {\n\n Ok(r) => r,\n\n Err(e) => {\n\n return Err(io::Error::new(io::ErrorKind::Other, format!(\"{:?}\", e)));\n\n }\n\n };\n\n\n\n if res.status().is_success() {\n\n cache::store_response(&url, res)?;\n\n cache::load_response_body(&url)\n\n } else {\n\n println!(\"{:?} GET {}\", &res.version(), &res.url());\n\n let result = io::Error::new(io::ErrorKind::Other, \"non-success\");\n\n Err(result)\n\n }\n\n}\n\n\n", "file_path": "src/utils/http.rs", "rank": 83, "score": 205782.11501253129 }, { "content": "pub fn found_versions() -> HashMap<String, String> {\n\n if !has_cargo() {\n\n return HashMap::<String, String>::new();\n\n };\n\n let stdout = cargo_output(&[\"install\", \"--list\"]).unwrap_or_default();\n\n parse_installed(stdout)\n\n}\n\n\n", "file_path": "src/lib/cargo.rs", "rank": 84, "score": 203060.27104873618 }, { "content": "fn mapping() -> HashMap<String, Task> {\n\n let mut map = HashMap::<String, Task>::new();\n\n map.insert(String::from(\"atlantis\"), atlantis::task());\n\n map.insert(String::from(\"bash\"), bash::task());\n\n map.insert(String::from(\"bazel\"), bazel::task());\n\n map.insert(String::from(\"brew\"), brew::task());\n\n map.insert(String::from(\"dep\"), dep::task());\n\n map.insert(String::from(\"dotfiles\"), dotfiles::task());\n\n map.insert(String::from(\"git\"), git::task());\n\n map.insert(String::from(\"gitleaks\"), gitleaks::task());\n\n 
map.insert(String::from(\"gitsizer\"), gitsizer::task());\n\n map.insert(String::from(\"goget\"), goget::task());\n\n map.insert(String::from(\"golang\"), golang::task());\n\n map.insert(String::from(\"googlecloudsdk\"), googlecloudsdk::task());\n\n map.insert(String::from(\"hadolint\"), hadolint::task());\n\n map.insert(String::from(\"jq\"), jq::task());\n\n map.insert(String::from(\"macos\"), macos::task());\n\n map.insert(String::from(\"minikube\"), minikube::task());\n\n map.insert(String::from(\"nodejs\"), nodejs::task());\n\n map.insert(String::from(\"npm\"), npm::task());\n", "file_path": "src/tasks/mod.rs", "rank": 85, "score": 199679.845620938 }, { "content": "pub fn rustup_version() -> String {\n\n match command_output(rustup_exe(), &[\"--version\"]) {\n\n Ok(output) => String::from_utf8_lossy(&output.stdout).trim().to_string(),\n\n Err(_) => String::new(),\n\n }\n\n}\n\n\n", "file_path": "src/lib/rust.rs", "rank": 86, "score": 199002.79087805212 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Config {\n\n install: Vec<String>,\n\n uninstall: Vec<String>,\n\n}\n\n\n\nimpl Config {\n\n fn new() -> Config {\n\n Config {\n\n install: Vec::<String>::new(),\n\n uninstall: Vec::<String>::new(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tasks/npm.rs", "rank": 87, "score": 196717.48392172455 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Config {\n\n install: Vec<String>,\n\n uninstall: Vec<String>,\n\n}\n\n\n", "file_path": "src/tasks/vscode.rs", "rank": 88, "score": 196717.48392172455 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHR_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/hadolint.rs", "rank": 89, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHRA_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/gitsizer.rs", "rank": 90, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHRA_TASK.update(sync)\n\n}\n", "file_path": 
"src/tasks/vale.rs", "rank": 91, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHR_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/gitleaks.rs", "rank": 92, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHR_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/dep.rs", "rank": 93, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHR_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/bazel.rs", "rank": 94, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHR_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/yq.rs", "rank": 95, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHR_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/shfmt.rs", "rank": 96, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHR_TASK.update(sync)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn trim_version_output() {\n\n let stdout = String::from(\"minikube version: v0.28.2\\n\");\n\n let got = trim_version(stdout);\n\n assert_eq!(got, String::from(\"v0.28.2\"));\n\n }\n\n}\n", "file_path": "src/tasks/minikube.rs", "rank": 97, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHRA_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/atlantis.rs", "rank": 98, "score": 196318.81864395464 }, { "content": "fn update(sync: Status) -> task::Result {\n\n GHR_TASK.update(sync)\n\n}\n", "file_path": "src/tasks/skaffold.rs", "rank": 99, "score": 196318.81864395464 } ]
Rust
src/db/raw.rs
wrenger/schiller-lib
fd1e45797f602db7ec037a6892b6d1bff26d287f
use std::collections::HashMap; pub trait DatabaseExt { fn fetch(&self, statement: &str) -> Result<Vec<Vec<String>>, sqlite::Error>; fn transaction(&self) -> Result<Transaction, sqlite::Error>; } impl DatabaseExt for sqlite::Connection { fn fetch(&self, statement: &str) -> Result<Vec<Vec<String>>, sqlite::Error> { let mut result = vec![]; self.iterate(statement, |pairs| { result.push( pairs .iter() .map(|&(_, value)| value.unwrap_or_default().into()) .collect(), ); true })?; Ok(result) } fn transaction(&self) -> Result<Transaction, sqlite::Error> { self.execute("begin")?; Ok(Transaction { db: self }) } } pub struct Transaction<'a> { db: &'a sqlite::Connection, } impl<'a> Transaction<'a> { pub fn commit(self) -> Result<(), sqlite::Error> { self.db.execute("commit")?; std::mem::forget(self); Ok(()) } } impl<'a> Drop for Transaction<'a> { fn drop(&mut self) { self.db.execute("rollback").ok(); } } pub trait StatementExt { fn columns(&self) -> HashMap<String, usize>; } impl<'a> StatementExt for sqlite::Statement<'a> { fn columns(&self) -> HashMap<String, usize> { (0..self.column_count()) .map(|i| (self.column_name(i).to_string(), i)) .collect() } } #[cfg(test)] mod tests { use super::*; #[test] fn connection() { sqlite::Connection::open(":memory:").unwrap(); } #[test] fn fetch() { let db = sqlite::Connection::open(":memory:").unwrap(); db.execute("create table abc (a, b, c)").unwrap(); db.execute("insert into abc values ('a', 'b', 'c')") .unwrap(); let result = db.fetch("select * from abc").unwrap(); assert_eq!( vec![vec![ String::from("a"), String::from("b"), String::from("c") ]], result ); } #[test] fn multiple() { let db = sqlite::Connection::open(":memory:").unwrap(); db.execute( "begin; \ create table abc (a, b, c); \ insert into abc values ('d', 'e', 'f'); \ commit;", ) .unwrap(); let result = db .fetch( "insert into abc values ('a', 'b', 'c'); \ select * from abc order by a", ) .unwrap(); assert_eq!( vec![ vec![String::from("a"), String::from("b"), 
String::from("c")], vec![String::from("d"), String::from("e"), String::from("f")] ], result ); } #[test] fn prepare() { let db = sqlite::Connection::open(":memory:").unwrap(); db.execute("create table abc (a, b, c)").unwrap(); let mut stmt = db.prepare("insert into abc values (?, ?, ?)").unwrap(); stmt.bind(1, "1").unwrap(); stmt.bind(2, "2").unwrap(); stmt.bind(3, "3").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); let mut stmt = db.prepare("insert into abc values (?3, ?2, ?1)").unwrap(); stmt.bind(1, "4").unwrap(); stmt.bind(2, "5").unwrap(); stmt.bind(3, "6").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); assert_eq!( vec![ vec![String::from("1"), String::from("2"), String::from("3")], vec![String::from("6"), String::from("5"), String::from("4")] ], db.fetch("select * from abc").unwrap() ); let mut stmt = db.prepare("select a from abc where a='1'").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Row); assert_eq!(stmt.read::<i64>(0).unwrap(), 1); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); } #[test] fn transaction() { let db = sqlite::Connection::open(":memory:").unwrap(); db.execute("create table abc (a not null, b, c)").unwrap(); { let _transaction = db.transaction().unwrap(); let mut stmt = db.prepare("insert into abc values (?, ?, ?)").unwrap(); stmt.bind(1, "4").unwrap(); stmt.bind(2, "5").unwrap(); stmt.bind(3, "6").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); }; assert!(db.fetch("select * from abc").unwrap().is_empty()); { let _transaction = db.transaction().unwrap(); let mut stmt = db.prepare("insert into abc values (?, ?, ?)").unwrap(); stmt.bind(2, "5").unwrap(); stmt.bind(3, "6").unwrap(); stmt.next().expect_err("Null violation!"); }; assert!(db.fetch("select * from abc").unwrap().is_empty()); { let transaction = db.transaction().unwrap(); let mut stmt = db.prepare("insert into abc values (?, ?, ?)").unwrap(); stmt.bind(1, "1").unwrap(); stmt.bind(2, "2").unwrap(); stmt.bind(3, 
"3").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); println!("finish -> commit"); transaction.commit().unwrap(); }; assert_eq!( vec![vec![ String::from("1"), String::from("2"), String::from("3") ],], db.fetch("select * from abc").unwrap() ); } }
use std::collections::HashMap; pub trait DatabaseExt { fn fetch(&self, statement: &str) -> Result<Vec<Vec<String>>, sqlite::Error>; fn transaction(&self) -> Result<Transaction, sqlite::Error>; } impl DatabaseExt for sqlite::Connection {
fn transaction(&self) -> Result<Transaction, sqlite::Error> { self.execute("begin")?; Ok(Transaction { db: self }) } } pub struct Transaction<'a> { db: &'a sqlite::Connection, } impl<'a> Transaction<'a> { pub fn commit(self) -> Result<(), sqlite::Error> { self.db.execute("commit")?; std::mem::forget(self); Ok(()) } } impl<'a> Drop for Transaction<'a> { fn drop(&mut self) { self.db.execute("rollback").ok(); } } pub trait StatementExt { fn columns(&self) -> HashMap<String, usize>; } impl<'a> StatementExt for sqlite::Statement<'a> { fn columns(&self) -> HashMap<String, usize> { (0..self.column_count()) .map(|i| (self.column_name(i).to_string(), i)) .collect() } } #[cfg(test)] mod tests { use super::*; #[test] fn connection() { sqlite::Connection::open(":memory:").unwrap(); } #[test] fn fetch() { let db = sqlite::Connection::open(":memory:").unwrap(); db.execute("create table abc (a, b, c)").unwrap(); db.execute("insert into abc values ('a', 'b', 'c')") .unwrap(); let result = db.fetch("select * from abc").unwrap(); assert_eq!( vec![vec![ String::from("a"), String::from("b"), String::from("c") ]], result ); } #[test] fn multiple() { let db = sqlite::Connection::open(":memory:").unwrap(); db.execute( "begin; \ create table abc (a, b, c); \ insert into abc values ('d', 'e', 'f'); \ commit;", ) .unwrap(); let result = db .fetch( "insert into abc values ('a', 'b', 'c'); \ select * from abc order by a", ) .unwrap(); assert_eq!( vec![ vec![String::from("a"), String::from("b"), String::from("c")], vec![String::from("d"), String::from("e"), String::from("f")] ], result ); } #[test] fn prepare() { let db = sqlite::Connection::open(":memory:").unwrap(); db.execute("create table abc (a, b, c)").unwrap(); let mut stmt = db.prepare("insert into abc values (?, ?, ?)").unwrap(); stmt.bind(1, "1").unwrap(); stmt.bind(2, "2").unwrap(); stmt.bind(3, "3").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); let mut stmt = db.prepare("insert into abc values (?3, ?2, 
?1)").unwrap(); stmt.bind(1, "4").unwrap(); stmt.bind(2, "5").unwrap(); stmt.bind(3, "6").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); assert_eq!( vec![ vec![String::from("1"), String::from("2"), String::from("3")], vec![String::from("6"), String::from("5"), String::from("4")] ], db.fetch("select * from abc").unwrap() ); let mut stmt = db.prepare("select a from abc where a='1'").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Row); assert_eq!(stmt.read::<i64>(0).unwrap(), 1); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); } #[test] fn transaction() { let db = sqlite::Connection::open(":memory:").unwrap(); db.execute("create table abc (a not null, b, c)").unwrap(); { let _transaction = db.transaction().unwrap(); let mut stmt = db.prepare("insert into abc values (?, ?, ?)").unwrap(); stmt.bind(1, "4").unwrap(); stmt.bind(2, "5").unwrap(); stmt.bind(3, "6").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); }; assert!(db.fetch("select * from abc").unwrap().is_empty()); { let _transaction = db.transaction().unwrap(); let mut stmt = db.prepare("insert into abc values (?, ?, ?)").unwrap(); stmt.bind(2, "5").unwrap(); stmt.bind(3, "6").unwrap(); stmt.next().expect_err("Null violation!"); }; assert!(db.fetch("select * from abc").unwrap().is_empty()); { let transaction = db.transaction().unwrap(); let mut stmt = db.prepare("insert into abc values (?, ?, ?)").unwrap(); stmt.bind(1, "1").unwrap(); stmt.bind(2, "2").unwrap(); stmt.bind(3, "3").unwrap(); assert_eq!(stmt.next().unwrap(), sqlite::State::Done); println!("finish -> commit"); transaction.commit().unwrap(); }; assert_eq!( vec![vec![ String::from("1"), String::from("2"), String::from("3") ],], db.fetch("select * from abc").unwrap() ); } }
fn fetch(&self, statement: &str) -> Result<Vec<Vec<String>>, sqlite::Error> { let mut result = vec![]; self.iterate(statement, |pairs| { result.push( pairs .iter() .map(|&(_, value)| value.unwrap_or_default().into()) .collect(), ); true })?; Ok(result) }
function_block-full_function
[ { "content": "/// MARC21 Parsing\n\n///\n\n/// ## See Also\n\n/// https://www.dnb.de/EN/Professionell/Metadatendienste/Datenbezug/SRU/sru_node.html\n\npub fn parse(response: &str, isbn: &str) -> api::Result<BookData> {\n\n let document = roxmltree::Document::parse(response)?;\n\n\n\n let mut first_result = None;\n\n\n\n if let Some(records) = document\n\n .descendants()\n\n .find(|n| n.tag_name().name() == \"records\")\n\n {\n\n for record in records.children() {\n\n let record = parse_record(record);\n\n if record.isbns.iter().any(|e| e == isbn) {\n\n return Ok(record.data);\n\n }\n\n if first_result.is_none() {\n\n first_result = Some(record.data);\n\n }\n\n }\n\n }\n\n\n\n first_result.ok_or(api::Error::NothingFound)\n\n}\n\n\n", "file_path": "src/provider/marc21.rs", "rank": 2, "score": 117895.23877004367 }, { "content": "/// Deletes the roles from all users and inserts the new roles.\n\n///\n\n/// The roles of all users not contained in the given list are cleared.\n\npub fn update_roles(db: &Database, users: &[(&str, &str)]) -> api::Result<()> {\n\n let transaction = db.db.transaction()?;\n\n db.db.execute(DELETE_USER_ROLES)?;\n\n\n\n let mut stmt = db.db.prepare(UPDATE_USER_ROLE)?;\n\n for &(account, role) in users {\n\n let account = account.trim();\n\n if !account.is_empty() {\n\n stmt.bind(1, role.trim())?;\n\n stmt.bind(2, account)?;\n\n if stmt.next()? 
!= sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n stmt.reset()?;\n\n }\n\n }\n\n transaction.commit()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/user.rs", "rank": 3, "score": 117895.23877004367 }, { "content": "/// Parses the isbn and removing invalid characters.\n\n///\n\n/// If the checksum is invalid the stripped invalid isbn is returned.\n\npub fn parse(input: &str) -> Result<String, String> {\n\n // Collect numeric values\n\n let isbn: Vec<u8> = input\n\n .chars()\n\n .flat_map(|c| {\n\n if c == 'X' || c == 'x' {\n\n Some(10)\n\n } else {\n\n c.to_digit(10).map(|n| n as u8)\n\n }\n\n })\n\n .collect();\n\n\n\n if is10(&isbn) || is13(&isbn) {\n\n Ok(isbn_str(&isbn))\n\n } else {\n\n Err(isbn_str(&isbn))\n\n }\n\n}\n\n\n", "file_path": "src/isbn.rs", "rank": 4, "score": 115238.51942399633 }, { "content": "/// Deletes the user.\n\n/// This includes all its borrows & reservations.\n\npub fn delete(db: &Database, account: &str) -> api::Result<()> {\n\n let account = account.trim();\n\n if account.is_empty() {\n\n return Err(api::Error::InvalidUser);\n\n }\n\n let transaction = db.db.transaction()?;\n\n // remove user\n\n let mut stmt = db.db.prepare(DELETE_USER)?;\n\n stmt.bind(1, account)?;\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n\n\n // remove borrows & reservations\n\n db.db.execute(DELETE_UNUSED_USERS)?;\n\n transaction.commit()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/user.rs", "rank": 5, "score": 109025.23423189746 }, { "content": "/// Removes the category, assuming it is not referenced anywhere.\n\npub fn delete(db: &Database, id: &str) -> api::Result<()> {\n\n let id = id.trim();\n\n if id.is_empty() {\n\n return Err(api::Error::Arguments);\n\n }\n\n\n\n let transaction = db.db.transaction()?;\n\n // Do not allow the removal of used categories\n\n if references(db, id)? 
> 0 {\n\n return Err(api::Error::Logic);\n\n }\n\n\n\n let mut stmt = db.db.prepare(DELETE)?;\n\n stmt.bind(1, id)?;\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n\n\n transaction.commit()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/category.rs", "rank": 6, "score": 109025.23423189746 }, { "content": "/// Deletes the book including the its authors.\n\n/// Also borrowers & reservations for this book are removed.\n\npub fn delete(db: &Database, id: &str) -> api::Result<()> {\n\n let id = id.trim();\n\n if id.is_empty() {\n\n return Err(api::Error::InvalidBook);\n\n }\n\n\n\n let transaction = db.db.transaction()?;\n\n let mut stmt = db.db.prepare(DELETE)?;\n\n stmt.bind(1, id)?;\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n\n\n // delete missing authors\n\n db.db.execute(DELETE_UNUSED_AUTHORS)?;\n\n transaction.commit()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/book.rs", "rank": 7, "score": 109025.23423189746 }, { "content": "pub fn create(db: &Database, version: &str) -> api::Result<()> {\n\n let transaction = db.db.transaction()?;\n\n db.db.execute(CREATE_TABLES)?;\n\n update_version(&db.db, version)?;\n\n settings::update(db, &Settings::default())?;\n\n transaction.commit()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/structure.rs", "rank": 8, "score": 109025.23423189746 }, { "content": "/// Returns the book with the given `id`.\n\npub fn fetch(db: &Database, id: &str) -> api::Result<Book> {\n\n let mut stmt = db.db.prepare(FETCH)?;\n\n stmt.bind(1, id)?;\n\n if stmt.next()? 
== sqlite::State::Row {\n\n Book::read(&stmt, &stmt.columns())\n\n } else {\n\n Err(api::Error::SQL)\n\n }\n\n}\n\n\n", "file_path": "src/db/book.rs", "rank": 9, "score": 105405.31134185646 }, { "content": "/// Applies the related migration routines if the version changed.\n\n/// Returns true if the database was updated.\n\npub fn migrate(db: &Database, version: &str) -> api::Result<bool> {\n\n let transaction = db.db.transaction()?;\n\n let mut stmt = db.db.prepare(FETCH_VERSION)?;\n\n let old_version = if stmt.next()? == sqlite::State::Row {\n\n stmt.read::<String>(0)?\n\n } else {\n\n return Err(api::Error::UnsupportedProjectVersion);\n\n };\n\n gdnative::godot_print!(\"Start migration of {}\", old_version);\n\n\n\n let old_version: Version = old_version.parse()?;\n\n let new_version: Version = version.parse()?;\n\n if MIN_VERSION <= old_version && old_version <= new_version {\n\n for (patch_version, patch) in &PATCHES {\n\n if old_version < *patch_version {\n\n gdnative::godot_print!(\"Applying patch {}\", patch_version);\n\n patch(db)?;\n\n }\n\n }\n\n update_version(&db.db, version)?;\n\n transaction.commit()?;\n\n Ok(old_version != new_version)\n\n } else {\n\n Err(api::Error::UnsupportedProjectVersion)\n\n }\n\n}\n\n\n", "file_path": "src/db/structure.rs", "rank": 10, "score": 105405.31134185646 }, { "content": "/// Returns the number of books in this category.\n\npub fn references(db: &Database, id: &str) -> api::Result<i64> {\n\n let id = id.trim();\n\n if id.is_empty() {\n\n return Err(api::Error::Arguments);\n\n }\n\n\n\n let mut stmt = db.db.prepare(REFERENCED)?;\n\n stmt.bind(1, id)?;\n\n if stmt.next()? 
!= sqlite::State::Row {\n\n return Err(api::Error::SQL);\n\n }\n\n Ok(stmt.read(0)?)\n\n}\n", "file_path": "src/db/category.rs", "rank": 11, "score": 105405.31134185646 }, { "content": "/// Returns the user with the given `id`.\n\npub fn fetch(db: &Database, id: &str) -> api::Result<User> {\n\n let mut stmt = db.db.prepare(FETCH_USER)?;\n\n stmt.bind(1, id)?;\n\n if stmt.next()? == sqlite::State::Row {\n\n User::read(&stmt, &stmt.columns())\n\n } else {\n\n Err(api::Error::SQL)\n\n }\n\n}\n\n\n", "file_path": "src/db/user.rs", "rank": 12, "score": 105405.31134185646 }, { "content": "/// Updates the category and all references.\n\npub fn update(db: &Database, id: &str, category: &Category) -> api::Result<()> {\n\n if !category.is_valid() {\n\n return Err(api::Error::Arguments);\n\n }\n\n\n\n let transaction = db.db.transaction()?;\n\n // Update category\n\n let mut stmt = db.db.prepare(UPDATE)?;\n\n stmt.bind(1, category.id.trim())?;\n\n stmt.bind(2, category.name.trim())?;\n\n stmt.bind(3, category.section.trim())?;\n\n stmt.bind(4, id)?;\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n\n\n if id != category.id {\n\n // Update category ids of related media\n\n let mut stmt = db.db.prepare(UPDATE_MEDIA)?;\n\n stmt.bind(1, category.id.trim())?;\n\n stmt.bind(2, id)?;\n\n if stmt.next()? 
!= sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n }\n\n\n\n transaction.commit()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/category.rs", "rank": 13, "score": 102019.25983880207 }, { "content": "/// Updates the book and all references if its id changes.\n\npub fn update(db: &Database, previous_id: &str, book: &Book) -> api::Result<()> {\n\n let previous_id = previous_id.trim();\n\n if previous_id.is_empty() || !book.is_valid() {\n\n return Err(api::Error::InvalidBook);\n\n }\n\n let isbn = if !book.isbn.trim().is_empty() {\n\n crate::isbn::parse(&book.isbn).unwrap_or_else(|invalid_isbn| invalid_isbn)\n\n } else {\n\n String::new()\n\n };\n\n let transaction = db.db.transaction()?;\n\n // update book\n\n let mut stmt = db.db.prepare(UPDATE)?;\n\n stmt.bind(1, book.id.trim())?;\n\n stmt.bind(2, isbn.trim())?;\n\n stmt.bind(3, book.title.trim())?;\n\n stmt.bind(4, book.publisher.trim())?;\n\n stmt.bind(5, book.year)?;\n\n stmt.bind(6, book.costs)?;\n\n stmt.bind(7, book.note.trim())?;\n", "file_path": "src/db/book.rs", "rank": 14, "score": 100208.35403507194 }, { "content": "/// Updates the user and all references if its account changes.\n\npub fn update(db: &Database, previous_account: &str, user: &User) -> api::Result<()> {\n\n let previous_account = previous_account.trim();\n\n if previous_account.is_empty() || !user.is_valid() {\n\n return Err(api::Error::InvalidUser);\n\n }\n\n let transaction = db.db.transaction()?;\n\n // update user\n\n let mut stmt = db.db.prepare(UPDATE_USER)?;\n\n stmt.bind(1, user.account.trim())?;\n\n stmt.bind(2, user.forename.trim())?;\n\n stmt.bind(3, user.surname.trim())?;\n\n stmt.bind(4, user.role.trim())?;\n\n stmt.bind(5, user.may_borrow as i64)?;\n\n stmt.bind(6, previous_account)?;\n\n if stmt.next()? 
!= sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n\n\n // update borrows\n\n let mut stmt = db.db.prepare(UPDATE_USER_BORROWS)?;\n", "file_path": "src/db/user.rs", "rank": 15, "score": 100208.35403507193 }, { "content": "/// Performs a simple media search with the given `text`.\n\npub fn search<'a>(db: &'a Database, text: &str) -> api::Result<DBIter<'a, Book>> {\n\n let mut stmt = db.db.prepare(SEARCH)?;\n\n let text = text.trim();\n\n stmt.bind(1, text)?;\n\n Ok(DBIter::new(stmt))\n\n}\n\n\n", "file_path": "src/db/book.rs", "rank": 16, "score": 96377.33670756221 }, { "content": "/// Performes a simple user search with the given `text`.\n\npub fn search<'a>(db: &'a Database, text: &str) -> api::Result<DBIter<'a, User>> {\n\n let mut stmt = db.db.prepare(QUERY_USERS)?;\n\n let text = text.trim();\n\n stmt.bind(1, text)?;\n\n Ok(DBIter::new(stmt))\n\n}\n\n\n", "file_path": "src/db/user.rs", "rank": 17, "score": 96377.33670756221 }, { "content": "/// Conversion from database entries.\n\npub trait ReadStmt: Sized {\n\n type Error: std::fmt::Debug;\n\n fn read(\n\n stmt: &sqlite::Statement,\n\n columns: &HashMap<String, usize>,\n\n ) -> Result<Self, Self::Error>;\n\n}\n\n\n\nimpl<'a, T: ReadStmt> Iterator for DBIter<'a, T> {\n\n type Item = T;\n\n fn next(&mut self) -> Option<T> {\n\n if let Ok(state) = self.stmt.next() {\n\n if state != sqlite::State::Done {\n\n match T::read(&self.stmt, &self.columns) {\n\n Ok(r) => Some(r),\n\n Err(e) => {\n\n gdnative::godot_print!(\"SQL! 
{:?}\", e);\n\n None\n\n }\n\n }\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/db/mod.rs", "rank": 18, "score": 94287.81379972445 }, { "content": "fn id_prefix(author: &str, category: &str) -> String {\n\n let mut author_prefix = author[author.rfind(' ').map(|i| i + 1).unwrap_or_default()..]\n\n .replace(&['ä', 'Ä'][..], \"A\")\n\n .replace(&['ö', 'Ö'][..], \"O\")\n\n .replace(&['ü', 'Ü'][..], \"U\")\n\n .replace('ß', \"S\")\n\n .replace(|x: char| !x.is_ascii_alphabetic(), \"\")\n\n .to_ascii_uppercase();\n\n if author_prefix.is_empty() {\n\n author_prefix = \"XXXX\".into();\n\n }\n\n\n\n let category = if !category.is_empty() {\n\n category\n\n } else {\n\n \"XXXX\"\n\n };\n\n\n\n format!(\n\n \"{} {}\",\n", "file_path": "src/db/book.rs", "rank": 19, "score": 89984.91196608229 }, { "content": "pub fn send(\n\n host: &str,\n\n password: &str,\n\n from: &str,\n\n to: &str,\n\n subject: &str,\n\n body: &str,\n\n) -> api::Result<()> {\n\n // Change encoding of äöü to ascii\n\n let subject = subject.nfc().collect::<String>();\n\n let body = body.nfc().collect::<String>();\n\n\n\n // Create mail\n\n let email = Message::builder()\n\n .from(Mailbox::new(None, Address::new(from, host)?))\n\n .to(Mailbox::new(None, Address::new(to, host)?))\n\n .subject(subject)\n\n .singlepart(\n\n SinglePartBuilder::new()\n\n .content_type(ContentType::TEXT_PLAIN)\n", "file_path": "src/mail.rs", "rank": 20, "score": 89006.75053300963 }, { "content": "fn parse_costs(costs: &str) -> f64 {\n\n if let Some(begin) = costs.find(\"EUR\") {\n\n let begin = begin + 4;\n\n if begin < costs.len() {\n\n let end = costs[begin..]\n\n .find(' ')\n\n .map(|i| begin + i)\n\n .unwrap_or_else(|| costs.len());\n\n return costs[begin..end].parse().unwrap_or_default();\n\n }\n\n } else if let Some(begin) = costs.find(\"DM\") {\n\n let begin = begin + 3;\n\n if begin < costs.len() {\n\n let end = costs[begin..]\n\n .find(' ')\n\n .map(|i| begin + i)\n\n 
.unwrap_or_else(|| costs.len());\n\n let costs: f64 = costs[begin..end].parse().unwrap_or_default();\n\n return (costs * DM_TO_EUR * 100.0).round() / 100.0;\n\n }\n", "file_path": "src/provider/marc21.rs", "rank": 21, "score": 81871.35474260041 }, { "content": "fn subfield(datafield: roxmltree::Node, code: &str) -> Option<String> {\n\n if let Some(subfield) = datafield\n\n .children()\n\n .find(|n| n.has_tag_name(\"subfield\") && n.attribute(\"code\") == Some(code))\n\n {\n\n if let Some(mut s) = subfield.text().map(|s| s.nfc().collect::<String>()) {\n\n s.retain(|c| !c.is_control()); // remove control characters\n\n Some(s)\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/provider/marc21.rs", "rank": 22, "score": 72164.13431044601 }, { "content": "pub fn fetch(db: &Database) -> api::Result<Stats> {\n\n let mut stmt = db.db.prepare(STATS)?;\n\n if stmt.next()? == sqlite::State::Row {\n\n ReadStmt::read(&stmt, &stmt.columns())\n\n } else {\n\n Err(api::Error::SQL)\n\n }\n\n}\n", "file_path": "src/db/stats.rs", "rank": 23, "score": 71379.94052868242 }, { "content": "pub fn fetch(db: &Database) -> api::Result<Settings> {\n\n let stmt = db.db.prepare(SETTINGS_FETCH)?;\n\n Ok(Settings::from_iter(DBIter::new(stmt)))\n\n}\n", "file_path": "src/db/settings.rs", "rank": 24, "score": 71379.94052868242 }, { "content": "fn update_version(db: &sqlite::Connection, version: &str) -> api::Result<()> {\n\n let mut stmt = db.prepare(UPDATE_VERSION)?;\n\n stmt.bind(1, version)?;\n\n if stmt.next()? 
!= sqlite::State::Done {\n\n Err(api::Error::SQL)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/db/structure.rs", "rank": 25, "score": 70798.66135432705 }, { "content": "pub fn update(db: &Database, settings: &Settings) -> api::Result<()> {\n\n let mut stmt = db.db.prepare(SETTINGS_UPDATE)?;\n\n stmt.bind(1, settings.borrowing_duration)?;\n\n stmt.bind(2, settings.user_path.trim())?;\n\n stmt.bind(3, settings.user_delimiter.trim())?;\n\n stmt.bind(4, settings.dnb_token.trim())?;\n\n stmt.bind(5, settings.mail_last_reminder.trim())?;\n\n stmt.bind(6, settings.mail_from.trim())?;\n\n stmt.bind(7, settings.mail_host.trim())?;\n\n stmt.bind(8, settings.mail_password.trim())?;\n\n stmt.bind(9, settings.mail_info_subject.trim())?;\n\n stmt.bind(10, settings.mail_info_content.trim())?;\n\n stmt.bind(11, settings.mail_overdue_subject.trim())?;\n\n stmt.bind(12, settings.mail_overdue_content.trim())?;\n\n stmt.bind(13, settings.mail_overdue2_subject.trim())?;\n\n stmt.bind(14, settings.mail_overdue2_content.trim())?;\n\n\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/settings.rs", "rank": 26, "score": 68937.93919152505 }, { "content": "/// Adds a new category.\n\npub fn add(db: &Database, category: &Category) -> api::Result<()> {\n\n if !category.is_valid() {\n\n return Err(api::Error::Arguments);\n\n }\n\n\n\n let mut stmt = db.db.prepare(ADD)?;\n\n stmt.bind(1, category.id.trim())?;\n\n stmt.bind(2, category.name.trim())?;\n\n stmt.bind(3, category.section.trim())?;\n\n if stmt.next()? 
!= sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/category.rs", "rank": 27, "score": 68937.93919152505 }, { "content": "/// Adds a new user.\n\npub fn add(db: &Database, user: &User) -> api::Result<()> {\n\n if !user.is_valid() {\n\n return Err(api::Error::InvalidUser);\n\n }\n\n let mut stmt = db.db.prepare(ADD_USER)?;\n\n stmt.bind(1, user.account.trim())?;\n\n stmt.bind(2, user.forename.trim())?;\n\n stmt.bind(3, user.surname.trim())?;\n\n stmt.bind(4, user.role.trim())?;\n\n stmt.bind(5, user.may_borrow as i64)?;\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/user.rs", "rank": 28, "score": 68937.93919152505 }, { "content": "/// Adds a new book.\n\npub fn add(db: &Database, book: &Book) -> api::Result<()> {\n\n if !book.is_valid() {\n\n return Err(api::Error::InvalidBook);\n\n }\n\n let isbn = if !book.isbn.trim().is_empty() {\n\n crate::isbn::parse(&book.isbn).unwrap_or_else(|invalid_isbn| invalid_isbn)\n\n } else {\n\n String::new()\n\n };\n\n let transaction = db.db.transaction()?;\n\n let mut stmt = db.db.prepare(ADD)?;\n\n stmt.bind(1, book.id.trim())?;\n\n stmt.bind(2, isbn.trim())?;\n\n stmt.bind(3, book.title.trim())?;\n\n stmt.bind(4, book.publisher.trim())?;\n\n stmt.bind(5, book.year)?;\n\n stmt.bind(6, book.costs)?;\n\n stmt.bind(7, book.note.trim())?;\n\n stmt.bind(8, book.borrowable as i64)?;\n\n stmt.bind(9, book.category.trim())?;\n", "file_path": "src/db/book.rs", "rank": 29, "score": 68937.93919152505 }, { "content": "/// Returns all categories.\n\npub fn list(db: &Database) -> api::Result<DBIter<Category>> {\n\n let stmt = db.db.prepare(LIST)?;\n\n Ok(DBIter::new(stmt))\n\n}\n\n\n", "file_path": "src/db/category.rs", "rank": 30, "score": 67585.48328540364 }, { "content": "/// Removes the reservation from the specified book.\n\npub fn release(db: &Database, book: &mut Book) -> api::Result<()> {\n\n if 
book.reservation.is_empty() {\n\n return Err(api::Error::Logic);\n\n }\n\n\n\n let mut stmt = db.db.prepare(UPDATE_RELEASE)?;\n\n stmt.bind(1, book.id.as_str())?;\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n book.reservation = String::new();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/lending.rs", "rank": 31, "score": 66658.38556703049 }, { "content": "/// Return the list of expired loan periods.\n\npub fn overdues(db: &Database) -> api::Result<DBIter<(Book, User)>> {\n\n let stmt = db.db.prepare(QUERY_EXPIRED)?;\n\n Ok(DBIter::new(stmt))\n\n}\n\n\n\nimpl ReadStmt for (Book, User) {\n\n type Error = api::Error;\n\n\n\n fn read(\n\n stmt: &sqlite::Statement<'_>,\n\n columns: &HashMap<String, usize>,\n\n ) -> api::Result<(Book, User)> {\n\n Ok((Book::read(stmt, columns)?, User::read(stmt, columns)?))\n\n }\n\n}\n", "file_path": "src/db/lending.rs", "rank": 32, "score": 65387.73739092832 }, { "content": "/// Generates a new unique id based on the authors surname and the category.\n\npub fn generate_id(db: &Database, book: &Book) -> api::Result<String> {\n\n let prefix = id_prefix(\n\n book.authors.first().map(|s| s.trim()).unwrap_or_default(),\n\n book.category.trim(),\n\n );\n\n println!(\"Prefix {}\", prefix);\n\n let id = book.id.trim();\n\n if id.starts_with(&prefix)\n\n && id.len() > prefix.len() + 1\n\n && &id[prefix.len()..prefix.len() + 1] == \" \"\n\n {\n\n return Ok(id.to_string());\n\n }\n\n\n\n let mut stmt = db.db.prepare(UNUSED_ID)?;\n\n stmt.bind(1, prefix.len() as i64)?;\n\n stmt.bind(2, prefix.as_str())?;\n\n if stmt.next()? != sqlite::State::Row {\n\n return Err(api::Error::SQL);\n\n }\n\n let id = stmt.read::<i64>(0)? 
+ 1;\n\n Ok(format!(\"{} {}\", prefix, id))\n\n}\n\n\n", "file_path": "src/db/book.rs", "rank": 33, "score": 65387.73739092832 }, { "content": "/// Returns the book.\n\npub fn return_back(db: &Database, book: &mut Book) -> api::Result<()> {\n\n if book.borrower.is_empty() {\n\n return Err(api::Error::Logic);\n\n }\n\n\n\n let mut stmt = db.db.prepare(UPDATE_REVOKE)?;\n\n stmt.bind(1, book.id.as_str())?;\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n book.borrower = String::new();\n\n book.deadline = String::new();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/lending.rs", "rank": 34, "score": 65387.73739092832 }, { "content": "fn isbn_str(isbn: &[u8]) -> String {\n\n isbn.iter()\n\n .map(|&n| std::char::from_digit(n as _, 10).unwrap_or('X'))\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn parse_isbns() {\n\n assert_eq!(parse(\"\"), Err(\"\".into()));\n\n assert_eq!(parse(\"1234567890\"), Err(\"1234567890\".into()));\n\n assert_eq!(parse(\"3-440-03914-5\"), Ok(\"3440039145\".into()));\n\n assert_eq!(parse(\"978-3923923410\"), Ok(\"9783923923410\".into()));\n\n assert_eq!(parse(\"978-1338099133\"), Ok(\"9781338099133\".into()));\n\n assert_eq!(parse(\"353411292X\"), Ok(\"353411292X\".into()));\n\n assert_eq!(parse(\"35341129XX\"), Err(\"35341129XX\".into()));\n\n }\n\n}\n", "file_path": "src/isbn.rs", "rank": 35, "score": 62716.26879780865 }, { "content": "/// Creates a reservation for the borrowed book.\n\npub fn reserve(db: &Database, book: &mut Book, user: &User) -> api::Result<()> {\n\n if !user.may_borrow {\n\n return Err(api::Error::LendingUserMayNotBorrow);\n\n }\n\n if !book.borrowable {\n\n return Err(api::Error::LendingBookNotBorrowable);\n\n }\n\n if !book.reservation.is_empty() {\n\n return Err(api::Error::LendingBookAlreadyReserved);\n\n }\n\n if book.borrower.is_empty() {\n\n return Err(api::Error::LendingBookNotBorrowed);\n\n }\n\n if book.borrower == user.account 
{\n\n return Err(api::Error::LendingBookAlreadyBorrowedByUser);\n\n }\n\n\n\n let mut stmt = db.db.prepare(UPDATE_RESERVE)?;\n\n stmt.bind(1, user.account.as_str())?;\n\n stmt.bind(2, book.id.as_str())?;\n\n if stmt.next()? != sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n book.reservation = user.account.clone();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/lending.rs", "rank": 36, "score": 62525.52788180057 }, { "content": "/// Lends the book to the specified user.\n\npub fn lend(db: &Database, book: &mut Book, user: &User, days: i64) -> api::Result<()> {\n\n if !user.may_borrow {\n\n return Err(api::Error::LendingUserMayNotBorrow);\n\n }\n\n if !book.borrowable {\n\n return Err(api::Error::LendingBookNotBorrowable);\n\n }\n\n if !book.reservation.is_empty() {\n\n if book.reservation == user.account {\n\n release(db, book)?; // Allow lending to reserver\n\n } else {\n\n return Err(api::Error::LendingBookAlreadyReserved);\n\n }\n\n }\n\n // Allow renewal\n\n if !book.borrower.is_empty() && book.borrower != user.account {\n\n return Err(api::Error::LendingBookAlreadyBorrowed);\n\n }\n\n\n\n let deadline = chrono::Utc::today() + chrono::Duration::days(days);\n", "file_path": "src/db/lending.rs", "rank": 37, "score": 58877.277076823695 }, { "content": "/// Performs an advanced media search with the given search parameters.\n\npub fn search_advanced<'a>(db: &'a Database, params: &BookSearch) -> api::Result<DBIter<'a, Book>> {\n\n let mut stmt = db.db.prepare(SEARCH_ADVANCED)?;\n\n stmt.bind(1, params.id.trim())?;\n\n stmt.bind(2, params.isbn.trim())?;\n\n stmt.bind(3, params.title.trim())?;\n\n stmt.bind(4, params.publisher.trim())?;\n\n stmt.bind(5, params.authors.trim())?;\n\n let year = params.year.trim();\n\n if let Some(i) = year.find('-') {\n\n stmt.bind(6, year[..i].trim())?;\n\n stmt.bind(7, year[i + 1..].trim())?;\n\n } else if year.is_empty() {\n\n stmt.bind(6, std::i64::MIN)?;\n\n stmt.bind(7, std::i64::MAX)?;\n\n } else {\n\n stmt.bind(6, 
year)?;\n\n stmt.bind(7, year)?;\n\n }\n\n stmt.bind(8, params.category.trim())?;\n\n stmt.bind(9, params.note.trim())?;\n", "file_path": "src/db/book.rs", "rank": 38, "score": 58877.277076823695 }, { "content": "fn checksum13(isbn: &[u8]) -> u8 {\n\n let checksum = isbn[..12]\n\n .iter()\n\n .enumerate()\n\n .fold(0, |acc, (i, &n)| acc + (1 + 2 * (i % 2)) * n as usize);\n\n ((400 - checksum) % 10) as u8\n\n}\n\n\n", "file_path": "src/isbn.rs", "rank": 39, "score": 38074.49731918196 }, { "content": "fn is10(isbn: &[u8]) -> bool {\n\n isbn.len() == 10 && isbn[0..9].iter().all(|&n| n < 10) && isbn[9] == checksum10(isbn)\n\n}\n\n\n", "file_path": "src/isbn.rs", "rank": 40, "score": 38074.49731918196 }, { "content": "fn is13(isbn: &[u8]) -> bool {\n\n isbn.len() == 13 && isbn.iter().all(|&n| n < 10) && isbn[12] == checksum13(isbn)\n\n}\n\n\n", "file_path": "src/isbn.rs", "rank": 41, "score": 38074.49731918196 }, { "content": "fn checksum10(isbn: &[u8]) -> u8 {\n\n let checksum = isbn[..9]\n\n .iter()\n\n .enumerate()\n\n .fold(0, |acc, (i, &n)| acc + (i + 1) * n as usize);\n\n (checksum % 11) as u8\n\n}\n\n\n", "file_path": "src/isbn.rs", "rank": 42, "score": 38074.49731918196 }, { "content": "// Function that registers all exposed classes to Godot\n\nfn init(handle: InitHandle) {\n\n handle.add_class::<Project>();\n\n handle.add_class::<date::Date>();\n\n handle.add_class::<mailer::Mailer>();\n\n handle.add_class::<marc21::Marc21>();\n\n}\n\n\n\n// Macros that create the entry-points of the dynamic library.\n\ngodot_init!(init);\n", "file_path": "src/api/mod.rs", "rank": 43, "score": 37705.565237643954 }, { "content": "fn patch_0_8_0(db: &Database) -> api::Result<()> {\n\n const UPDATE_MAIL_PLACEHOLDERS: &str = \"\\\n\n update sbv_meta set \\\n\n value=replace(replace(value, '[mediumtitel]', '{booktitle}'), '[name]', '{username}') \\\n\n where key like 'mail.%.subject' or key like 'mail.%.content' \\\n\n \";\n\n db.db.execute(UPDATE_MAIL_PLACEHOLDERS)?;\n\n 
Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::*;\n\n use super::*;\n\n\n\n #[test]\n\n fn version_parsing() {\n\n assert!(\"0.0\".parse::<Version>().unwrap() == Version(0, 0, 0));\n\n assert!(\"1.0\".parse::<Version>().unwrap() == Version(0, 1, 0));\n\n assert!(\"3.55\".parse::<Version>().unwrap() == Version(0, 3, 55));\n", "file_path": "src/db/structure.rs", "rank": 44, "score": 35720.8107421717 }, { "content": "fn patch_0_6_3(db: &Database) -> api::Result<()> {\n\n use std::fs::File;\n\n use std::io::BufReader;\n\n\n\n use gdnative::api::RegEx;\n\n\n\n fn regex_search(regex: &str, text: &str) -> String {\n\n let re = RegEx::new();\n\n if re.compile(regex).is_err() {\n\n gdnative::godot_error!(\"Malformed regex: {}\", regex);\n\n return String::new();\n\n }\n\n re.search(text, 0, -1)\n\n .map(|s| unsafe { s.assume_safe().get_string(1).to_string() })\n\n .unwrap_or_default()\n\n }\n\n\n\n // apply new key setting names\n\n fn update(item: (String, String), db: &Database) -> (String, String) {\n\n match item.0.as_str() {\n", "file_path": "src/db/structure.rs", "rank": 45, "score": 35720.8107421717 }, { "content": "type MigrationRoutine = fn(&Database) -> api::Result<()>;\n\n\n\n/// Database migration routines\n\nconst PATCHES: [(Version, MigrationRoutine); 2] = [\n\n (Version(0, 6, 3), patch_0_6_3),\n\n (Version(0, 8, 0), patch_0_8_0),\n\n];\n\n\n", "file_path": "src/db/structure.rs", "rank": 46, "score": 34902.38925948496 }, { "content": "fn parse_record(record: roxmltree::Node) -> Record {\n\n let mut r = Record::default();\n\n let mut persons = Vec::new();\n\n for datafield in record\n\n .descendants()\n\n .filter(|x| x.has_tag_name(\"datafield\") && x.has_attribute(\"tag\"))\n\n {\n\n match datafield.attribute(\"tag\").unwrap() {\n\n ISBN_COSTS_TAG => {\n\n subfield(datafield, ISBN_CODE).map_or((), |t| r.isbns.push(t));\n\n subfield(datafield, COSTS_CODE).map_or((), |t| r.data.costs = parse_costs(&t))\n\n }\n\n EAN_TAG => 
subfield(datafield, EAN_CODE).map_or((), |t| r.isbns.push(t)),\n\n TITLE_TAG => {\n\n subfield(datafield, TITLE_CODE).map_or((), |t| r.data.title = t);\n\n // Add subtitle if the title is to short\n\n if r.data.title.len() < SHORT_TITLE_LEN {\n\n subfield(datafield, SUBTITLE_CODE).map_or((), |t| {\n\n if !t.is_empty() {\n\n r.data.title.push_str(\" - \");\n", "file_path": "src/provider/marc21.rs", "rank": 47, "score": 34902.38925948496 }, { "content": "use raw::StatementExt;\n\n\n\nuse super::PKG_VERSION;\n\n\n\npub struct Database {\n\n path: PathBuf,\n\n db: sqlite::Connection,\n\n}\n\n\n\nimpl fmt::Debug for Database {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"Database {{ {:?} }}\", self.path)\n\n }\n\n}\n\n\n\nimpl Database {\n\n /// Creates a new database at the given path.\n\n pub fn create(path: &str) -> api::Result<Database> {\n\n let path = PathBuf::from(path);\n\n if !path.exists() {\n", "file_path": "src/db/mod.rs", "rank": 48, "score": 12.057692613854508 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::api;\n\n\n\nuse super::raw::{DatabaseExt, StatementExt};\n\nuse super::{DBIter, Database, ReadStmt};\n\n\n\n// Query\n\nconst FETCH_USER: &str = \"\\\n\n select \\\n\n account, \\\n\n forename, \\\n\n surname, \\\n\n role, \\\n\n may_borrow \\\n\n from user \\\n\n where account=? 
\\\n\n\";\n\n\n\nconst QUERY_USERS: &str = \"\\\n", "file_path": "src/db/user.rs", "rank": 49, "score": 8.804598333294393 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::{Database, ReadStmt, StatementExt};\n\nuse crate::api;\n\n\n\nconst STATS: &str = \"\\\n\n select \\\n\n (select count(*) from medium) as books, \\\n\n (select count(distinct name) from author) as authors, \\\n\n (select count(*) from user) as users, \\\n\n (select count(*) from medium where borrower <> '') as borrows, \\\n\n (select count(*) from medium where reservation <> '') as reservations, \\\n\n (select count(*) from medium where borrower <> '' and JulianDay(date('now')) > JulianDay(date(deadline))) as overdues \\\n\n\";\n\n\n\n/// Data object for book.\n\n#[derive(Debug, Clone, gdnative::ToVariant, gdnative::FromVariant)]\n\n#[cfg_attr(test, derive(PartialEq, Default))]\n\npub struct Stats {\n\n pub books: usize,\n", "file_path": "src/db/stats.rs", "rank": 50, "score": 8.121406366253382 }, { "content": "use std::collections::hash_map::HashMap;\n\nuse std::fmt;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse crate::api;\n\n\n\npub mod book;\n\npub use book::{Book, BookSearch, BookState};\n\npub mod category;\n\npub use category::Category;\n\npub mod lending;\n\npub mod raw;\n\npub mod settings;\n\npub use settings::Settings;\n\npub mod stats;\n\npub use stats::Stats;\n\npub mod structure;\n\npub mod user;\n\npub use user::User;\n\n\n", "file_path": "src/db/mod.rs", "rank": 51, "score": 8.113334934428547 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::api;\n\n\n\nuse super::raw::{DatabaseExt, StatementExt};\n\nuse super::{DBIter, Database, ReadStmt};\n\n\n\nconst FETCH: &str = \"\\\n\n select \\\n\n id, \\\n\n isbn, \\\n\n title, \\\n\n publisher, \\\n\n year, \\\n\n costs, \\\n\n note, \\\n\n borrowable, \\\n\n category, \\\n\n ifnull(group_concat(author.name),'') as authors, \\\n\n borrower, \\\n", "file_path": "src/db/book.rs", "rank": 52, "score": 
7.797933616599581 }, { "content": " delete from category where id=? \\\n\n\";\n\nconst REFERENCED: &str = \"\\\n\n select count(id) from medium where category=? \\\n\n\";\n\n\n\n#[derive(Debug, Clone, gdnative::ToVariant, gdnative::FromVariant)]\n\npub struct Category {\n\n pub id: String,\n\n pub name: String,\n\n pub section: String,\n\n}\n\n\n\nimpl Category {\n\n fn is_valid(&self) -> bool {\n\n !self.id.trim().is_empty()\n\n && !self.name.trim().is_empty()\n\n && !self.section.trim().is_empty()\n\n }\n\n}\n", "file_path": "src/db/category.rs", "rank": 53, "score": 7.753970942812158 }, { "content": "use std::iter::FromIterator;\n\n\n\nuse gdnative::prelude::*;\n\n\n\nuse crate::api::{self, Error};\n\nuse crate::db;\n\n\n\n/// The Global Project Singleton\n\n#[derive(NativeClass, Debug)]\n\n#[inherit(Node)]\n\npub struct Project {\n\n db: Option<db::Database>,\n\n settings: Option<db::Settings>,\n\n}\n\n\n\n#[methods]\n\nimpl Project {\n\n /// Creates a new Project object.\n\n /// This functions should not be called directly as this class is a singleton.\n\n fn new(_owner: &Node) -> Self {\n", "file_path": "src/api/project.rs", "rank": 54, "score": 7.410112802245415 }, { "content": "use crate::api;\n\nuse crate::provider::BookData;\n\n\n\nuse unicode_normalization::UnicodeNormalization;\n\n\n\nconst ISBN_COSTS_TAG: &str = \"020\";\n\nconst ISBN_CODE: &str = \"a\";\n\nconst COSTS_CODE: &str = \"c\";\n\nconst EAN_TAG: &str = \"024\";\n\nconst EAN_CODE: &str = \"a\";\n\nconst TITLE_TAG: &str = \"245\";\n\nconst TITLE_CODE: &str = \"a\";\n\nconst SUBTITLE_CODE: &str = \"p\";\n\nconst AUTHOR_TAG: &str = \"100\";\n\nconst AUTHOR_CODE: &str = \"a\";\n\nconst PERSON_TAG: &str = \"700\";\n\nconst PERSON_CODE: &str = \"a\";\n\nconst PUBLISHER_TAG: &str = \"264\";\n\nconst PUBLISHER_CODE: &str = \"b\";\n\n\n", "file_path": "src/provider/marc21.rs", "rank": 55, "score": 7.334971424422255 }, { "content": "use chrono::Datelike;\n\nuse gdnative::prelude::*;\n\n\n\nuse 
crate::api;\n\n\n\n/// The Date wrapper \"class\"\n\n///\n\n/// It provides two date formats:\n\n/// - The iso date: %Y-%m-%d like 2001-07-08\n\n/// - The locale date, which is based on the language of the OS (en: %m/%d/%y)\n\n#[derive(NativeClass, Debug)]\n\n#[register_with(Date::register)]\n\n#[inherit(Reference)]\n\npub struct Date {\n\n date: chrono::NaiveDate,\n\n}\n\n\n\n#[methods]\n\nimpl Date {\n\n fn new(_owner: &Reference) -> Self {\n", "file_path": "src/api/date.rs", "rank": 56, "score": 7.122241646827378 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::api;\n\n\n\nuse super::raw::DatabaseExt;\n\nuse super::{DBIter, Database, ReadStmt};\n\n\n\nconst LIST: &str = \"\\\n\n select id, name, section from category order by section, id \\\n\n\";\n\nconst ADD: &str = \"\\\n\n insert into category values (?, ?, ?) \\\n\n\";\n\nconst UPDATE: &str = \"\\\n\n update category set id=?, name=?, section=? where id=? \\\n\n\";\n\nconst UPDATE_MEDIA: &str = \"\\\n\n update medium set category=? where category=? 
\\\n\n\";\n\nconst DELETE: &str = \"\\\n", "file_path": "src/db/category.rs", "rank": 57, "score": 7.003057816489408 }, { "content": " pub publisher: String,\n\n pub year: i64,\n\n pub costs: f64,\n\n pub note: String,\n\n pub borrowable: bool,\n\n pub category: String,\n\n pub authors: Vec<String>,\n\n pub borrower: String,\n\n pub deadline: String,\n\n pub reservation: String,\n\n}\n\n\n\nimpl Book {\n\n fn is_valid(&self) -> bool {\n\n !self.id.trim().is_empty() && !self.title.trim().is_empty()\n\n }\n\n}\n\n\n\nimpl ReadStmt for Book {\n\n type Error = api::Error;\n", "file_path": "src/db/book.rs", "rank": 58, "score": 6.935547669219481 }, { "content": " #[cfg(test)]\n\n fn memory() -> api::Result<Database> {\n\n Ok(Database {\n\n path: PathBuf::new(),\n\n db: sqlite::open(\":memory:\")?,\n\n })\n\n }\n\n}\n\n\n\n/// Iterator over database results.\n\npub struct DBIter<'a, T> {\n\n stmt: sqlite::Statement<'a>,\n\n columns: HashMap<String, usize>,\n\n ty: std::marker::PhantomData<T>,\n\n}\n\n\n\nimpl<'a, T> DBIter<'a, T> {\n\n pub fn new(stmt: sqlite::Statement<'a>) -> Self {\n\n DBIter {\n\n columns: stmt.columns(),\n\n stmt,\n\n ty: std::marker::PhantomData,\n\n }\n\n }\n\n}\n\n\n\n/// Conversion from database entries.\n", "file_path": "src/db/mod.rs", "rank": 59, "score": 6.764737439308593 }, { "content": "use std::collections::HashMap;\n\n\n\nuse super::{Book, DBIter, Database, ReadStmt, User};\n\nuse crate::api;\n\n\n\nconst UPDATE_LEND: &str = \"\\\n\n update medium set borrower=?, deadline=? where id=? \\\n\n\";\n\nconst UPDATE_REVOKE: &str = \"\\\n\n update medium set borrower='', deadline='' where id=? \\\n\n\";\n\nconst UPDATE_RESERVE: &str = \"\\\n\n update medium set reservation=? where id=? \\\n\n\";\n\nconst UPDATE_RELEASE: &str = \"\\\n\n update medium set reservation='' where id=? 
\\\n\n\";\n\nconst QUERY_EXPIRED: &str = \"\\\n\n select \\\n\n id, \\\n", "file_path": "src/db/lending.rs", "rank": 60, "score": 6.643205190113555 }, { "content": " pub authors: usize,\n\n pub users: usize,\n\n pub borrows: usize,\n\n pub reservations: usize,\n\n pub overdues: usize,\n\n}\n\n\n\nimpl ReadStmt for Stats {\n\n type Error = api::Error;\n\n\n\n fn read(stmt: &sqlite::Statement<'_>, columns: &HashMap<String, usize>) -> api::Result<Stats> {\n\n Ok(Stats {\n\n books: stmt.read::<i64>(columns[\"books\"])? as _,\n\n authors: stmt.read::<i64>(columns[\"authors\"])? as _,\n\n users: stmt.read::<i64>(columns[\"users\"])? as _,\n\n borrows: stmt.read::<i64>(columns[\"borrows\"])? as _,\n\n reservations: stmt.read::<i64>(columns[\"reservations\"])? as _,\n\n overdues: stmt.read::<i64>(columns[\"overdues\"])? as _,\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/db/stats.rs", "rank": 61, "score": 6.545020990152955 }, { "content": "use std::fmt::{self, Display};\n\nuse std::str::FromStr;\n\n\n\nuse crate::api;\n\n\n\nuse super::{raw::DatabaseExt, settings, settings::Settings, Database};\n\n\n\nconst CREATE_TABLES: &str = \"\\\n\n create table sbv_meta ( \\\n\n key text primary key, \\\n\n value text not null); \\\n\n \\\n\n create table author ( \\\n\n name text not null, \\\n\n medium text not null, \\\n\n primary key (name, medium)); \\\n\n \\\n\n create table user ( \\\n\n account text not null primary key, \\\n\n forename text not null, \\\n", "file_path": "src/db/structure.rs", "rank": 62, "score": 6.371121569142714 }, { "content": " update author set medium=? where medium=? \\\n\n\";\n\nconst DELETE: &str = \"\\\n\n delete from medium where id=? \\\n\n\";\n\nconst DELETE_UNUSED_AUTHORS: &str = \"\\\n\n delete from author where medium not in (select id from medium) \\\n\n\";\n\nconst UNUSED_ID: &str = \"\\\n\n select max(substr(id, ? 
+ 2)) from medium \\\n\n where id like ?||'%' order by id \\\n\n\";\n\n\n\n/// Data object for book.\n\n#[derive(Debug, Clone, gdnative::ToVariant, gdnative::FromVariant)]\n\n#[cfg_attr(test, derive(PartialEq, Default))]\n\npub struct Book {\n\n pub id: String,\n\n pub isbn: String,\n\n pub title: String,\n", "file_path": "src/db/book.rs", "rank": 63, "score": 6.34671549997047 }, { "content": "use gdnative::prelude::*;\n\n\n\nuse crate::api;\n\nuse crate::provider::{marc21, BookData};\n\n\n\n/// The Marc21Parser wrapper \"class\"\n\n#[derive(NativeClass)]\n\n#[inherit(Reference)]\n\npub struct Marc21 {}\n\n\n\n#[methods]\n\nimpl Marc21 {\n\n fn new(_owner: &Reference) -> Self {\n\n Marc21 {}\n\n }\n\n\n\n /// Perform a request to the DNB and fetch the metadata for the given isbn.\n\n #[export]\n\n fn parse(&self, _owner: &Reference, isbn: String, response: String) -> api::Result<BookData> {\n\n if let Ok(isbn) = crate::isbn::parse(&isbn) {\n\n marc21::parse(&response, &isbn)\n\n } else {\n\n Err(api::Error::InvalidISBN)\n\n }\n\n }\n\n}\n", "file_path": "src/api/marc21.rs", "rank": 64, "score": 6.319221542978843 }, { "content": "use gdnative::prelude::*;\n\n\n\nmod error;\n\npub use error::*;\n\n\n\nmod project;\n\nmod date;\n\nmod mailer;\n\nmod marc21;\n\n\n\nuse project::Project;\n\n\n\n// Function that registers all exposed classes to Godot\n", "file_path": "src/api/mod.rs", "rank": 65, "score": 6.230352920325357 }, { "content": "}\n\n\n\nimpl gdnative::core_types::FromVariant for Error {\n\n fn from_variant(\n\n variant: &gdnative::core_types::Variant,\n\n ) -> std::result::Result<Self, gdnative::core_types::FromVariantError> {\n\n i64::from_variant(variant).and_then(|x| {\n\n if 0 < x || x <= Error::UnsupportedProjectVersion as i64 {\n\n Ok(unsafe { std::mem::transmute(x) })\n\n } else {\n\n Err(gdnative::core_types::FromVariantError::Unspecified)\n\n }\n\n })\n\n }\n\n}\n\n\n\nimpl ToVariant for Error {\n\n #[inline]\n\n fn to_variant(&self) -> Variant {\n\n 
(*self as i64).to_variant()\n\n }\n\n}\n\n\n\n/// Result type using the api error.\n\npub type Result<T> = std::result::Result<T, Error>;\n", "file_path": "src/api/error.rs", "rank": 66, "score": 5.936579758712144 }, { "content": "use crate::api;\n\nuse std::collections::HashMap;\n\n\n\nuse super::{DBIter, Database, ReadStmt};\n\n\n\nconst SETTINGS_FETCH: &str = \"\\\n\n select key, value from sbv_meta \\\n\n\";\n\n\n\nconst SETTINGS_UPDATE: &str = \"\\\n\n replace into sbv_meta values \\\n\n ('borrowing.duration', ?), \\\n\n ('user.path', ?), \\\n\n ('user.delimiter', ?), \\\n\n ('dnb.token', ?), \\\n\n ('mail.lastReminder', ?), \\\n\n ('mail.from', ?), \\\n\n ('mail.host', ?), \\\n\n ('mail.password', ?), \\\n\n ('mail.info.subject', ?), \\\n", "file_path": "src/db/settings.rs", "rank": 68, "score": 5.907978768194472 }, { "content": "\";\n\n\n\n/// Data object for a user.\n\n#[derive(Debug, Clone, gdnative::ToVariant, gdnative::FromVariant)]\n\n#[cfg_attr(test, derive(PartialEq, Default))]\n\npub struct User {\n\n pub account: String,\n\n pub forename: String,\n\n pub surname: String,\n\n pub role: String,\n\n pub may_borrow: bool,\n\n}\n\n\n\nimpl User {\n\n fn is_valid(&self) -> bool {\n\n !self.account.trim().is_empty()\n\n && !self.forename.trim().is_empty()\n\n && !self.surname.trim().is_empty()\n\n }\n\n}\n", "file_path": "src/db/user.rs", "rank": 69, "score": 5.811439114278237 }, { "content": " pub mail_password: String,\n\n // Mail Templates\n\n pub mail_info_subject: String,\n\n pub mail_info_content: String,\n\n pub mail_overdue_subject: String,\n\n pub mail_overdue_content: String,\n\n pub mail_overdue2_subject: String,\n\n pub mail_overdue2_content: String,\n\n}\n\n\n\nimpl Default for Settings {\n\n fn default() -> Settings {\n\n Settings {\n\n borrowing_duration: 28,\n\n user_path: String::new(),\n\n user_delimiter: \",\".into(),\n\n dnb_token: String::new(),\n\n mail_last_reminder: String::new(),\n\n mail_from: String::new(),\n\n mail_host: 
String::new(),\n", "file_path": "src/db/settings.rs", "rank": 70, "score": 5.400863071976204 }, { "content": "\n\nimpl ReadStmt for (String, String) {\n\n type Error = api::Error;\n\n fn read(\n\n stmt: &sqlite::Statement,\n\n _columns: &HashMap<String, usize>,\n\n ) -> Result<Self, Self::Error> {\n\n Ok((stmt.read(0)?, stmt.read(1)?))\n\n }\n\n}\n\n\n", "file_path": "src/db/settings.rs", "rank": 71, "score": 5.002329763628309 }, { "content": " mail_password: String::new(),\n\n mail_info_subject: String::new(),\n\n mail_info_content: String::new(),\n\n mail_overdue_subject: String::new(),\n\n mail_overdue_content: String::new(),\n\n mail_overdue2_subject: String::new(),\n\n mail_overdue2_content: String::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl Settings {\n\n pub fn from_iter<I: IntoIterator<Item = (String, String)>>(iter: I) -> Settings {\n\n let mut settings = Settings::default();\n\n for (key, value) in iter {\n\n match key.as_str() {\n\n \"version\" => {}\n\n \"borrowing.duration\" => {\n\n settings.borrowing_duration =\n\n value.parse().unwrap_or(settings.borrowing_duration)\n", "file_path": "src/db/settings.rs", "rank": 72, "score": 4.902491541643892 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq)]\n\npub enum BookState {\n\n None = 0,\n\n Borrowable,\n\n NotBorrowable,\n\n BorrowedOrReserved,\n\n}\n\n\n\nimpl Default for BookState {\n\n fn default() -> BookState {\n\n BookState::None\n\n }\n\n}\n\n\n\nimpl From<i64> for BookState {\n\n fn from(value: i64) -> Self {\n\n match value {\n\n 1 => BookState::Borrowable,\n\n 2 => BookState::NotBorrowable,\n\n 3 => BookState::BorrowedOrReserved,\n", "file_path": "src/db/book.rs", "rank": 73, "score": 4.844712141889328 }, { "content": "pub struct Mailer {\n\n /// Username of the sender.\n\n #[property]\n\n from: String,\n\n /// Host name of the mail server.\n\n #[property]\n\n host: String,\n\n /// Password of the sender.\n\n #[property]\n\n password: String,\n\n\n\n /// Internal sender thread.\n\n worker: 
Option<thread::JoinHandle<()>>,\n\n}\n\n\n\n#[methods]\n\nimpl Mailer {\n\n fn new(_owner: &Reference) -> Self {\n\n Mailer::default()\n\n }\n", "file_path": "src/api/mailer.rs", "rank": 74, "score": 4.434114948203584 }, { "content": "\n\nimpl ReadStmt for Category {\n\n type Error = api::Error;\n\n\n\n fn read(\n\n stmt: &sqlite::Statement<'_>,\n\n columns: &HashMap<String, usize>,\n\n ) -> api::Result<Category> {\n\n Ok(Category {\n\n id: stmt.read(columns[\"id\"])?,\n\n name: stmt.read(columns[\"name\"])?,\n\n section: stmt.read(columns[\"section\"])?,\n\n })\n\n }\n\n}\n\n\n\n/// Returns all categories.\n", "file_path": "src/db/category.rs", "rank": 75, "score": 4.338925845002178 }, { "content": "use std::time::Duration;\n\n\n\nuse crate::api;\n\n\n\nuse lettre::message::{header::ContentType, Mailbox, SinglePartBuilder};\n\nuse lettre::transport::smtp::authentication::Credentials;\n\nuse lettre::{Address, Message, SmtpTransport, Transport};\n\nuse unicode_normalization::UnicodeNormalization;\n\n\n", "file_path": "src/mail.rs", "rank": 76, "score": 4.015070186743786 }, { "content": "use gdnative::prelude::*;\n\n\n\n/// The api compatible error type.\n\n/// On the godot side there are specific error messages displayed for each of the error types.\n\n///\n\n/// More specific error messages are removed to be api compatible.\n\n/// Those messages are logged however.\n\n#[repr(i64)]\n\n#[derive(Debug, Clone, Copy)]\n\npub enum Error {\n\n Arguments,\n\n Logic,\n\n NoProject,\n\n FileNotFound,\n\n FileOpen,\n\n SQL,\n\n Network,\n\n InvalidFormat,\n\n NothingFound,\n\n // Specific errors\n", "file_path": "src/api/error.rs", "rank": 77, "score": 3.9758523126886947 }, { "content": " ('mail.info.content', ?), \\\n\n ('mail.overdue.subject', ?), \\\n\n ('mail.overdue.content', ?), \\\n\n ('mail.overdue2.subject', ?), \\\n\n ('mail.overdue2.content', ?) 
\\\n\n\";\n\n\n\n#[derive(Debug, PartialEq, Clone, gdnative::ToVariant, gdnative::FromVariant)]\n\npub struct Settings {\n\n // Borrowing\n\n pub borrowing_duration: i64,\n\n // User\n\n pub user_path: String,\n\n pub user_delimiter: String,\n\n // DNB\n\n pub dnb_token: String,\n\n // Mail\n\n pub mail_last_reminder: String,\n\n pub mail_from: String,\n\n pub mail_host: String,\n", "file_path": "src/db/settings.rs", "rank": 78, "score": 3.947314923485069 }, { "content": "mod api;\n\nmod db;\n\nmod provider;\n\nmod isbn;\n\nmod mail;\n\n\n\nconst PKG_NAME: &str = env!(\"CARGO_PKG_NAME\");\n\nconst PKG_VERSION: &str = env!(\"CARGO_PKG_VERSION\");\n\nconst PKG_REPOSITORY: &str = env!(\"CARGO_PKG_REPOSITORY\");\n\nconst PKG_AUTHORS: &str = env!(\"CARGO_PKG_AUTHORS\");\n\nconst PKG_DESCRIPTION: &str = env!(\"CARGO_PKG_DESCRIPTION\");\n\nconst PKG_LICENSE: &str = env!(\"CARGO_PKG_LICENSE\");\n", "file_path": "src/lib.rs", "rank": 79, "score": 3.7790793295064558 }, { "content": "\n\nimpl ReadStmt for User {\n\n type Error = api::Error;\n\n\n\n fn read(stmt: &sqlite::Statement<'_>, columns: &HashMap<String, usize>) -> api::Result<User> {\n\n Ok(User {\n\n account: stmt.read(columns[\"account\"])?,\n\n forename: stmt.read(columns[\"forename\"])?,\n\n surname: stmt.read(columns[\"surname\"])?,\n\n role: stmt.read(columns[\"role\"])?,\n\n may_borrow: stmt.read::<i64>(columns[\"may_borrow\"])? != 0,\n\n })\n\n }\n\n}\n\n\n\n/// Returns the user with the given `id`.\n", "file_path": "src/db/user.rs", "rank": 80, "score": 3.7219279415413644 }, { "content": "\";\n\nconst UPDATE_USER_BORROWS: &str = \"\n\n update medium set borrower=? where borrower=? \\\n\n\";\n\nconst UPDATE_USER_RESERVATIONS: &str = \"\\\n\n update medium set reservation=? where reservation=? \\\n\n\";\n\n\n\nconst DELETE_USER: &str = \"\\\n\n delete from user where account=? 
\\\n\n\";\n\nconst DELETE_UNUSED_USERS: &str = \"\\\n\n update medium set reservation='' where reservation not in (select account from user); \\\n\n update medium set borrower='' where borrower not in (select account from user); \\\n\n\";\n\nconst DELETE_USER_ROLES: &str = \"\\\n\n update user set role='' \\\n\n\";\n\nconst UPDATE_USER_ROLE: &str = \"\\\n\n update user set role=? where account=? \\\n", "file_path": "src/db/user.rs", "rank": 81, "score": 3.671225237741041 }, { "content": " and year between ? and ? \\\n\n and category like ? \\\n\n and note like '%'||?||'%' \\\n\n and (borrower like '%'||?||'%' or reservation like '%'||?||'%') \\\n\n and borrowable like ? \\\n\n\";\n\n\n\nconst ADD: &str = \"\\\n\n insert into medium values (?, ?, ?, ?, ?, ?, ?, ?, ?, '', '', '') \\\n\n\";\n\n\n\nconst ADD_AUTHOR: &str = \"\\\n\n insert or ignore into author values (?, ?) \\\n\n\";\n\nconst UPDATE: &str = \"\\\n\n update medium \\\n\n set id=?, isbn=?, title=?, publisher=?, year=?, costs=?, note=?, borrowable=?, category=? \\\n\n where id=? 
\\\n\n\";\n\nconst UPDATE_AUTHORS: &str = \"\\\n", "file_path": "src/db/book.rs", "rank": 82, "score": 3.596456907982267 }, { "content": " let database = Database {\n\n db: sqlite::Connection::open_with_flags(\n\n &path,\n\n sqlite::OpenFlags::new().set_create().set_read_write(),\n\n )\n\n .map_err(|_| api::Error::FileOpen)?,\n\n path,\n\n };\n\n structure::create(&database, PKG_VERSION)?;\n\n Ok(database)\n\n } else {\n\n Err(api::Error::FileOpen)\n\n }\n\n }\n\n\n\n /// Opens a database connection to the given project database.\n\n pub fn open(path: &str) -> api::Result<(Database, bool)> {\n\n let path = PathBuf::from(path);\n\n if path.exists() {\n\n let database = Database {\n", "file_path": "src/db/mod.rs", "rank": 83, "score": 3.5019470280528213 }, { "content": "use std::thread;\n\n\n\nuse gdnative::prelude::*;\n\n\n\nuse crate::api;\n\nuse crate::mail;\n\n\n\n/// The mail class provides method for sending mail messages to users of the\n\n/// library.\n\n///\n\n/// Mail addresses are constructed in the form of `<account>@<host>`.\n\n///\n\n/// It is assumed that the sender and the recipients have the same mail\n\n/// server and that their username name matches the account name on the\n\n/// mail server.\n\n///\n\n/// The credentials are stored in the database.\n\n#[derive(NativeClass, Debug, Default)]\n\n#[inherit(Reference)]\n\n#[register_with(Self::register)]\n", "file_path": "src/api/mailer.rs", "rank": 84, "score": 3.4637436490102536 }, { "content": "pub mod marc21;\n\n\n\n#[derive(Debug, Default, PartialEq, gdnative::ToVariant, gdnative::FromVariant)]\n\npub struct BookData {\n\n title: String,\n\n authors: Vec<String>,\n\n publisher: String,\n\n costs: f64,\n\n}\n", "file_path": "src/provider/mod.rs", "rank": 85, "score": 3.3195725450875804 }, { "content": "\n\nimpl From<std::convert::Infallible> for Error {\n\n fn from(e: std::convert::Infallible) -> Error {\n\n godot_print!(\"convert::Infallible: {}\", e);\n\n Error::Arguments\n\n }\n\n}\n\n\n\nimpl 
From<std::io::Error> for Error {\n\n fn from(e: std::io::Error) -> Error {\n\n godot_print!(\"File Error: {:?}\", e);\n\n Error::FileOpen\n\n }\n\n}\n\n\n\nimpl From<roxmltree::Error> for Error {\n\n fn from(e: roxmltree::Error) -> Error {\n\n godot_print!(\"Invalid XML Format: {:?}\", e);\n\n Error::InvalidFormat\n\n }\n", "file_path": "src/api/error.rs", "rank": 86, "score": 3.2465235970010258 }, { "content": " /// Deletes the roles from all users and inserts the new roles.\n\n ///\n\n /// The roles of all users not contained in the given list are cleared.\n\n #[export]\n\n fn user_update_roles(&self, _owner: &Node, users: Vec<(String, String)>) -> api::Result<()> {\n\n let db = self.get_db()?;\n\n let users: Vec<(&str, &str)> = users\n\n .iter()\n\n .map(|(u, r)| (u.as_str(), r.as_str()))\n\n .collect();\n\n db::user::update_roles(db, &users)\n\n }\n\n\n\n // Category\n\n\n\n /// Fetches and returns all categories.\n\n #[export]\n\n fn category_list(&self, _owner: &Node) -> api::Result<VariantArray> {\n\n let result = db::category::list(self.get_db()?)?;\n\n Ok(VariantArray::from_iter(result).into_shared())\n", "file_path": "src/api/project.rs", "rank": 87, "score": 3.2347062818754853 }, { "content": " let deadline = deadline.format(\"%F\").to_string();\n\n gdnative::godot_print!(\n\n \"Lend {} to {} deadline {}\",\n\n &book.id,\n\n &user.account,\n\n &deadline\n\n );\n\n\n\n let mut stmt = db.db.prepare(UPDATE_LEND)?;\n\n stmt.bind(1, user.account.as_str())?;\n\n stmt.bind(2, deadline.as_str())?;\n\n stmt.bind(3, book.id.as_str())?;\n\n if stmt.next()? 
!= sqlite::State::Done {\n\n return Err(api::Error::SQL);\n\n }\n\n\n\n book.borrower = user.account.clone();\n\n book.deadline = deadline;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/db/lending.rs", "rank": 89, "score": 3.0916912185088212 }, { "content": " deadline text not null default '', \\\n\n reservation text not null default ''); \\\n\n\";\n\n\n\nconst FETCH_VERSION: &str = \"\\\n\n select value from sbv_meta where key='version' \\\n\n\";\n\nconst UPDATE_VERSION: &str = \"\\\n\n replace into sbv_meta values ('version', ?) \\\n\n\";\n\n\n\n/// Minimum supported version.\n\nconst MIN_VERSION: Version = Version(0, 6, 2);\n\n\n", "file_path": "src/db/structure.rs", "rank": 90, "score": 3.090767836212344 }, { "content": " select \\\n\n account, \\\n\n forename, \\\n\n surname, \\\n\n role, \\\n\n may_borrow \\\n\n \\\n\n from user \\\n\n where account like '%'||?1||'%' \\\n\n or forename like '%'||?1||'%' \\\n\n or surname like '%'||?1||'%' \\\n\n or role like '%'||?1||'%' \\\n\n order by account \\\n\n\";\n\n\n\nconst ADD_USER: &str = \"\\\n\n insert into user values (?, ?, ?, ?, ?) \\\n\n\";\n\nconst UPDATE_USER: &str = \"\\\n\n update user set account=?, forename=?, surname=?, role=?, may_borrow=? where account=? 
\\\n", "file_path": "src/db/user.rs", "rank": 91, "score": 2.9717618720441283 }, { "content": " assert!(\"0.0.0\".parse::<Version>().unwrap() == Version(0, 0, 0));\n\n assert!(\"0.1.0\".parse::<Version>().unwrap() == Version(0, 1, 0));\n\n assert!(\"0.9.22\".parse::<Version>().unwrap() == Version(0, 9, 22));\n\n assert!(\"10.9.22\".parse::<Version>().unwrap() == Version(10, 9, 22));\n\n assert!(\"255.255.255\".parse::<Version>().unwrap() == Version(255, 255, 255));\n\n\n\n assert!(\"10\".parse::<Version>().is_err());\n\n assert!(\"1.2.3.4\".parse::<Version>().is_err());\n\n assert!(\"0.-1\".parse::<Version>().is_err());\n\n assert!(\"1.2.-2\".parse::<Version>().is_err());\n\n assert!(\"..\".parse::<Version>().is_err());\n\n\n\n assert!(PKG_VERSION.parse::<Version>().is_ok());\n\n }\n\n\n\n #[test]\n\n fn create_tables() {\n\n use book::Book;\n\n use category::Category;\n\n use user::User;\n", "file_path": "src/db/structure.rs", "rank": 92, "score": 2.9601852268029516 }, { "content": " category,\n\n &author_prefix[..author_prefix.len().min(4)],\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::*;\n\n use super::*;\n\n\n\n #[test]\n\n fn id_prefix() {\n\n use super::id_prefix;\n\n assert_eq!(id_prefix(\"Isabel Abedi\", \"FANT\"), \"FANT ABED\".to_string());\n\n assert_eq!(id_prefix(\"Isabel Äbedi\", \"FANT\"), \"FANT ABED\".to_string());\n\n assert_eq!(id_prefix(\"\", \"FANT\"), \"FANT XXXX\".to_string());\n\n assert_eq!(id_prefix(\"äÖü\", \"FANT\"), \"FANT AOU\".to_string());\n\n assert_eq!(\n\n id_prefix(\"Remigius Bäumer\", \"RErk\"),\n\n \"RErk BAUM\".to_string()\n", "file_path": "src/db/book.rs", "rank": 93, "score": 2.844230602198773 }, { "content": " _ => BookState::None,\n\n }\n\n }\n\n}\n\n\n\nimpl gdnative::core_types::ToVariant for BookState {\n\n fn to_variant(&self) -> gdnative::core_types::Variant {\n\n (*self as i64).to_variant()\n\n }\n\n}\n\n\n\nimpl gdnative::core_types::FromVariant for BookState {\n\n fn from_variant(\n\n variant: 
&gdnative::core_types::Variant,\n\n ) -> Result<Self, gdnative::core_types::FromVariantError> {\n\n i64::from_variant(variant).map(|x| x.into())\n\n }\n\n}\n\n\n\n/// Returns the book with the given `id`.\n", "file_path": "src/db/book.rs", "rank": 94, "score": 2.8138987213047173 }, { "content": "\n\nimpl Drop for Mailer {\n\n fn drop(&mut self) {\n\n if let Some(thread) = self.worker.take() {\n\n if let Err(e) = thread.join() {\n\n godot_print!(\"Error dropping mailer worker: {:?}\", e);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/api/mailer.rs", "rank": 95, "score": 2.791385189264006 }, { "content": " }\n\n 0.0\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs;\n\n\n\n #[test]\n\n fn parse_single_record() {\n\n let response = fs::read_to_string(\"test/data/dnb/dnb-response_9783570303337.xml\").unwrap();\n\n let data = parse(&response, \"9783570303337\").unwrap();\n\n assert_eq!(\n\n data,\n\n BookData {\n\n title: \"Eragon - Das Vermächtnis der Drachenreiter\".into(),\n\n authors: vec![\"Christopher Paolini\".into()],\n\n publisher: \"cbj\".into(),\n\n costs: 9.95,\n", "file_path": "src/provider/marc21.rs", "rank": 96, "score": 2.7888452754699617 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::super::*;\n\n use super::*;\n\n\n\n #[test]\n\n fn add_update_remove_users() {\n\n let db = Database::memory().unwrap();\n\n structure::create(&db, PKG_VERSION).unwrap();\n\n\n\n let user = User {\n\n account: \"foo.bar\".into(),\n\n forename: \"Foo\".into(),\n\n surname: \"Bar\".into(),\n\n role: \"Demo\".into(),\n\n may_borrow: true,\n\n };\n\n user::add(&db, &user).unwrap();\n\n\n\n let result: Vec<User> = user::search(&db, \"\").unwrap().collect();\n", "file_path": "src/db/user.rs", "rank": 97, "score": 2.691914574573075 }, { "content": "impl From<lettre::error::Error> for api::Error {\n\n fn from(e: lettre::error::Error) -> Self {\n\n gdnative::godot_print!(\"Invalid Mail Format {:?}\", e);\n\n api::Error::Arguments\n\n 
}\n\n}\n\nimpl From<lettre::transport::smtp::Error> for api::Error {\n\n fn from(e: lettre::transport::smtp::Error) -> Self {\n\n gdnative::godot_print!(\"Mail SMTP Error {:?}\", e);\n\n api::Error::Network\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n #[test]\n\n #[ignore]\n\n fn send_mail() {\n\n super::send(\n\n &std::env::var(\"SBV_MAIL_HOST\").unwrap(),\n", "file_path": "src/mail.rs", "rank": 98, "score": 2.605962626959969 }, { "content": " deadline, \\\n\n reservation \\\n\n \\\n\n from medium \\\n\n left join author on author.medium=id \\\n\n where id=? \\\n\n group by id \\\n\n\";\n\n\n\nconst SEARCH: &str = \"\\\n\n select \\\n\n id, \\\n\n isbn, \\\n\n title, \\\n\n publisher, \\\n\n year, \\\n\n costs, \\\n\n note, \\\n\n borrowable, \\\n\n category, \\\n", "file_path": "src/db/book.rs", "rank": 99, "score": 2.5227641690754607 } ]
Rust
webnis-server/src/webnis.rs
EwoutH/webnis
078814931bd5dab47eebe0c21d32d3045a4d6040
use std::collections::HashMap; use std::sync::Arc; use actix_web::http::StatusCode; use actix_web::HttpResponse; use serde_json; use crate::config; use crate::db; use crate::db::MapType; use crate::errors::WnError; use crate::format; use crate::iplist::IpList; use crate::lua; use crate::util::*; #[derive(Clone)] pub(crate) struct Webnis { pub inner: Arc<WebnisInner>, } pub(crate) struct WebnisInner { pub config: config::Config, pub securenets: Option<IpList>, } impl Webnis { pub fn new(config: config::Config, securenets: Option<IpList>) -> Webnis { Webnis { inner: Arc::new(WebnisInner { config: config, securenets: securenets, }), } } } impl Webnis { pub fn handle_info(&self, domain: &str) -> HttpResponse { let domain = match self.inner.config.find_domain(domain) { None => return json_error(StatusCode::BAD_REQUEST, None, "Domain not found"), Some(d) => d, }; let mut maps = HashMap::new(); for mapname in &domain.maps { let mut map_keys = Vec::new(); let mapvec = match self.inner.config.map_.get(mapname) { Some(i) => i, None => continue, }; for m in mapvec { let keys = m.key.iter().chain(m.keys.iter()).chain(m.key_alias.keys()); map_keys.extend(keys); } let mut hm = HashMap::new(); hm.insert("keys", map_keys); maps.insert(mapname, hm); } #[derive(Serialize)] struct Reply<T> { maps: T, } let r = Reply { maps: maps }; let reply = serde_json::to_value(r).unwrap(); json_result(StatusCode::OK, &reply) } pub fn handle_auth(&self, domain: String, is_json: bool, body: Vec<u8>) -> HttpResponse { let domain = match self.inner.config.find_domain(&domain) { None => return json_error(StatusCode::BAD_REQUEST, None, "Domain not found"), Some(d) => d, }; let authinfo = match AuthInfo::from_post_body(&body, is_json) { None => return json_error(StatusCode::BAD_REQUEST, None, "Body parameters missing"), Some(ai) => ai, }; let auth = match domain.auth.as_ref().and_then(|a| self.inner.config.auth.get(a)) { None => return json_error(StatusCode::NOT_FOUND, None, "Authentication not 
enabled"), Some(a) => a, }; if let Some(ref lua_func) = auth.lua_function { let lauth = lua::AuthInfo { username: authinfo.username, password: authinfo.password, map: auth.map.clone(), key: auth.key.clone(), extra: authinfo.extra, }; let res = match lua::lua_auth(lua_func, &domain.name, lauth) { Ok((serde_json::Value::Null, status)) => { if status == 0 { json_error( StatusCode::FORBIDDEN, Some(StatusCode::UNAUTHORIZED), "Password incorrect", ) } else { json_error( StatusCode::from_u16(status).unwrap(), Some(StatusCode::UNAUTHORIZED), "Password incorrect", ) } }, Ok((val, status)) => { if status == 0 { json_result(StatusCode::OK, &val) } else { json_result_raw(StatusCode::from_u16(status).unwrap(), &val) } }, Err(_) => json_error(StatusCode::INTERNAL_SERVER_ERROR, None, "Internal server error"), }; return res; } let auth_map = auth.map.as_ref().unwrap(); let auth_key = auth.key.as_ref().unwrap(); match self.auth_map(domain, auth_map, auth_key, &authinfo.username, &authinfo.password) { Ok(true) => json_result(StatusCode::OK, &json!({})), Ok(false) => { json_error( StatusCode::FORBIDDEN, Some(StatusCode::UNAUTHORIZED), "Password incorrect", ) }, Err(WnError::MapNotFound) => { return json_error(StatusCode::NOT_FOUND, None, "Associated auth map not found"); }, Err(_) => json_error(StatusCode::INTERNAL_SERVER_ERROR, None, "Internal server error"), } } fn auth_map( &self, dom: &config::Domain, map: &str, key: &str, username: &str, passwd: &str, ) -> Result<bool, WnError> { let (map, keyname) = match self.inner.config.find_map(map, key) { None => { warn!("auth_map: map {} with key {} not found", map, key); return Err(WnError::MapNotFound); }, Some(m) => m, }; let res = match map.map_type { MapType::Gdbm => self.lookup_gdbm_map(dom, map, username), MapType::Json => self.lookup_json_map(dom, map, keyname, username), _ => { warn!("auth_map: map {}: unsupported {:?}", map.name, map.map_type); return Err(WnError::DbOther); }, }; let json = match res { Ok(jv) => jv, 
Err(WnError::KeyNotFound) => return Ok(false), Err(e) => return Err(e), }; let res = match json.get("passwd").map(|p| p.as_str()).unwrap_or(None) { None => false, Some(hash) => check_unix_password(passwd, hash), }; Ok(res) } pub fn lua_map_auth( &self, domain: &str, map: &str, key: &str, username: &str, passwd: &str, ) -> Result<bool, WnError> { let domain = match self.inner.config.find_domain(&domain) { None => return Err(WnError::DbOther), Some(d) => d, }; self.auth_map(domain, map, key, username, passwd) } pub fn handle_map(&self, domain: &str, map: &str, query: &HashMap<String, String>) -> HttpResponse { let domain = match self.inner.config.find_domain(&domain) { None => return json_error(StatusCode::BAD_REQUEST, None, "Domain not found"), Some(d) => d, }; let (keyname, keyval) = match query.iter().next() { None => return json_error(StatusCode::BAD_REQUEST, None, "Query params missing"), Some(kv) => kv, }; let (map, keyname) = match self.inner.config.find_allowed_map(&domain, map, keyname) { None => return json_error(StatusCode::NOT_FOUND, None, "No such map"), Some(m) => m, }; let res = match map.map_type { MapType::Gdbm => self.lookup_gdbm_map(domain, map, keyval), MapType::Json => self.lookup_json_map(domain, map, keyname, keyval), MapType::Lua => self.lookup_lua_map(domain, map, keyname, keyval), MapType::None => unreachable!(), }; match res { Err(WnError::KeyNotFound) => json_error(StatusCode::NOT_FOUND, None, "No such key in map"), Err(WnError::MapNotFound) => json_error(StatusCode::NOT_FOUND, None, "No such map"), Err(WnError::UnknownFormat) => json_error(StatusCode::NOT_FOUND, None, "Unknown map format"), Err(WnError::SerializeJson(_)) => json_error(StatusCode::NOT_FOUND, None, "Serialize error"), Err(_) => json_error(StatusCode::INTERNAL_SERVER_ERROR, None, "Error reading database"), Ok(r) => json_result(StatusCode::OK, &r), } } pub fn lua_map_lookup( &self, domain: &str, mapname: &str, keyname: &str, keyval: &str, ) -> Result<serde_json::Value, 
WnError> { let domain = match self.inner.config.find_domain(&domain) { None => return Err(WnError::DbOther), Some(d) => d, }; let (map, keyname) = match self.inner.config.find_map(mapname, keyname) { None => return Err(WnError::MapNotFound), Some(m) => m, }; let res = match map.map_type { MapType::Gdbm => self.lookup_gdbm_map(domain, map, keyval), MapType::Json => self.lookup_json_map(domain, map, keyname, keyval), _ => Err(WnError::Other), }; match res { Err(WnError::KeyNotFound) => Ok(json!(null)), x => x, } } fn lookup_gdbm_map( &self, dom: &config::Domain, map: &config::Map, keyval: &str, ) -> Result<serde_json::Value, WnError> { let format = match map.map_format { None => return Err(WnError::UnknownFormat), Some(ref s) => s, }; let path = format!("{}/{}", dom.db_dir, map.map_file.as_ref().unwrap()); let line = db::gdbm_lookup(&path, keyval)?; format::line_to_json(&line, format, &map.map_output) } fn lookup_json_map( &self, dom: &config::Domain, map: &config::Map, keyname: &str, keyval: &str, ) -> Result<serde_json::Value, WnError> { let path = format!("{}/{}", dom.db_dir, map.map_file.as_ref().unwrap()); db::json_lookup(path, keyname, keyval) } fn lookup_lua_map( &self, dom: &config::Domain, map: &config::Map, keyname: &str, keyval: &str, ) -> Result<serde_json::Value, WnError> { match lua::lua_map(&map.lua_function.as_ref().unwrap(), &dom.name, keyname, keyval) { Ok(serde_json::Value::Null) => Err(WnError::KeyNotFound), Ok(m) => Ok(m), Err(_) => Err(WnError::Other), } } }
use std::collections::HashMap; use std::sync::Arc; use actix_web::http::StatusCode; use actix_web::HttpResponse; use serde_json; use crate::config; use crate::db; use crate::db::MapType; use crate::errors::WnError; use crate::format; use crate::iplist::IpList; use crate::lua; use crate::util::*; #[derive(Clone)] pub(crate) struct Webnis { pub inner: Arc<WebnisInner>, } pub(crate) struct WebnisInner { pub config: config::Config, pub securenets: Option<IpList>, } impl Webnis { pub fn new(config: config::Config, securenets: Option<IpList>) -> Webnis { Webnis { inner: Arc::new(WebnisInner { config: config, securenets: securenets, }), } } } impl Webnis {
pub fn handle_auth(&self, domain: String, is_json: bool, body: Vec<u8>) -> HttpResponse { let domain = match self.inner.config.find_domain(&domain) { None => return json_error(StatusCode::BAD_REQUEST, None, "Domain not found"), Some(d) => d, }; let authinfo = match AuthInfo::from_post_body(&body, is_json) { None => return json_error(StatusCode::BAD_REQUEST, None, "Body parameters missing"), Some(ai) => ai, }; let auth = match domain.auth.as_ref().and_then(|a| self.inner.config.auth.get(a)) { None => return json_error(StatusCode::NOT_FOUND, None, "Authentication not enabled"), Some(a) => a, }; if let Some(ref lua_func) = auth.lua_function { let lauth = lua::AuthInfo { username: authinfo.username, password: authinfo.password, map: auth.map.clone(), key: auth.key.clone(), extra: authinfo.extra, }; let res = match lua::lua_auth(lua_func, &domain.name, lauth) { Ok((serde_json::Value::Null, status)) => { if status == 0 { json_error( StatusCode::FORBIDDEN, Some(StatusCode::UNAUTHORIZED), "Password incorrect", ) } else { json_error( StatusCode::from_u16(status).unwrap(), Some(StatusCode::UNAUTHORIZED), "Password incorrect", ) } }, Ok((val, status)) => { if status == 0 { json_result(StatusCode::OK, &val) } else { json_result_raw(StatusCode::from_u16(status).unwrap(), &val) } }, Err(_) => json_error(StatusCode::INTERNAL_SERVER_ERROR, None, "Internal server error"), }; return res; } let auth_map = auth.map.as_ref().unwrap(); let auth_key = auth.key.as_ref().unwrap(); match self.auth_map(domain, auth_map, auth_key, &authinfo.username, &authinfo.password) { Ok(true) => json_result(StatusCode::OK, &json!({})), Ok(false) => { json_error( StatusCode::FORBIDDEN, Some(StatusCode::UNAUTHORIZED), "Password incorrect", ) }, Err(WnError::MapNotFound) => { return json_error(StatusCode::NOT_FOUND, None, "Associated auth map not found"); }, Err(_) => json_error(StatusCode::INTERNAL_SERVER_ERROR, None, "Internal server error"), } } fn auth_map( &self, dom: &config::Domain, map: &str, key: 
&str, username: &str, passwd: &str, ) -> Result<bool, WnError> { let (map, keyname) = match self.inner.config.find_map(map, key) { None => { warn!("auth_map: map {} with key {} not found", map, key); return Err(WnError::MapNotFound); }, Some(m) => m, }; let res = match map.map_type { MapType::Gdbm => self.lookup_gdbm_map(dom, map, username), MapType::Json => self.lookup_json_map(dom, map, keyname, username), _ => { warn!("auth_map: map {}: unsupported {:?}", map.name, map.map_type); return Err(WnError::DbOther); }, }; let json = match res { Ok(jv) => jv, Err(WnError::KeyNotFound) => return Ok(false), Err(e) => return Err(e), }; let res = match json.get("passwd").map(|p| p.as_str()).unwrap_or(None) { None => false, Some(hash) => check_unix_password(passwd, hash), }; Ok(res) } pub fn lua_map_auth( &self, domain: &str, map: &str, key: &str, username: &str, passwd: &str, ) -> Result<bool, WnError> { let domain = match self.inner.config.find_domain(&domain) { None => return Err(WnError::DbOther), Some(d) => d, }; self.auth_map(domain, map, key, username, passwd) } pub fn handle_map(&self, domain: &str, map: &str, query: &HashMap<String, String>) -> HttpResponse { let domain = match self.inner.config.find_domain(&domain) { None => return json_error(StatusCode::BAD_REQUEST, None, "Domain not found"), Some(d) => d, }; let (keyname, keyval) = match query.iter().next() { None => return json_error(StatusCode::BAD_REQUEST, None, "Query params missing"), Some(kv) => kv, }; let (map, keyname) = match self.inner.config.find_allowed_map(&domain, map, keyname) { None => return json_error(StatusCode::NOT_FOUND, None, "No such map"), Some(m) => m, }; let res = match map.map_type { MapType::Gdbm => self.lookup_gdbm_map(domain, map, keyval), MapType::Json => self.lookup_json_map(domain, map, keyname, keyval), MapType::Lua => self.lookup_lua_map(domain, map, keyname, keyval), MapType::None => unreachable!(), }; match res { Err(WnError::KeyNotFound) => json_error(StatusCode::NOT_FOUND, 
None, "No such key in map"), Err(WnError::MapNotFound) => json_error(StatusCode::NOT_FOUND, None, "No such map"), Err(WnError::UnknownFormat) => json_error(StatusCode::NOT_FOUND, None, "Unknown map format"), Err(WnError::SerializeJson(_)) => json_error(StatusCode::NOT_FOUND, None, "Serialize error"), Err(_) => json_error(StatusCode::INTERNAL_SERVER_ERROR, None, "Error reading database"), Ok(r) => json_result(StatusCode::OK, &r), } } pub fn lua_map_lookup( &self, domain: &str, mapname: &str, keyname: &str, keyval: &str, ) -> Result<serde_json::Value, WnError> { let domain = match self.inner.config.find_domain(&domain) { None => return Err(WnError::DbOther), Some(d) => d, }; let (map, keyname) = match self.inner.config.find_map(mapname, keyname) { None => return Err(WnError::MapNotFound), Some(m) => m, }; let res = match map.map_type { MapType::Gdbm => self.lookup_gdbm_map(domain, map, keyval), MapType::Json => self.lookup_json_map(domain, map, keyname, keyval), _ => Err(WnError::Other), }; match res { Err(WnError::KeyNotFound) => Ok(json!(null)), x => x, } } fn lookup_gdbm_map( &self, dom: &config::Domain, map: &config::Map, keyval: &str, ) -> Result<serde_json::Value, WnError> { let format = match map.map_format { None => return Err(WnError::UnknownFormat), Some(ref s) => s, }; let path = format!("{}/{}", dom.db_dir, map.map_file.as_ref().unwrap()); let line = db::gdbm_lookup(&path, keyval)?; format::line_to_json(&line, format, &map.map_output) } fn lookup_json_map( &self, dom: &config::Domain, map: &config::Map, keyname: &str, keyval: &str, ) -> Result<serde_json::Value, WnError> { let path = format!("{}/{}", dom.db_dir, map.map_file.as_ref().unwrap()); db::json_lookup(path, keyname, keyval) } fn lookup_lua_map( &self, dom: &config::Domain, map: &config::Map, keyname: &str, keyval: &str, ) -> Result<serde_json::Value, WnError> { match lua::lua_map(&map.lua_function.as_ref().unwrap(), &dom.name, keyname, keyval) { Ok(serde_json::Value::Null) => 
Err(WnError::KeyNotFound), Ok(m) => Ok(m), Err(_) => Err(WnError::Other), } } }
pub fn handle_info(&self, domain: &str) -> HttpResponse { let domain = match self.inner.config.find_domain(domain) { None => return json_error(StatusCode::BAD_REQUEST, None, "Domain not found"), Some(d) => d, }; let mut maps = HashMap::new(); for mapname in &domain.maps { let mut map_keys = Vec::new(); let mapvec = match self.inner.config.map_.get(mapname) { Some(i) => i, None => continue, }; for m in mapvec { let keys = m.key.iter().chain(m.keys.iter()).chain(m.key_alias.keys()); map_keys.extend(keys); } let mut hm = HashMap::new(); hm.insert("keys", map_keys); maps.insert(mapname, hm); } #[derive(Serialize)] struct Reply<T> { maps: T, } let r = Reply { maps: maps }; let reply = serde_json::to_value(r).unwrap(); json_result(StatusCode::OK, &reply) }
function_block-full_function
[ { "content": "// Read the TOML config into a config::Condig struct.\n\npub fn read(toml_file: impl AsRef<Path>) -> io::Result<Config> {\n\n let buffer = std::fs::read_to_string(&toml_file)?;\n\n\n\n // initial parse.\n\n let mut config: Config = match toml::from_str(&buffer) {\n\n Ok(v) => Ok(v),\n\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, e.to_string())),\n\n }?;\n\n\n\n // see if \"include_maps\" is set- if so, read a separate map definition file.\n\n if let Some(ref extra) = config.include_maps {\n\n // relative to main config file.\n\n let include_maps = match toml_file.as_ref().parent() {\n\n Some(parent) => parent.join(Path::new(extra)),\n\n None => PathBuf::from(extra),\n\n };\n\n let buffer = std::fs::read_to_string(&include_maps)\n\n .map_err(|e| io::Error::new(e.kind(), format!(\"{:?}: {}\", include_maps, e)))?;\n\n let maps: HashMap<String, MapOrMaps> = match toml::from_str(&buffer) {\n\n Ok(v) => Ok(v),\n", "file_path": "webnis-server/src/config.rs", "rank": 0, "score": 151848.60520573994 }, { "content": "/// load SSL keys and exit on fail.\n\npub fn acceptor_or_exit(config: &Config) -> SslAcceptorBuilder {\n\n let k = config.server.key_file.as_ref().unwrap();\n\n let c = config.server.crt_file.as_ref().unwrap();\n\n match acceptor(k, c) {\n\n Ok(a) => a,\n\n Err(e) => {\n\n eprintln!(\"{}: {}\", PROGNAME, e);\n\n exit(1);\n\n },\n\n }\n\n}\n", "file_path": "webnis-server/src/ssl.rs", "rank": 1, "score": 135241.14543382378 }, { "content": "/// Read a file in the NIS ypserv.securenets format.\n\npub fn read_securenets(file: impl AsRef<Path>, iplist: &mut IpList) -> io::Result<()> {\n\n let buffer = std::fs::read_to_string(&file)?;\n\n for line in buffer.split('\\n') {\n\n let line = line.trim_left();\n\n if line.is_empty() || line.starts_with(\"#\") {\n\n continue;\n\n }\n\n let words = line.split_whitespace().collect::<Vec<_>>();\n\n if let Ok(ipnet) = parse_ip(words) {\n\n iplist.add(ipnet);\n\n }\n\n }\n\n iplist.finalize();\n\n 
Ok(())\n\n}\n", "file_path": "webnis-server/src/config.rs", "rank": 2, "score": 134887.25008685 }, { "content": "pub fn read(name: &str) -> io::Result<Config> {\n\n let mut f = File::open(name)?;\n\n let mut buffer = String::new();\n\n f.read_to_string(&mut buffer)?;\n\n\n\n let mut config : Config = match toml::from_str(&buffer) {\n\n Ok(v) => Ok(v),\n\n Err(e) => Err(io::Error::new(io::ErrorKind::InvalidData, format!(\"{}: {}\", name, e))),\n\n }?;\n\n\n\n if let Some(s) = config.server.take() {\n\n config.servers.push(s);\n\n }\n\n if config.servers.len() == 0 {\n\n return Err(io::Error::new(io::ErrorKind::InvalidData, \"no servers defined\"));\n\n }\n\n if config.domain.as_str() == \"\" {\n\n config.domain = \"default\".to_string();\n\n }\n\n\n\n Ok(config)\n\n}\n", "file_path": "webnis-bind/src/config.rs", "rank": 3, "score": 125943.40699019362 }, { "content": "/// Check http authentication.\n\npub fn check_http_auth(hdrs: &HeaderMap<HeaderValue>, domain: &config::Domain) -> AuthResult {\n\n // Get authschema from config. Not set? Access allowed.\n\n let schema = match domain.http_authschema {\n\n Some(ref s) => s.as_str(),\n\n None => return AuthResult::AuthOk,\n\n };\n\n\n\n // Get authtoken from config. Not set? 
Access denied.\n\n let token = match domain.http_authtoken {\n\n Some(ref t) => t.as_str(),\n\n None => {\n\n debug!(\"check_http_auth: domain {}: http_authtoken not set\", domain.name);\n\n return AuthResult::BadAuth;\n\n },\n\n };\n\n\n\n // find Authorization header and transform into &str\n\n let hdr = match hdrs.get(header::AUTHORIZATION).map(|v| v.to_str()) {\n\n Some(Ok(h)) => h,\n\n _ => return AuthResult::NoAuth,\n", "file_path": "webnis-server/src/util.rs", "rank": 4, "score": 98610.39732963132 }, { "content": "pub fn gdbm_lookup(db_path: impl AsRef<str>, key: &str) -> Result<String, WnError> {\n\n MAPS.with(|maps| {\n\n // do we have an open handle.\n\n let m = &mut *maps.borrow_mut();\n\n let path = db_path.as_ref();\n\n if let Some(mut db) = m.get_mut(path) {\n\n // yes. now, every 5 secs, see if database file has changed.\n\n let mut reopen = false;\n\n let now = SystemTime::now();\n\n if let Ok(d) = now.duration_since(db.lastcheck) {\n\n if d.as_secs() > 5 {\n\n if let Ok(metadata) = fs::metadata(path) {\n\n reopen = match (metadata.modified(), db.modified) {\n\n (Ok(m1), Some(m2)) => m1 != m2,\n\n _ => true,\n\n };\n\n }\n\n }\n\n }\n\n\n", "file_path": "webnis-server/src/db.rs", "rank": 5, "score": 95650.60719253488 }, { "content": "pub fn json_lookup(\n\n db_path: impl AsRef<str>,\n\n keyname: &str,\n\n keyval: &str,\n\n) -> Result<serde_json::Value, WnError>\n\n{\n\n let file = File::open(db_path.as_ref()).map_err(|_| WnError::MapNotFound)?;\n\n let entries: serde_json::Value = serde_json::from_reader(file).map_err(|_| WnError::DbOther)?;\n\n let mut idx: usize = 0;\n\n let keyval = match keyval.parse::<u64>() {\n\n Ok(num) => json!(num),\n\n Err(_) => json!(keyval),\n\n };\n\n loop {\n\n let obj = match entries.get(idx) {\n\n None => break,\n\n Some(obj) => obj,\n\n };\n\n if obj.get(keyname) == Some(&keyval) {\n\n return Ok(obj.to_owned());\n", "file_path": "webnis-server/src/db.rs", "rank": 6, "score": 94253.24975113217 }, { "content": "pub fn 
line_to_json(\n\n line: &str,\n\n format: &Format,\n\n output: &Option<HashMap<String, String>>,\n\n) -> Result<serde_json::Value, WnError>\n\n{\n\n match format {\n\n Format::Passwd => to_json(&Passwd::from_line(line)?),\n\n Format::Group => to_json(&Group::from_line(line)?),\n\n Format::Adjunct => to_json(&Adjunct::from_line(line)?),\n\n Format::KeyValue => to_json(&KeyValue::from_line(line, output)?),\n\n Format::ColSep => to_json(&Fields::from_line(line, output, \":\")?),\n\n Format::WsSep => to_json(&Fields::from_line(line, output, \"\")?),\n\n Format::TabSep => to_json(&Fields::from_line(line, output, \"\\t\")?),\n\n Format::Line => to_json(&Fields::from_line(line, output, \"\\n\")?),\n\n Format::Json => serde_json::from_str(line).map_err(WnError::SerializeJson),\n\n }\n\n}\n", "file_path": "webnis-server/src/format.rs", "rank": 7, "score": 94253.24975113219 }, { "content": "// build a new hyper::Client.\n\nfn new_client(config: &super::config::Config) -> hyper::Client<HttpsConnector<HttpConnector>> {\n\n let http2_only = config.http2_only.unwrap_or(false);\n\n let https = HttpsConnector::new(4).unwrap();\n\n hyper::Client::builder()\n\n .http2_only(http2_only)\n\n .keep_alive(true)\n\n .keep_alive_timeout(Duration::new(30, 0))\n\n .build::<_, hyper::Body>(https)\n\n}\n\n\n", "file_path": "webnis-bind/src/request.rs", "rank": 8, "score": 91887.25427433803 }, { "content": "/// decode POST body into simple key/value.\n\n///\n\n/// Now wouldn't it be great if we could use serde_urlencoded! 
Unfortunately\n\n/// there's no support for non-UTF8 strings (nope, OsString / Vec<u8> do not work)\n\npub fn decode_post_body(body: &[u8]) -> HashMap<String, String> {\n\n let mut hm = HashMap::new();\n\n\n\n for kv in body.split(|&b| b == b'&') {\n\n let mut w = kv.splitn(2, |&b| b == b'=');\n\n let (k, v) = (w.next().unwrap(), w.next().unwrap_or(b\"\"));\n\n if let Ok(k) = percent_decode(k).decode_utf8() {\n\n // don't percent-decode the password value.\n\n let v = match k.as_ref() {\n\n \"password\" => std::str::from_utf8(v).map(|s| s.to_string()),\n\n \"password_raw\" => continue,\n\n _ => percent_decode(v).decode_utf8().map(|x| x.into_owned()),\n\n };\n\n if let Ok(v) = v {\n\n hm.insert(k.into_owned(), v);\n\n }\n\n }\n\n }\n\n hm\n\n}\n", "file_path": "webnis-server/src/util.rs", "rank": 9, "score": 72119.66396527879 }, { "content": "/// load ssl keys\n\npub fn acceptor(keyfile: &str, chainfile: &str) -> io::Result<SslAcceptorBuilder> {\n\n let mut builder = SslAcceptor::mozilla_intermediate(SslMethod::tls())\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, format!(\"opentls: {}\", e)))?;\n\n builder\n\n .set_private_key_file(keyfile, SslFiletype::PEM)\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, format!(\"{}: {}\", keyfile, e)))?;\n\n builder\n\n .set_certificate_chain_file(chainfile)\n\n .map_err(|e| io::Error::new(io::ErrorKind::Other, format!(\"{}: {}\", keyfile, e)))?;\n\n builder.set_verify(ssl::SslVerifyMode::NONE);\n\n\n\n let mut options = ssl::SslOptions::empty();\n\n options.insert(SslOptions::NO_COMPRESSION);\n\n options.insert(SslOptions::CIPHER_SERVER_PREFERENCE);\n\n options.insert(SslOptions::NO_SSLV2);\n\n options.insert(SslOptions::NO_SSLV3);\n\n options.insert(SslOptions::NO_TLSV1);\n\n options.insert(SslOptions::NO_TLSV1_1);\n\n builder.set_options(options);\n\n\n\n let mode = SslSessionCacheMode::SERVER;\n\n builder.set_session_cache_mode(mode);\n\n\n\n Ok(builder)\n\n}\n\n\n", "file_path": "webnis-server/src/ssl.rs", 
"rank": 10, "score": 68191.24462720146 }, { "content": "fn masklen(mask: &Ipv4Addr) -> u8 {\n\n let v: u32 = (*mask).into();\n\n for i in 0..32 {\n\n if v & 2u32.pow(i) > 0 {\n\n return (32 - i) as u8;\n\n }\n\n }\n\n 0\n\n}\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 11, "score": 65055.87468790171 }, { "content": "// Serde helper\n\npub fn deserialize_map_type<'de, D>(deserializer: D) -> Result<MapType, D::Error>\n\nwhere D: Deserializer<'de> {\n\n let s = String::deserialize(deserializer)?;\n\n MapType::from_str(&s).map_err(serde::de::Error::custom)\n\n}\n\n\n\nimpl Default for MapType {\n\n fn default() -> MapType {\n\n MapType::None\n\n }\n\n}\n", "file_path": "webnis-server/src/db.rs", "rank": 12, "score": 64592.405377387615 }, { "content": "// Serde helper\n\npub fn option_deserialize_format<'de, D>(deserializer: D) -> Result<Option<Format>, D::Error>\n\nwhere D: Deserializer<'de> {\n\n let s = String::deserialize(deserializer)?;\n\n Format::from_str(&s)\n\n .map(|f| Some(f))\n\n .map_err(serde::de::Error::custom)\n\n}\n\n\n", "file_path": "webnis-server/src/format.rs", "rank": 13, "score": 63577.617874660944 }, { "content": "// main info that interpreter instances use to initialize.\n\nstruct LuaMaster {\n\n webnis: Webnis,\n\n name: String,\n\n script: String,\n\n}\n\n\n", "file_path": "webnis-server/src/lua.rs", "rank": 14, "score": 59184.02545595685 }, { "content": "// per-instance interpreter state.\n\nstruct LuaState {\n\n lua: Lua,\n\n}\n\n\n\n// for now, 1 interpreter per thread. this might be excessive- perhaps\n\n// we want to just start a maximum of N interpreters and multiplex\n\n// over them. Hey, using actix actors perhaps.\n\nthread_local! {\n\n static LUA: RefCell<Option<LuaState>> = RefCell::new(local_lua_init());\n\n}\n\n\n\nlazy_static! 
{\n\n static ref LUA_MASTER: Mutex<Option<LuaMaster>> = Mutex::new(None);\n\n}\n\n\n", "file_path": "webnis-server/src/lua.rs", "rank": 15, "score": 59180.49928199763 }, { "content": "struct GdbmDb {\n\n #[allow(unused)]\n\n file_name: String,\n\n modified: Option<SystemTime>,\n\n lastcheck: SystemTime,\n\n handle: gdbm::Gdbm,\n\n}\n\n\n\n// Unfortunately `gdbm' is not thread-safe.\n\nthread_local! {\n\n static MAPS: RefCell<HashMap<String, GdbmDb>> = RefCell::new(HashMap::new());\n\n}\n\n\n", "file_path": "webnis-server/src/db.rs", "rank": 16, "score": 59180.49928199763 }, { "content": "/// parse IP adress/mask, 2 formats:\n\n/// 1. 255.255.255.248 194.109.16.0\n\n/// 2. 194.109.16.0/27 or 2001:888:4:42::/64\n\nfn parse_ip(words: Vec<&str>) -> Result<IpNet, ()> {\n\n if words.len() >= 2 {\n\n match (words[0].parse::<Ipv4Addr>(), words[1].parse::<Ipv4Addr>()) {\n\n (Ok(mask), Ok(ip)) => {\n\n let ipnet = Ipv4Net::new(ip, masklen(&mask)).unwrap();\n\n return Ok(ipnet.into());\n\n },\n\n _ => {},\n\n }\n\n }\n\n if !words[0].contains('/') {\n\n return match IpAddr::from_str(words[0]) {\n\n Ok(IpAddr::V4(ip)) => Ok(Ipv4Net::new(ip, 32).unwrap().into()),\n\n Ok(IpAddr::V6(ip)) => Ok(Ipv6Net::new(ip, 128).unwrap().into()),\n\n Err(_) => Err(()),\n\n };\n\n }\n\n IpNet::from_str(words[0]).map_err(|_| ())\n\n}\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 17, "score": 58416.086477168814 }, { "content": "fn map_inherit(key: &str, map: &Map, base: &Map) -> Map {\n\n Map {\n\n name: String::new(),\n\n key: map.key.clone().or_else(|| Some(key.to_string())),\n\n keys: map.keys.clone(),\n\n key_alias: map.key_alias.clone(),\n\n lua_function: map.lua_function.clone().or_else(|| base.lua_function.clone()),\n\n map_type: if map.map_type != MapType::None {\n\n map.map_type.clone()\n\n } else {\n\n base.map_type.clone()\n\n },\n\n map_format: map.map_format.clone().or_else(|| base.map_format.clone()),\n\n map_file: map.map_file.clone().or_else(|| 
base.map_file.clone()),\n\n map_output: map.map_output.clone().or_else(|| base.map_output.clone()),\n\n submaps: HashMap::new(),\n\n }\n\n}\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 18, "score": 55801.348957673625 }, { "content": "/// Recursively transform a rlua::Value to a serde_json::Value\n\nfn lua_value_to_json(lua_value: rlua::Value) -> serde_json::Value {\n\n match lua_value {\n\n rlua::Value::Nil => JValue::Null,\n\n rlua::Value::Boolean(v) => JValue::Bool(v),\n\n rlua::Value::Integer(v) => From::from(v as i64),\n\n rlua::Value::Number(v) => From::from(v as f64),\n\n rlua::Value::String(v) => From::from(v.to_str().unwrap_or(\"\").to_string()),\n\n rlua::Value::Table(t) => {\n\n let is_array = match t.raw_get::<usize, rlua::Value>(1) {\n\n Ok(rlua::Value::Nil) => false,\n\n Err(_) => false,\n\n _ => true,\n\n };\n\n if is_array {\n\n // this table has a sequence part. handle it as an array.\n\n let v = t\n\n .sequence_values::<rlua::Value>()\n\n .filter_map(|res| res.ok())\n\n .map(|e| lua_value_to_json(e))\n\n .collect::<Vec<_>>();\n", "file_path": "webnis-server/src/lua.rs", "rank": 19, "score": 53930.86023529676 }, { "content": "fn main() {\n\n env_logger::init();\n\n\n\n let matches = clap_app!(webnis_server =>\n\n (version: \"0.1\")\n\n (@arg CFG: -c --config +takes_value \"configuration file (/etc/webnis-server.toml)\")\n\n )\n\n .get_matches();\n\n let cfg = matches.value_of(\"CFG\").unwrap_or(\"/etc/webnis-server.toml\");\n\n\n\n let config = match config::read(cfg) {\n\n Err(e) => {\n\n eprintln!(\"{}: {}: {}\", PROGNAME, cfg, e);\n\n exit(1);\n\n },\n\n Ok(c) => c,\n\n };\n\n if config.domain.len() == 0 {\n\n eprintln!(\"{}: no domains defined in {}\", PROGNAME, cfg);\n\n exit(1);\n", "file_path": "webnis-server/src/main.rs", "rank": 20, "score": 48740.08320458793 }, { "content": "fn main() {\n\n env_logger::init().unwrap();\n\n\n\n let matches = clap_app!(webnis_bind =>\n\n (version: \"0.1\")\n\n (@arg LISTEN: -l --listen 
+takes_value \"unix domain socket to listen on (/var/run/webnis-bind.sock)\")\n\n (@arg CFG: -c --config +takes_value \"configuration file (/etc/webnis-bind.toml)\")\n\n ).get_matches();\n\n\n\n let listen = matches.value_of(\"LISTEN\").unwrap_or(\"/var/run/webnis-bind.sock\");\n\n let cfg = matches.value_of(\"CFG\").unwrap_or(\"/etc/webnis-bind.toml\");\n\n\n\n let config = match config::read(cfg) {\n\n Ok(c) => c,\n\n Err(e) => {\n\n eprintln!(\"{}: {}\", PROGNAME, e);\n\n exit(1);\n\n }\n\n };\n\n let http2_only = config.http2_only.unwrap_or(false);\n", "file_path": "webnis-bind/src/main.rs", "rank": 21, "score": 48740.08320458793 }, { "content": "// helper.\n\nfn to_json<T: serde::Serialize>(value: T) -> Result<serde_json::Value, WnError> {\n\n serde_json::to_value(value).map_err(WnError::SerializeJson)\n\n}\n\n\n\n#[derive(Debug, Clone, Deserialize)]\n\npub enum Format {\n\n Passwd,\n\n Group,\n\n Adjunct,\n\n KeyValue,\n\n ColSep,\n\n WsSep,\n\n TabSep,\n\n Line,\n\n Json,\n\n}\n\n\n\nimpl FromStr for Format {\n\n type Err = WnError;\n\n\n", "file_path": "webnis-server/src/format.rs", "rank": 22, "score": 47174.79332540558 }, { "content": "/// Recursively transform a serde_json::Value to a rlua::Value.\n\n/// This is surprisingly easy!\n\nfn json_value_to_lua<'a>(lua: &'a Lua, jv: serde_json::Value) -> rlua::Value<'a> {\n\n match jv {\n\n serde_json::Value::Null => rlua::Nil,\n\n serde_json::Value::Bool(b) => b.to_lua(lua).unwrap(),\n\n serde_json::Value::Number(n) => {\n\n if let Some(n) = n.as_i64() {\n\n n.to_lua(lua).unwrap()\n\n } else if let Some(n) = n.as_f64() {\n\n n.to_lua(lua).unwrap()\n\n } else {\n\n rlua::Nil\n\n }\n\n },\n\n serde_json::Value::String(s) => s.to_lua(lua).unwrap(),\n\n serde_json::Value::Array(a) => {\n\n a.into_iter()\n\n .map(|e| json_value_to_lua(lua, e))\n\n .collect::<Vec<_>>()\n\n .to_lua(lua)\n\n .unwrap_or(rlua::Nil)\n\n },\n\n serde_json::Value::Object(o) => {\n\n let hm: HashMap<String, rlua::Value> =\n\n 
HashMap::from_iter(o.into_iter().map(|(k, v)| (k, json_value_to_lua(lua, v))));\n\n hm.to_lua(lua).unwrap_or(rlua::Nil)\n\n },\n\n }\n\n}\n\n\n", "file_path": "webnis-server/src/lua.rs", "rank": 23, "score": 46819.69498165641 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "src/main.rs", "rank": 24, "score": 45536.798547988605 }, { "content": "fn set_webnis_table(lua: &Lua, webnis: Webnis) {\n\n let table = lua.create_table().expect(\"failed to create table\");\n\n let globals = lua.globals();\n\n\n\n let map_lookup = {\n\n let webnis = webnis.clone();\n\n lua.create_function(\n\n move |lua, (mapname, keyname, keyvalue): (String, String, String)| {\n\n // it gets a bit verbose when you want to log errors\n\n // (as opposed to sending them up, which might be better..)\n\n let w_obj: rlua::Table = match lua.globals().get(\"webnis\") {\n\n Ok(w) => w,\n\n Err(e) => {\n\n warn!(\"map_lookup: get webnis global: {}\", e);\n\n return Err(e);\n\n },\n\n };\n\n let domain: String = match w_obj.get(\"domain\") {\n\n Ok(d) => d,\n\n Err(e) => {\n", "file_path": "webnis-server/src/lua.rs", "rank": 25, "score": 41814.54380612455 }, { "content": "/// This is called the first time the thread-local LUA is referenced.\n\n/// Try to start up an interpreter.\n\nfn local_lua_init() -> Option<LuaState> {\n\n let guard = LUA_MASTER.lock().unwrap();\n\n let lua_master = match &*guard {\n\n Some(l) => l,\n\n None => {\n\n debug!(\"LUA not initialized but someone is trying to use it\");\n\n return None;\n\n },\n\n };\n\n let lua = Lua::new();\n\n if let Err::<(), _>(e) = lua.exec(&lua_master.script, Some(lua_master.name.as_str())) {\n\n panic!(\"error loading lua script {}: {}\", lua_master.name, e);\n\n }\n\n\n\n set_webnis_table(&lua, lua_master.webnis.clone());\n\n\n\n Some(LuaState { lua: lua })\n\n}\n\n\n\n/// Read the lua script from a file, and evaluate it. 
If it does evaluate\n", "file_path": "webnis-server/src/lua.rs", "rank": 26, "score": 40734.59658067682 }, { "content": "// helper.\n\nfn from_io_error(e: std::io::Error) -> PamError {\n\n match e.kind() {\n\n std::io::ErrorKind::TimedOut|\n\n std::io::ErrorKind::Interrupted => PamError::AUTHINFO_UNAVAIL,\n\n _ => PamError::AUTH_ERR,\n\n }\n\n}\n\n\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 27, "score": 39520.275166681604 }, { "content": "fn handle_info(req: &HttpRequest<Webnis>) -> HttpResponse {\n\n let domain = match Path::<String>::extract(req) {\n\n Err(_) => return HttpResponse::InternalServerError().body(\"handle_info should not fail\\n\"),\n\n Ok(d) => d,\n\n };\n\n if let Some(denied) = check_authorization(req, &domain) {\n\n return denied;\n\n }\n\n req.state().handle_info(&domain)\n\n}\n\n\n", "file_path": "webnis-server/src/main.rs", "rank": 28, "score": 39259.37221271723 }, { "content": "fn handle_map(req: &HttpRequest<Webnis>) -> HttpResponse {\n\n let params = match Path::<(String, String)>::extract(req) {\n\n Err(_) => return HttpResponse::InternalServerError().body(\"handle_map should not fail\\n\"),\n\n Ok(d) => d,\n\n };\n\n if let Some(denied) = check_authorization(req, &params.0) {\n\n return denied;\n\n }\n\n req.state().handle_map(&params.0, &params.1, &req.query())\n\n}\n\n\n", "file_path": "webnis-server/src/main.rs", "rank": 29, "score": 39259.37221271723 }, { "content": "fn raise_rlimit_nofile(want_lim: libc::rlim_t) {\n\n // get current rlimit.\n\n let mut rlim = libc::rlimit {\n\n rlim_cur: 0,\n\n rlim_max: 0,\n\n };\n\n if unsafe { libc::getrlimit(libc::RLIMIT_NOFILE, &mut rlim as *mut libc::rlimit) } != 0 {\n\n return;\n\n }\n\n\n\n // might be enough already.\n\n if rlim.rlim_cur >= want_lim {\n\n return;\n\n }\n\n\n\n // if the current soft limit is smaller than the current hard limit,\n\n // first try raising the soft limit as far as we can or need.\n\n if rlim.rlim_cur < rlim.rlim_max {\n\n let lim = 
std::cmp::min(want_lim, rlim.rlim_max);\n\n let new_rlim = libc::rlimit {\n", "file_path": "webnis-server/src/main.rs", "rank": 30, "score": 38975.73582943933 }, { "content": "// decode gidlist line\n\nfn decode_gidlist(line: String) -> NssResult<Vec<gid_t>> {\n\n\n\n // let's be anal about this.\n\n let fields : Vec<&str> = line.split(':').collect();\n\n if fields.len() != 2 {\n\n debug!(\"wrong number of fields for gidlist, expected 2, got {}\", fields.len());\n\n return Err(NssError::Unavailable);\n\n }\n\n let mut gids = Vec::new();\n\n for gid in fields[1].split(',') {\n\n let g = match gid.parse::<gid_t>() {\n\n Ok(n) => n,\n\n Err(_) => {\n\n debug!(\"invalid gid in answer: {}\", gid);\n\n return Err(NssError::Unavailable);\n\n }\n\n };\n\n gids.push(g);\n\n }\n\n Ok(gids)\n\n}\n\n\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 31, "score": 38627.502177923314 }, { "content": "// decode group line\n\nfn decode_group(mut grp: Group, line: String) -> NssResult<()> {\n\n\n\n // let's be anal about this.\n\n let fields : Vec<&str> = line.split(':').collect();\n\n if fields.len() != 4 {\n\n debug!(\"wrong number of fields for group, expected 4, got {}\", fields.len());\n\n return Err(NssError::Unavailable);\n\n }\n\n if fields[0].len() == 0 {\n\n debug!(\"wnbind reply contains empty groupname field\");\n\n return Err(NssError::Unavailable);\n\n }\n\n let gid = match fields[2].parse::<gid_t>() {\n\n Ok(n) => n,\n\n Err(_) => {\n\n debug!(\"invalid gr_gid in answer: {}\", fields[2]);\n\n return Err(NssError::Unavailable);\n\n },\n\n };\n\n grp.set_name(fields[0]);\n\n grp.set_passwd(fields[1]);\n\n grp.set_gid(gid);\n\n let members : Vec<&str> = fields[3].split(',').collect();\n\n grp.set_members(members);\n\n\n\n grp.result()\n\n}\n\n\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 32, "score": 37223.18725085146 }, { "content": "// call cmd_run and sleep/retry a few times if we fail.\n\nfn wnbind_get(cmd: &str, arg: &str) -> NssResult<String> {\n\n for 
tries in 0 .. MAX_TRIES {\n\n match wnbind_try(cmd, arg) {\n\n Ok(r) => {\n\n if r.contains(0 as char) {\n\n debug!(\"wnbind answer contains a literal 0\");\n\n return Err(NssError::Unavailable);\n\n }\n\n return Ok(r);\n\n },\n\n res @ Err(NssError::NotFound) => return res,\n\n Err(NssError::TimedOut) => {},\n\n _ => {\n\n if tries < MAX_TRIES - 1 {\n\n sleep(Duration::from_millis(RETRY_DELAY_MS));\n\n }\n\n },\n\n }\n\n }\n\n Err(NssError::Unavailable)\n\n}\n\n\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 33, "score": 37223.18725085146 }, { "content": "// decode passwd line\n\nfn decode_passwd(mut pwd: Passwd, line: String) -> NssResult<()> {\n\n\n\n // let's be anal about this.\n\n let fields : Vec<&str> = line.split(':').collect();\n\n if fields.len() != 7 {\n\n debug!(\"wrong number of fields for passwd, expected 7, got {}\", fields.len());\n\n return Err(NssError::Unavailable);\n\n }\n\n if fields[0].len() == 0 {\n\n debug!(\"wnbind reply contains empty username field\");\n\n return Err(NssError::Unavailable);\n\n }\n\n let uid = match fields[2].parse::<uid_t>() {\n\n Ok(n) => n,\n\n Err(_) => {\n\n debug!(\"invalid pw_uid in answer: {}\", fields[2]);\n\n return Err(NssError::Unavailable);\n\n },\n\n };\n\n let gid = match fields[3].parse::<gid_t>() {\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 34, "score": 37223.18725085146 }, { "content": "// open socket, send one command, read reply, return.\n\nfn wnbind_try(cmd: &str, arg: &str) -> NssResult<String> {\n\n\n\n // connect to webnis-bind.\n\n let mut socket = match UnixStream::connect(SOCKADDR) {\n\n Ok(s) => s,\n\n Err(e) => {\n\n debug!(\"connect to {}: {}\", SOCKADDR, e);\n\n return Err(e)?;\n\n },\n\n };\n\n socket.set_read_timeout(Some(Duration::from_millis(REQUEST_READ_TIMEOUT_MS))).ok();\n\n socket.set_write_timeout(Some(Duration::from_millis(REQUEST_WRITE_TIMEOUT_MS))).ok();\n\n\n\n // send request.\n\n let b = format!(\"{} {}\\n\", cmd, arg).into_bytes();\n\n if let Err(e) = 
socket.write_all(&b) {\n\n debug!(\"write to {}: {}\", SOCKADDR, e);\n\n return Err(e)?;\n\n }\n\n\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 35, "score": 37223.18725085146 }, { "content": "// over-engineered way to lowercase a string without allocating.\n\nfn tolower<'a>(s: &'a str, buf: &'a mut [u8]) -> &'a str {\n\n let b = s.as_bytes();\n\n if b.len() > buf.len() {\n\n return s;\n\n }\n\n for idx in 0 .. b.len() {\n\n let c = b[idx];\n\n buf[idx] = if c >= 65 && c <= 90 { c + 32 } else { c };\n\n }\n\n match ::std::str::from_utf8(&buf[0..b.len()]) {\n\n Ok(s) => s,\n\n Err(_) => s,\n\n }\n\n}\n\n\n\nimpl<'a> Request<'a> {\n\n pub fn parse(input: &'a str) -> Result<Request<'a>, String> {\n\n let mut parts = input.splitn(3, \" \");\n\n let mut buf = [0u8; 16];\n\n\t let c = match parts.next() {\n", "file_path": "webnis-bind/src/request.rs", "rank": 36, "score": 36244.633326292584 }, { "content": "// build a hyper::Uri from a host and a path.\n\n//\n\n// host can be \"hostname\", \"hostname:port\", or \"http(s)://hostname\".\n\n// if it's in the plain \"hostname\" format, the scheme will be http is\n\n// the host is localhost, https otherwise.\n\nfn build_uri(host: &str, path: &str) -> hyper::Uri {\n\n let url = if host.starts_with(\"http://\") || host.starts_with(\"https://\") {\n\n let host = host.trim_right_matches(\"/\");\n\n format!(\"{}{}\", host, path)\n\n } else if host == \"localhost\" || host.starts_with(\"localhost:\") {\n\n format!(\"http://{}/.well-known/webnis{}\", host, path)\n\n } else {\n\n format!(\"https://{}/.well-known/webnis{}\", host, path)\n\n };\n\n url.parse::<hyper::Uri>().unwrap()\n\n}\n\n\n", "file_path": "webnis-bind/src/request.rs", "rank": 37, "score": 36175.019834658546 }, { "content": "#[inline]\n\nfn compare_v6(probe: &IpNet, ip: &Ipv6Net) -> Ordering {\n\n match probe {\n\n IpNet::V4(_) => Less,\n\n IpNet::V6(probe) => {\n\n if probe > ip {\n\n Greater\n\n } else if probe.contains(ip) {\n\n Equal\n\n } else {\n\n 
Less\n\n }\n\n },\n\n }\n\n}\n", "file_path": "webnis-server/src/iplist.rs", "rank": 38, "score": 35906.303845992574 }, { "content": "#[inline]\n\nfn compare_v4(probe: &IpNet, ip: &Ipv4Net) -> Ordering {\n\n match probe {\n\n IpNet::V6(_) => Greater,\n\n IpNet::V4(probe) => {\n\n if probe > ip {\n\n Greater\n\n } else if probe.contains(ip) {\n\n Equal\n\n } else {\n\n Less\n\n }\n\n },\n\n }\n\n}\n\n\n", "file_path": "webnis-server/src/iplist.rs", "rank": 39, "score": 35906.303845992574 }, { "content": "\n\nuse std::io::prelude::*;\n\nuse std::io;\n\nuse std::fs::File;\n\n\n\nuse toml;\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Config {\n\n #[serde(default)]\n\n pub domain: String,\n\n pub password: String,\n\n pub server: Option<String>,\n\n #[serde(default)]\n\n pub servers: Vec<String>,\n\n pub http2_only: Option<bool>,\n\n pub concurrency: Option<usize>,\n\n #[serde(default)]\n\n pub restrict_getpwuid: bool,\n\n #[serde(default)]\n\n pub restrict_getgrgid: bool,\n\n}\n\n\n", "file_path": "webnis-bind/src/config.rs", "rank": 40, "score": 35749.13513846508 }, { "content": "use std::collections::HashMap;\n\nuse std::io;\n\nuse std::net::Ipv4Addr;\n\nuse std::net::{IpAddr, SocketAddr, ToSocketAddrs};\n\nuse std::path::{Path, PathBuf};\n\nuse std::str::FromStr;\n\n\n\nuse ipnet::{IpNet, Ipv4Net, Ipv6Net};\n\nuse toml;\n\n\n\nuse crate::db::{deserialize_map_type, MapType};\n\nuse crate::format::{option_deserialize_format, Format};\n\nuse crate::iplist::IpList;\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Config {\n\n pub server: Server,\n\n pub domain: Vec<Domain>,\n\n //#[serde(default, rename=\"mapdef\")]\n\n #[serde(default)]\n", "file_path": "webnis-server/src/config.rs", "rank": 41, "score": 35748.092173437384 }, { "content": " pub map: HashMap<String, MapOrMaps>,\n\n #[serde(skip)]\n\n pub map_: HashMap<String, Vec<Map>>,\n\n #[serde(default)]\n\n pub auth: HashMap<String, Auth>,\n\n pub lua: Option<LuaConfig>,\n\n pub include_maps: 
Option<String>,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Server {\n\n #[serde(default)]\n\n pub tls: bool,\n\n pub crt_file: Option<String>,\n\n pub key_file: Option<String>,\n\n #[serde(default)]\n\n pub cert_password: String,\n\n pub listen: OneOrManyAddr,\n\n #[serde(default)]\n\n pub securenets: Vec<String>,\n", "file_path": "webnis-server/src/config.rs", "rank": 42, "score": 35747.10664188342 }, { "content": "}\n\n\n\nimpl Config {\n\n /// look up a domain by name.\n\n pub fn find_domain(&self, name: &str) -> Option<&Domain> {\n\n self.domain.iter().find(|d| d.name == name)\n\n }\n\n\n\n /// Find a map by name. As map definitions with the same name can occur\n\n /// multiple times in the config with different keys, the key has\n\n /// to be a valid lookup key for the map as well.\n\n pub fn find_map<'b, 'a: 'b>(&'a self, mapname: &str, key: &'b str) -> Option<(&'a Map, &'b str)> {\n\n let maps = self.map_.get(mapname)?;\n\n\n\n // if it's just one map without any keys, return map.\n\n // this can only happen for LUA maps.\n\n if maps.len() == 1 && maps[0].key.is_none() && maps[0].keys.len() == 0 {\n\n return Some((&maps[0], key));\n\n }\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 43, "score": 35746.22447644011 }, { "content": " return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"map {}: cannot use output with format {:?}\", m.name, m.map_format),\n\n ));\n\n },\n\n _ => {},\n\n }\n\n }\n\n }\n\n }\n\n config.map_.insert(k.to_string(), mm);\n\n }\n\n\n\n // Check domains for validity\n\n for d in &config.domain {\n\n if let Some(ref auth_name) = d.auth {\n\n let auth = match config.auth.get(auth_name) {\n\n None => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n", "file_path": "webnis-server/src/config.rs", "rank": 44, "score": 35745.03702384862 }, { "content": " pub map_type: MapType,\n\n /// format: kv, json, passwd, fields (optional for map_type \"json\")\n\n #[serde(default, 
rename = \"format\", deserialize_with = \"option_deserialize_format\")]\n\n pub map_format: Option<Format>,\n\n /// filename\n\n #[serde(rename = \"file\")]\n\n pub map_file: Option<String>,\n\n /// optional args for types like 'fields'\n\n #[serde(rename = \"output\")]\n\n pub map_output: Option<HashMap<String, String>>,\n\n #[serde(flatten)]\n\n pub submaps: HashMap<String, Map>,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct LuaConfig {\n\n pub script: String,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n", "file_path": "webnis-server/src/config.rs", "rank": 45, "score": 35744.97253415352 }, { "content": " }\n\n\n\n // Check if TLS settings are valid.\n\n if config.server.tls {\n\n if config.server.key_file.is_none() && config.server.crt_file.is_none() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"config: tls enabled but no cert files configured\",\n\n ));\n\n }\n\n\n\n if config.server.key_file.is_some() != config.server.crt_file.is_some() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"config: both the key_file and crt_file must be set\",\n\n ));\n\n }\n\n }\n\n\n\n Ok(config)\n", "file_path": "webnis-server/src/config.rs", "rank": 46, "score": 35743.79030850109 }, { "content": "#[serde(untagged)]\n\npub enum MapOrMaps {\n\n Maps(HashMap<String, Map>),\n\n Map(Map),\n\n // Other(serde_json::Value),\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\n#[serde(untagged)]\n\npub enum OneOrManyAddr {\n\n One(SocketAddr),\n\n Many(Vec<SocketAddr>),\n\n}\n\n\n\nimpl ToSocketAddrs for OneOrManyAddr {\n\n type Iter = std::vec::IntoIter<SocketAddr>;\n\n fn to_socket_addrs(&self) -> io::Result<std::vec::IntoIter<SocketAddr>> {\n\n let i = match self {\n\n OneOrManyAddr::Many(ref v) => v.to_owned(),\n\n OneOrManyAddr::One(ref s) => vec![*s],\n\n };\n\n Ok(i.into_iter())\n\n }\n\n}\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 47, "score": 35743.76340156436 }, { "content": 
"#[derive(Deserialize, Debug, Clone)]\n\npub struct Auth {\n\n pub map: Option<String>,\n\n pub key: Option<String>,\n\n pub lua_function: Option<String>,\n\n}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Map {\n\n #[serde(skip, default)]\n\n pub name: String,\n\n pub key: Option<String>,\n\n #[serde(default)]\n\n pub keys: Vec<String>,\n\n #[serde(default)]\n\n pub key_alias: HashMap<String, String>,\n\n /// LUA function to call.\n\n pub lua_function: Option<String>,\n\n /// type: gdbm, json, lua\n\n #[serde(default, rename = \"type\", deserialize_with = \"deserialize_map_type\")]\n", "file_path": "webnis-server/src/config.rs", "rank": 48, "score": 35743.668000153506 }, { "content": "}\n\n\n\n#[derive(Deserialize, Debug, Clone)]\n\npub struct Domain {\n\n /// domain name\n\n pub name: String,\n\n /// database directory\n\n pub db_dir: String,\n\n /// available (allowed) maps\n\n pub maps: Vec<String>,\n\n /// link to the authentication method/map\n\n pub auth: Option<String>,\n\n /// HTTP Authentication schema (first thing in the Authorization: header)\n\n pub http_authschema: Option<String>,\n\n /// HTTP Token (comes after the schema in the Authorization header).\n\n pub http_authtoken: Option<String>,\n\n /// Encoding of the authtoken. For schema 'Basic' this is usually 'base64'.\n\n pub http_authencoding: Option<String>,\n\n}\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 49, "score": 35743.52370683722 }, { "content": " Err(e) => {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"include_maps {:?}: {}\", include_maps, e),\n\n ))\n\n },\n\n }?;\n\n // add to main config.\n\n for (name, map) in maps.into_iter() {\n\n config.map.insert(name, map);\n\n }\n\n }\n\n // Build the `map_ `HashMap.\n\n for (k, v) in config.map.iter() {\n\n //\n\n // there are 3 variants here:\n\n //\n\n // 1. simple map definition: [passwd] => MapOrMaps::Map( MapDef )\n\n //\n\n // 2. 
a map definition with the keyname included in the name.\n", "file_path": "webnis-server/src/config.rs", "rank": 50, "score": 35742.9802924942 }, { "content": " format!(\"config: domain {}: auth {} not defined\", d.name, auth_name),\n\n ));\n\n },\n\n Some(a) => a,\n\n };\n\n if auth.lua_function.is_none() {\n\n if auth.key.is_none() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"config: auth {}: 'key' not set\", auth_name),\n\n ));\n\n }\n\n if auth.map.is_none() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"config: auth {}: 'map' not set\", auth_name),\n\n ));\n\n }\n\n }\n\n }\n", "file_path": "webnis-server/src/config.rs", "rank": 51, "score": 35742.96154541942 }, { "content": " // Now walk over all maps and do some basic validity checks.\n\n for m in &mut mm {\n\n m.name = k.to_string();\n\n\n\n // Map type must be set.\n\n if m.map_type == MapType::None {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"map {}: map_type not set\", m.name),\n\n ));\n\n }\n\n\n\n // format = \"...\" only works with MapType::Gdbm at this time.\n\n if m.map_type != MapType::Gdbm && m.map_format.is_some() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"map {}: cannot use format with map type {:?}\", m.name, m.map_type),\n\n ));\n\n }\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 52, "score": 35741.36282812568 }, { "content": " // find first map with a matching key.\n\n for m in maps {\n\n let key = m.key_alias.get(key).map(|s| s.as_str()).unwrap_or(key);\n\n let mut keys = m.key.iter().chain(m.keys.iter());\n\n if let Some(k) = keys.find(|ref k| k.as_str() == key) {\n\n return Some((m, k));\n\n }\n\n }\n\n None\n\n }\n\n\n\n /// Like find_map, but map must be in the allowed list for the domain\n\n pub fn find_allowed_map<'b, 'a: 'b>(\n\n &'a self,\n\n domain: &Domain,\n\n mapname: &str,\n\n key: &'b str,\n\n ) -> Option<(&'a Map, &'b str)>\n\n 
{\n\n domain\n\n .maps\n\n .iter()\n\n .find(|m| m.as_str() == mapname)\n\n .and_then(|_| self.find_map(mapname, key))\n\n }\n\n}\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 53, "score": 35741.34799229773 }, { "content": " io::ErrorKind::InvalidData,\n\n format!(\"map {}: no key\", m.name),\n\n ));\n\n }\n\n\n\n // Must have a filename.\n\n if m.map_file.is_none() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"map {}: map file not set\", m.name),\n\n ));\n\n }\n\n\n\n // output mapping doesn't work (yet) with all formats.\n\n if m.map_output.is_some() {\n\n match m.map_format {\n\n Some(Format::Json) |\n\n Some(Format::Passwd) |\n\n Some(Format::Group) |\n\n Some(Format::Adjunct) => {\n", "file_path": "webnis-server/src/config.rs", "rank": 54, "score": 35739.59200540027 }, { "content": " if m.map_type == MapType::Lua {\n\n // Type Lua, function must be set.\n\n if m.lua_function.is_none() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"map {}: lua_function not set\", m.name),\n\n ));\n\n }\n\n } else {\n\n // lua_function must not be set.\n\n if m.lua_function.is_some() {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"map {}: lua_function set, map_type must be \\\"lua\\\"\", m.name),\n\n ));\n\n }\n\n\n\n // Must have a key.\n\n if m.key.is_none() && m.keys.len() == 0 {\n\n return Err(io::Error::new(\n", "file_path": "webnis-server/src/config.rs", "rank": 55, "score": 35739.59200540027 }, { "content": " // There can be multiple definitions with the same basename.\n\n // E.g [passwd.name] and [passwd.uid] => MapOrMaps::Maps( HashMap<String, Map> )\n\n // The hashmap has two entries here, with keys \"name\" and \"uid\".\n\n //\n\n // 3. 
Like 2, but with a basemap definition.\n\n // E.g [passwd], [passwd.name], [passwd.uid].\n\n // This results in a single Map (MapOrMaps::Map), where the\n\n // passwd.name and passwd.uid maps can be found in the map.submaps member.\n\n //\n\n // We put all definitions with the same basename together in a Vec.\n\n let mut mm = Vec::new();\n\n match v {\n\n MapOrMaps::Map(m) => {\n\n if m.submaps.len() > 0 {\n\n // basemap with submaps.\n\n if m.key.is_some() || m.keys.len() > 0 || m.key_alias.len() > 0 {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"map {}: basemap cannot have a key\", k),\n\n ));\n", "file_path": "webnis-server/src/config.rs", "rank": 56, "score": 35739.59200540027 }, { "content": " }\n\n for (key, submap) in m.submaps.iter() {\n\n mm.push(map_inherit(key, submap, m));\n\n }\n\n } else {\n\n // single map.\n\n mm.push(m.to_owned());\n\n }\n\n },\n\n MapOrMaps::Maps(m) => {\n\n for (key, map) in m.iter() {\n\n let mut newmap = map.clone();\n\n if newmap.key.is_none() {\n\n newmap.key = Some(key.to_owned());\n\n }\n\n mm.push(newmap);\n\n }\n\n },\n\n }\n\n\n", "file_path": "webnis-server/src/config.rs", "rank": 57, "score": 35739.59200540027 }, { "content": "fn nss_error(err: NssError, errnop: *mut i32) -> i32 {\n\n let (errno, status) = match err {\n\n NssError::InsufficientBuffer => (ERANGE, NssStatus::TryAgain),\n\n NssError::NotFound => (ENOENT, NssStatus::NotFound),\n\n NssError::Unavailable => (EAGAIN, NssStatus::Unavailable),\n\n NssError::TryAgain => (EAGAIN, NssStatus::TryAgain),\n\n NssError::TimedOut => (ETIMEDOUT, NssStatus::TryAgain),\n\n };\n\n unsafe { *errnop = errno };\n\n status as i32\n\n}\n\n\n", "file_path": "webnis-nss/src/nss.rs", "rank": 58, "score": 35339.77186160501 }, { "content": "fn check_authorization(req: &HttpRequest<Webnis>, domain: &str) -> Option<HttpResponse> {\n\n let webnis = req.state();\n\n\n\n // check the securenets access list.\n\n if let Some(ref sn) = webnis.inner.securenets 
{\n\n trace!(\"checking securenets\");\n\n if let Some(pa) = req.peer_addr() {\n\n let mut ip = pa.ip();\n\n trace!(\"peer ip is {}\", ip);\n\n if ip.is_loopback() {\n\n trace!(\"peer ip is loopback\");\n\n if let Some(remote) = req.connection_info().remote() {\n\n trace!(\"connectioninfo remote is {}\", remote);\n\n if let Ok(ipaddr) = IpAddr::from_str(remote) {\n\n ip = ipaddr;\n\n }\n\n }\n\n }\n\n if !sn.contains(ip) && !ip.is_loopback() {\n\n warn!(\"securenets: access denied for peer {}\", ip);\n", "file_path": "webnis-server/src/main.rs", "rank": 59, "score": 35182.398660634695 }, { "content": "// open socket, auth once, read reply, return.\n\nfn wnbind_try(user: &str, pass: &str, _debug: bool) -> Result<(), PamError> {\n\n\n\n // connect to webnis-bind.\n\n let mut socket = match UnixStream::connect(SOCKADDR) {\n\n Ok(s) => s,\n\n Err(e) => {\n\n #[cfg(debug_assertions)]\n\n {\n\n if _debug { println!(\"connect to {}: {}\", SOCKADDR, e); }\n\n }\n\n return Err(from_io_error(e));\n\n },\n\n };\n\n socket.set_read_timeout(Some(Duration::from_millis(REQUEST_READ_TIMEOUT_MS))).ok();\n\n socket.set_write_timeout(Some(Duration::from_millis(REQUEST_WRITE_TIMEOUT_MS))).ok();\n\n\n\n // send request.\n\n let b = format!(\"auth {} {}\\n\", user, pass).into_bytes();\n\n if let Err(e) = socket.write_all(&b) {\n\n #[cfg(debug_assertions)]\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 60, "score": 34719.440437244695 }, { "content": "// call wnbind_try() and sleep/retry once if we fail.\n\nfn wnbind_auth(user: &str, pass: &str, _debug: bool) -> Result<(), PamError> {\n\n for tries in 0 .. 
MAX_TRIES {\n\n match wnbind_try(user, pass, _debug) {\n\n Ok(r) => return Ok(r),\n\n Err(PamError::AUTH_ERR) => return Err(PamError::AUTH_ERR),\n\n _ => {\n\n if tries < MAX_TRIES - 1 {\n\n sleep(Duration::from_millis(RETRY_DELAY_MS));\n\n }\n\n },\n\n }\n\n }\n\n Err(PamError::AUTHINFO_UNAVAIL)\n\n}\n\n\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 61, "score": 34719.440437244695 }, { "content": "fn nss_result<T>(res: NssResult<T>, errnop: *mut i32) -> i32 {\n\n match res {\n\n Ok(_) => NssStatus::Success as i32,\n\n Err(err) => nss_error(err, errnop),\n\n }\n\n}\n\n\n", "file_path": "webnis-nss/src/nss.rs", "rank": 62, "score": 32847.95243237174 }, { "content": "// Make a new TcpListener, and if it's a V6 listener, set the\n\n// V6_V6ONLY socket option on it.\n\nfn make_listener(addr: &SocketAddr) -> std::io::Result<std::net::TcpListener> {\n\n let s = if addr.is_ipv6() {\n\n let s = net2::TcpBuilder::new_v6()?;\n\n s.only_v6(true)?;\n\n s\n\n } else {\n\n net2::TcpBuilder::new_v4()?\n\n };\n\n s.reuse_address(true).ok();\n\n s.bind(addr)?;\n\n s.listen(128)\n\n}\n\n\n", "file_path": "webnis-server/src/main.rs", "rank": 63, "score": 32156.166278479926 }, { "content": "fn handle_auth(req: &HttpRequest<Webnis>) -> Box<Future<Item = HttpResponse, Error = actix_web::Error>> {\n\n let domain = match Path::<String>::extract(req) {\n\n Err(_) => {\n\n return Box::new(future::ok(\n\n HttpResponse::InternalServerError().body(\"handle_auth should not fail\\n\"),\n\n ));\n\n },\n\n Ok(d) => d,\n\n };\n\n if let Some(denied) = check_authorization(req, &domain) {\n\n return Box::new(future::ok(denied));\n\n }\n\n\n\n let is_json = match req.request().headers().get(\"content-type\") {\n\n Some(ct) => ct == \"application/json\" || ct == \"text/json\",\n\n None => false,\n\n };\n\n\n\n let webnis = req.state().clone();\n\n let domain = domain.clone();\n\n req.body()\n\n .limit(1024)\n\n .from_err()\n\n .and_then(move |data| future::ok(webnis.handle_auth(domain, is_json, 
data.to_vec()).into()))\n\n .responder()\n\n}\n\n\n", "file_path": "webnis-server/src/main.rs", "rank": 64, "score": 31328.185649263723 }, { "content": "// This function can call itself recursively to keep on\n\n// generating futures so as to retry.\n\n//\n\n// On errors (except 404) we cycle to the next server.\n\n//\n\n// If there is a serious error from hyper::Client that we do not reckognize,\n\n// we throw away the current hyper::Client instance and create a new one.\n\n//\n\n// This guards against bugs in hyper::Client or its dependencies\n\n// that can get a hyper::Client stuck, see:\n\n//\n\n// https://github.com/hyperium/hyper/issues/1422\n\n// https://github.com/rust-lang/rust/issues/47955\n\n//\n\nfn req_with_retries(ctx: &Context, path: String, authorization: String, body: Option<String>, try_no: u32) -> Box<Future<Item=String, Error=io::Error> + Send> {\n\n\n\n let ctx_clone = ctx.clone();\n\n\n\n let (client, seqno) = {\n\n let mut guard = ctx.http_client.lock().unwrap();\n\n let http_client = &mut *guard;\n\n if http_client.client.is_none() {\n\n // create a new http client.\n\n http_client.client.get_or_insert_with(|| new_client(&ctx.config));\n\n http_client.seqno += 1;\n\n }\n\n let cc = http_client.client.as_ref().unwrap().clone();\n\n (cc, http_client.seqno)\n\n };\n\n\n\n // build the uri based on the currently active webnis server.\n\n let server = &ctx.config.servers[seqno % ctx.config.servers.len()];\n\n let uri = build_uri(server, &path);\n\n let method = if body.is_some() { Method::POST } else { Method::GET };\n", "file_path": "webnis-bind/src/request.rs", "rank": 65, "score": 24179.406497484997 }, { "content": "\n\nuse std::time::Duration;\n\nuse std::os::unix::net::UnixStream;\n\nuse std::io::{BufRead,BufReader};\n\nuse std::thread::sleep;\n\nuse std::io::Write;\n\n\n\nuse super::nss::{Passwd,Group,uid_t,gid_t,NssResult,NssError};\n\n\n\nstatic SOCKADDR: &'static str = \"/var/run/webnis-bind.sock\";\n\n\n\nconst MAX_TRIES: u32 = 
2;\n\nconst RETRY_DELAY_MS: u64 = 2500;\n\nconst REQUEST_READ_TIMEOUT_MS: u64 = 2500;\n\nconst REQUEST_WRITE_TIMEOUT_MS: u64 = 1000;\n\n\n\npub struct Webnis;\n\n\n\nimpl Webnis {\n\n pub fn new() -> Webnis {\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 68, "score": 7887.77648374805 }, { "content": "\n\n// type to impl the PamServiceModule on.\n\npub struct Webnis;\n\n\n\nimpl PamServiceModule for Webnis {\n\n fn authenticate(self: &Self, pam: pamsm::Pam, _pam_flags: pamsm::PamFlag, args: Vec<String>) -> PamError {\n\n\n\n // config file cmdline args.\n\n let pam_args = PamArgs::parse(&args);\n\n let _debug = (pam_args & PamArgs::DEBUG as u32) != 0;\n\n\n\n let user = match pam.get_user(None) {\n\n Ok(Some(u)) => u,\n\n Ok(None) => return PamError::USER_UNKNOWN,\n\n Err(e) => return e,\n\n };\n\n let user = match user.to_str() {\n\n Ok(s) => s,\n\n Err(_) => return PamError::AUTH_ERR,\n\n };\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 69, "score": 7884.959482847572 }, { "content": "use std;\n\nuse std::time::Duration;\n\nuse std::os::unix::net::UnixStream;\n\nuse std::io::{BufRead,BufReader};\n\nuse std::thread::sleep;\n\nuse std::io::Write;\n\n\n\nuse percent_encoding::{\n\n percent_encode,\n\n QUERY_ENCODE_SET\n\n};\n\n\n\nuse pamsm::{self,PamServiceModule,PamError};\n\n\n\nstatic SOCKADDR: &'static str = \"/var/run/webnis-bind.sock\";\n\n\n\nconst MAX_TRIES: u32 = 2;\n\nconst RETRY_DELAY_MS: u64 = 2500;\n\nconst REQUEST_READ_TIMEOUT_MS: u64 = 2500;\n\nconst REQUEST_WRITE_TIMEOUT_MS: u64 = 1000;\n\n\n\n// the arguments that can be passed in the /etc/pam.d/FILE config file.\n\n#[allow(non_camel_case_types)]\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 72, "score": 7883.443583847631 }, { "content": " if log_enabled!(::log::Level::Debug) {\n\n ::env_logger::init();\n\n }\n\n Webnis\n\n }\n\n\n\n pub fn getgidlist(&self, name: &str) -> NssResult<(Vec<gid_t>)> {\n\n let reply = wnbind_get(\"getgidlist\", name)?;\n\n decode_gidlist(reply)\n\n 
}\n\n\n\n pub fn getgrnam(&self, grp: Group, name: &str) -> NssResult<()> {\n\n let reply = wnbind_get(\"getgrnam\", name)?;\n\n decode_group(grp, reply)\n\n }\n\n\n\n pub fn getgrgid(&self, grp: Group, gid: gid_t) -> NssResult<()> {\n\n let reply = wnbind_get(\"getgrgid\", &gid.to_string())?;\n\n decode_group(grp, reply)\n\n }\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 75, "score": 7881.294307847509 }, { "content": "\n\n pub fn getpwnam(&self, pwd: Passwd, name: &str) -> NssResult<()> {\n\n let reply = wnbind_get(\"getpwnam\", name)?;\n\n decode_passwd(pwd, reply)\n\n }\n\n\n\n pub fn getpwuid(&self, pwd: Passwd, uid: uid_t) -> NssResult<()> {\n\n let reply = wnbind_get(\"getpwuid\", &uid.to_string())?;\n\n decode_passwd(pwd, reply)\n\n }\n\n}\n\n\n\n// open socket, send one command, read reply, return.\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 82, "score": 7879.334928590337 }, { "content": " Ok(n) => n,\n\n Err(_) => {\n\n debug!(\"invalid pw_gid in answer: {}\", fields[3]);\n\n return Err(NssError::Unavailable);\n\n },\n\n };\n\n pwd.set_name(fields[0]);\n\n pwd.set_passwd(fields[1]);\n\n pwd.set_uid(uid);\n\n pwd.set_gid(gid);\n\n pwd.set_gecos(fields[4]);\n\n pwd.set_home(fields[5]);\n\n pwd.set_shell(fields[6]);\n\n\n\n pwd.result()\n\n}\n\n\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 85, "score": 7876.419644470088 }, { "content": " // get reply.\n\n let mut line = String::new();\n\n let mut rdr = BufReader::new(socket);\n\n if let Err(e) = rdr.read_line(&mut line) {\n\n debug!(\"reading from {}: {}\", SOCKADDR, e);\n\n return Err(e)?;\n\n }\n\n\n\n // split into reply-code and message-text\n\n let mut s = line.trim_right().splitn(2, ' ');\n\n let num = s.next().unwrap();\n\n let val = s.next().unwrap_or(\"\");\n\n\n\n let code = match num.parse::<u16>() {\n\n Ok(c) => c,\n\n Err(_) => {\n\n debug!(\"error: got garbage answer [{}]\", num);\n\n return Err(NssError::Unavailable);\n\n },\n\n };\n", "file_path": 
"webnis-nss/src/webnis.rs", "rank": 86, "score": 7876.419644470088 }, { "content": "\n\n match code {\n\n 200 ... 299 => {\n\n Ok(val.to_string())\n\n },\n\n 404 => Err(NssError::NotFound),\n\n 400 ... 499 => {\n\n debug!(\"error: {}\", line);\n\n Err(NssError::TryAgain)\n\n },\n\n _ => {\n\n debug!(\"error: {}\", line);\n\n Err(NssError::Unavailable)\n\n }\n\n }\n\n}\n\n\n", "file_path": "webnis-nss/src/webnis.rs", "rank": 87, "score": 7876.419644470088 }, { "content": " }\n\n Err(PamError::AUTH_ERR)\n\n\t\t},\n\n _ => {\n\n #[cfg(debug_assertions)]\n\n {\n\n if _debug { println!(\"error: {}\", line); };\n\n }\n\n Err(PamError::AUTHINFO_UNAVAIL)\n\n }\n\n }\n\n}\n\n\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 89, "score": 7876.419644470088 }, { "content": "\n\n let code = match num.parse::<u16>() {\n\n Ok(c) => c,\n\n Err(_) => {\n\n #[cfg(debug_assertions)]\n\n {\n\n if _debug { println!(\"error: got garbage answer [{}]\", line); }\n\n }\n\n return Err(PamError::AUTHINFO_UNAVAIL);\n\n },\n\n };\n\n\n\n match code {\n\n 200 ... 
299 => {\n\n Ok(())\n\n },\n\n\t\t401|403|404 => {\n\n #[cfg(debug_assertions)]\n\n {\n\n if _debug { println!(\"error: {}\", line); };\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 91, "score": 7876.419644470088 }, { "content": " {\n\n if _debug { println!(\"write to {}: {}\", SOCKADDR, e); }\n\n }\n\n return Err(from_io_error(e));\n\n }\n\n\n\n // get reply.\n\n let mut line = String::new();\n\n let mut rdr = BufReader::new(socket);\n\n if let Err(e) = rdr.read_line(&mut line) {\n\n #[cfg(debug_assertions)]\n\n {\n\n if _debug { println!(\"reading from {}: {}\", SOCKADDR, e); }\n\n }\n\n return Err(from_io_error(e));\n\n }\n\n\n\n // Now decode the line.\n\n let mut s = line.splitn(2, ' ');\n\n let num = s.next().unwrap();\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 92, "score": 7876.419644470088 }, { "content": "\n\n let pass = match pam.get_authtok(None) {\n\n Ok(Some(p)) => p,\n\n Ok(None) => return PamError::AUTH_ERR,\n\n Err(e) => return e,\n\n };\n\n let pass : String = percent_encode(pass.to_bytes(), QUERY_ENCODE_SET).collect();\n\n\n\n // run authentication.\n\n match wnbind_auth(user, &pass, _debug) {\n\n Ok(_) => PamError::SUCCESS,\n\n Err(e) => e,\n\n }\n\n }\n\n}\n\n\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 93, "score": 7876.419644470088 }, { "content": "#[allow(non_camel_case_types)]\n\nenum PamArgs {\n\n // enable debugging\n\n DEBUG = 1,\n\n // unused, use_first_pass is the default.\n\n USE_FIRST_PASS = 2,\n\n}\n\n\n\nimpl PamArgs {\n\n fn parse(args: &Vec<String>) -> u32 {\n\n let mut a = 0u32;\n\n for i in args.iter() {\n\n match i.as_str() {\n\n \"debug\" => a |= PamArgs::DEBUG as u32,\n\n \"use_first_pass\" => a |= PamArgs::USE_FIRST_PASS as u32,\n\n _ => {},\n\n }\n\n }\n\n a\n\n }\n\n}\n", "file_path": "webnis-pam/src/webnis.rs", "rank": 94, "score": 7439.7787920658775 }, { "content": "#[macro_use] extern crate log;\n\nextern crate env_logger;\n\nextern crate libc;\n\n\n\nmod webnis;\n\nmod nss;\n\nmod 
buffer;\n\n\n\npub use nss::_nss_webnis_initgroups_dyn;\n\npub use nss::_nss_webnis_getgrnam_r;\n\npub use nss::_nss_webnis_getgrgid_r;\n\npub use nss::_nss_webnis_getpwnam_r;\n\npub use nss::_nss_webnis_getpwuid_r;\n\n\n", "file_path": "webnis-nss/src/lib.rs", "rank": 95, "score": 6148.390136031542 }, { "content": "#[macro_use] extern crate pamsm;\n\nextern crate percent_encoding;\n\n\n\nmod webnis;\n\npub use webnis::Webnis;\n\n\n\npamsm_init!(Box::new(Webnis));\n\n\n", "file_path": "webnis-pam/src/lib.rs", "rank": 96, "score": 6147.119308613411 }, { "content": "// This file contains the raw _libnss_* ffi entrance points\n\nuse std;\n\nuse std::ffi::CStr;\n\n\n\nuse libc;\n\nuse libc::{c_void, c_char, size_t, group, passwd};\n\nuse libc::{ENOENT, EAGAIN, ERANGE, ETIMEDOUT};\n\n\n\npub use super::buffer::{Passwd,Group};\n\npub use libc::{uid_t, gid_t};\n\n\n\nuse super::webnis::Webnis;\n\n\n\nthread_local! {\n\n static NSS: Webnis = Webnis::new();\n\n}\n\n\n\n/// NSS FFI entry point for _initgroups_dyn()\n\n///\n\n/// _initgroups_dyn generates the data for getgrouplist(3).\n", "file_path": "webnis-nss/src/nss.rs", "rank": 97, "score": 6146.669109286636 }, { "content": " }\n\n\n\n // read /etc/ypserv.securenets if configured.\n\n let securenets = if config.server.securenets.len() > 0 {\n\n let mut iplist = IpList::new();\n\n for file in &config.server.securenets {\n\n if let Err(e) = config::read_securenets(&file, &mut iplist) {\n\n eprintln!(\"{}: {}: {}\", PROGNAME, file, e);\n\n exit(1);\n\n }\n\n }\n\n Some(iplist)\n\n } else {\n\n None\n\n };\n\n\n\n // arbitrary limit, really.\n\n raise_rlimit_nofile(64000);\n\n\n\n // initialize webnis stuff\n", "file_path": "webnis-server/src/main.rs", "rank": 98, "score": 6146.430291651692 }, { "content": "use std::borrow::Cow;\n\nuse std::collections::HashMap;\n\n\n\nuse actix_web::http::header::{self, HeaderMap, HeaderValue};\n\nuse actix_web::http::StatusCode;\n\nuse actix_web::HttpResponse;\n\n\n\nuse base64;\n\nuse 
percent_encoding::{percent_decode, utf8_percent_encode, DEFAULT_ENCODE_SET};\n\nuse pwhash;\n\nuse serde_json;\n\n\n\nuse crate::config;\n\n\n\n//pub(crate) type BoxedError = Box<::std::error::Error>;\n\n//pub(crate) type BoxedFuture = Box<Future<Item=HttpResponse, Error=BoxedError>>;\n\n//\n\n//pub(crate) fn box_error(e: impl std::error::Error + Send + Sync + 'static) -> BoxedError {\n\n// Box::new(e)\n\n//}\n", "file_path": "webnis-server/src/util.rs", "rank": 99, "score": 6146.299929940045 } ]
Rust
src/api.rs
IslandUsurper/mailchimp-rs
cfa26fffbe925df6b0878ea55bf4284097cc699f
use crate::internal::api::Api; use crate::internal::error_type::MailchimpErrorType; use crate::internal::request::MailchimpRequest; use crate::types::Ping; use serde::de::DeserializeOwned; use serde::ser::Serialize; use std::collections::HashMap; use std::rc::Rc; #[derive(Debug, Clone)] pub struct MailchimpApi { i_api: Box<Api<MailchimpRequest>>, } impl MailchimpApi { pub fn new<'a>(api_key: &'a str) -> Self { let mut creds = api_key.split('-').collect::<Vec<&str>>(); if creds.len() <= 1 { creds.push("usX"); } MailchimpApi { i_api: Box::new(Api::<MailchimpRequest>::new( creds[1], creds[0], Box::new(MailchimpRequest::new()), )), } } pub fn domain(&self) -> String { self.i_api.domain() } pub fn version(&self) -> String { self.i_api.api_version() } pub fn post<'a, T, P>(&self, endpoint: &'a str, payload: P) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, P: Serialize, { self.i_api.post_edge::<T, P>(endpoint, payload) } pub fn patch<'a, T, P>(&self, endpoint: &'a str, payload: P) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, P: Serialize, { self.i_api.patch_edge::<T, P>(endpoint, payload) } pub fn put<'a, T, P>(&self, endpoint: &'a str, payload: P) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, P: Serialize, { self.i_api.put_edge::<T, P>(endpoint, payload) } pub fn get<'a, T>( &self, endpoint: &'a str, payload: HashMap<String, String>, ) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, { self.i_api.get_edge(endpoint, payload) } pub fn delete<'a, T>( &self, endpoint: &'a str, payload: HashMap<String, String>, ) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, { self.i_api.delete_edge(endpoint, payload) } pub fn ping(&self) -> Result<Ping, MailchimpErrorType> { self.get::<Ping>("ping", HashMap::new()) } } impl Default for MailchimpApi { fn default() -> Self { MailchimpApi { i_api: Box::new(Api::<MailchimpRequest>::new( "", "", Box::new(MailchimpRequest::new()), )), } } } pub trait MailchimpApiUpdate { /** * 
Update API */ fn set_api(&mut self, api: Rc<MailchimpApi>); }
use crate::internal::api::Api; use crate::internal::error_type::MailchimpErrorType; use crate::internal::request::MailchimpRequest; use crate::types::Ping; use serde::de::DeserializeOwned; use serde::ser::Serialize; use std::collections::HashMap; use std::rc::Rc; #[derive(Debug, Clone)] pub struct MailchimpApi { i_api: Box<Api<MailchimpRequest>>, } impl MailchimpApi { pub fn new<'a>(api_key: &'a str) -> Self { let mut creds = api_key.split('-').collect::<Vec<&str>>(); if creds.len() <= 1 { creds.push("usX"); } MailchimpApi { i_api: Box::new(Api::<MailchimpRequest>::new( creds[1], creds[0], Box::new(MailchimpRequest::new()), )), } } pub fn domain(&self) -> String { self.i_api.domain() } pub fn version(&self) -> String { self.i_api.api_version() } pub fn post<'a, T, P>(&self, endpoint: &'a str, payload: P) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, P: Serialize, { self.i_api.post_edge::<T, P>(endpoint, payload) } pub fn patch<'a, T, P>(&self, endpoint: &'a str, payload: P) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, P: Serialize, { self.i_api.patch_edge::<T, P>(endpoint, payload) } pub fn put<'a, T, P>(&self, endpoint: &'a str, payload: P) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, P: Serialize, { self.i_api.put_edge::<T, P>(endpoint, payload) } pub fn get<'a, T>( &self, endpoint: &'a str, payload: HashMap<String, String>, ) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, { self.i_api.get_edge(endpoint, payload) } pub fn delete<'a, T>( &self, endpoint: &'a str, payload: HashMap<String, String>, ) -> Result<T, MailchimpErrorType> where T: DeserializeOwned, { self.i_api.delete_edge(endpoint, payload) } pub fn ping(&self) -> Result<Ping, MailchimpErrorType> { self.get::<Ping>("ping", HashMap::new(
MailchimpRequest>::new( "", "", Box::new(MailchimpRequest::new()), )), } } } pub trait MailchimpApiUpdate { /** * Update API */ fn set_api(&mut self, api: Rc<MailchimpApi>); }
)) } } impl Default for MailchimpApi { fn default() -> Self { MailchimpApi { i_api: Box::new(Api::<
random
[ { "content": "///\n\n/// ====================================================================\n\n///\n\n///\n\nfn connect_mqtt<'a>(host: &'a str, user_name: &'a str, password: &'a str) -> mqtt::Client {\n\n let endpoint = String::from(\"tcp://\") + host;\n\n let create_options = mqtt::CreateOptionsBuilder::new()\n\n .server_uri(endpoint)\n\n .persistence(mqtt::PersistenceType::None)\n\n .finalize();\n\n let cli = mqtt::Client::new(create_options).unwrap_or_else(|err| {\n\n println!(\"Error creating the client: {:?}\", err);\n\n process::exit(1);\n\n });\n\n\n\n let conn_opts = mqtt::ConnectOptionsBuilder::new()\n\n .keep_alive_interval(Duration::from_secs(20))\n\n .user_name(user_name)\n\n .password(password)\n\n .clean_session(true)\n\n .finalize();\n\n\n\n // Connect and wait for it to complete or fail\n\n match cli.connect(conn_opts) {\n\n Ok(_) => println!(\"Connect: Successs \\n\"),\n\n Err(e) => {\n\n println!(\"Unable to connect:\\n\\t{:?}\", e);\n\n process::exit(1);\n\n }\n\n };\n\n\n\n cli\n\n}\n\n\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 0, "score": 105872.37280602669 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n let mut mqtt_settings = (String::new(), String::new(), String::new());\n\n\n\n for v in env::vars() {\n\n if v.0 == \"MQTT_USER\" {\n\n mqtt_settings.0 = v.1.clone();\n\n }\n\n if v.0 == \"MQTT_PASSWORD\" {\n\n mqtt_settings.1 = v.1.clone();\n\n }\n\n if v.0 == \"MQTT_HOST\" {\n\n mqtt_settings.2 = v.1.clone();\n\n }\n\n }\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 1, "score": 84876.0846925867 }, { "content": "fn send_message<'a, T>(cli: &mqtt::Client, topic: &'a str, mas: &T)\n\nwhere\n\n T: serde::Serialize,\n\n{\n\n let payload = serde_json::to_string(mas);\n\n 
println!(\"Topic: {:?} payload {:?}\", topic, payload);\n\n // Create a message and publish it\n\n let msg = mqtt::MessageBuilder::new()\n\n .topic(topic)\n\n .payload(payload.unwrap().as_str())\n\n .qos(1)\n\n .finalize();\n\n\n\n if let Err(e) = cli.publish(msg) {\n\n println!(\"Error sending message: {:?}\", e);\n\n }\n\n}\n\n\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 2, "score": 84829.67994493178 }, { "content": "fn disconnect(cli: &mqtt::Client) {\n\n // Disconnect from the broker\n\n cli.disconnect(None).unwrap();\n\n}\n\n\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 3, "score": 70503.66969932332 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Instance of Lists\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n let lists_c = lists.iter(ListFilter::default());\n\n let mut list_id = String::new();\n\n let mut count = 0;\n\n\n\n for w in lists_c {\n\n list_id = w.id.unwrap().clone();\n", "file_path": "examples/list.rs", "rank": 4, "score": 53332.36339991426 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Conversations instance\n\n let conv = Conversations::new(api);\n\n let mut conversation_id = \"\".to_string();\n\n\n\n for it in conv.get_conversations(None) {\n\n conversation_id = it.id.clone();\n\n println!(\"\\n\\nConversation: \");\n\n println!(\"\\tid {:?}\", it.id);\n\n 
println!(\"\\tMessage Count {:?}\", it.message_count);\n\n println!(\"\\tUnread Messages {:?}\", it.unread_messages);\n", "file_path": "examples/conversations.rs", "rank": 5, "score": 53332.36339991426 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Get all campaigns in an account.\n\n let mut campaign_id = String::new();\n\n let r_campaigns = Campaigns::new(api);\n\n\n\n let mut count = 0;\n\n for w in r_campaigns.iter(CampaignFilter::default()) {\n\n count += 1;\n\n campaign_id = w.id.as_ref().unwrap().to_string();\n\n println!(\"\\n Campaign {:}\", count);\n\n println!(\"\\t Campaign ID {:?}\", campaign_id);\n", "file_path": "examples/campaigns.rs", "rank": 6, "score": 53332.36339991426 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create instance of Automations\n\n let automations = Automations::new(api);\n\n let mut last_automation_id = String::from(\"\");\n\n\n\n // Iterate through the existing Automations\n\n for w in automations.iter(AutomationsFilter::default()) {\n\n let settings = w.settings.as_ref().unwrap();\n\n last_automation_id = w.id.as_ref().unwrap().to_string();\n\n println!(\"Automation\");\n\n println!(\"ID {:?}\", w.id);\n\n println!(\"Title {:?}\", settings.title);\n", "file_path": "examples/automations.rs", "rank": 7, "score": 53332.36339991426 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp 
Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Instance of Reports\n\n let reports = Reports::new(api);\n\n\n\n // Get information about all reports.\n\n let reports_iter = reports.iter_reports(None);\n\n\n\n for report in reports_iter {\n\n print_report_type(&report);\n\n println!(\"=============================================\");\n\n }\n\n}\n\n\n", "file_path": "examples/reports.rs", "rank": 8, "score": 53332.36339991426 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Campaigns Instance\n\n let r_campaigns = Campaigns::new(api);\n\n let mut count = 0;\n\n // Get all campaigns in an account.\n\n for w in r_campaigns.iter(CampaignFilter::default()) {\n\n count += 1;\n\n println!(\"\\n Campaign {:}\", count);\n\n println!(\n\n \"\\t Campaign Title {:?}\",\n\n w.settings.as_ref().unwrap().title\n", "file_path": "examples/campaign_feedback.rs", "rank": 9, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n for l in lists.iter(ListFilter::default()) {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", 
l.name.as_ref().unwrap());\n\n println!(\"=============================================\");\n\n\n\n for c in l.get_segments(None) {\n", "file_path": "examples/list_segments.rs", "rank": 10, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n // Create Automation Instance\n\n let automations = Automations::new(api);\n\n\n\n // Example of how to create new automations\n\n let automat_resp = automations.create_automation(\n\n RecipientType::create(\"<list_id>\", \"<scope_id>\"),\n\n AutomationTriggerType::create(\"<workflow_type>\"),\n\n Some(AutomationCampaignSettingsType::create(\n\n \"<from_name>\",\n\n \"<reply_to>\",\n\n )),\n\n );\n\n\n\n match automat_resp {\n\n Ok(value) => println!(\"{:?}\", value),\n\n Err(e) => println!(\"{:?}\", e),\n\n }\n\n}\n", "file_path": "examples/automation_create.rs", "rank": 11, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n for l in lists.iter(ListFilter::default()) {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n\n println!(\"=============================================\");\n\n\n\n for c in l.get_webhooks(None) {\n\n println!(\"\\nWebhooks:\");\n\n println!(\"\\t URL {:?}\", c.url);\n\n println!(\"\\t Events {:?}\", c.events);\n\n println!(\"\\t 
Sources {:?}\", c.sources);\n\n println!(\"\\t List ID {:?}\", c.list_id);\n\n }\n\n }\n\n}\n", "file_path": "examples/list_webwooks.rs", "rank": 12, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to obtain the Mailchimp Credential\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_API_KEY\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n\n\n // Init API Instance\n\n let api = MailchimpApi::new(&apk);\n\n // Create a instance of API Root, to get the info about the current account\n\n let api_root = ApiRoot::new(api);\n\n\n\n // Get the info about the current account\n\n let info = api_root.get_info(HashMap::new());\n\n\n\n match info {\n\n Ok(account) => {\n\n println!(\"Account Info\");\n\n println!(\"Account Name {:?}\", account.account_name);\n\n println!(\"Email {:?}\", account.email);\n", "file_path": "examples/api_root.rs", "rank": 13, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_API_KEY\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API Instance\n\n let api = MailchimpApi::new(&apk);\n\n // Ping\n\n let ping_rs = api.ping();\n\n match ping_rs {\n\n Ok(value) => {\n\n println!(\"Ping ... 
{:?}\", value);\n\n }\n\n Err(e) => println!(\"Error Title: {:?} \\ndetail {:?}\", e.title, e.detail),\n\n }\n\n}\n", "file_path": "examples/api_request.rs", "rank": 14, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Conversations instance\n\n let conv = LandingPages::new(api);\n\n\n\n for it in conv.get_pages(None) {\n\n println!(\"\\n\\nPage: \");\n\n println!(\"\\tid {:?}\", it.id);\n\n println!(\"\\tname {:?}\", it.name);\n\n println!(\"\\ttitle {:?}\", it.title);\n\n println!(\"\\tdescription {:?}\", it.description);\n\n println!(\"\\ttemplate_id {:?}\", it.template_id);\n", "file_path": "examples/landing_pages.rs", "rank": 15, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create the Instance Automations\n\n let automations = Automations::new(api);\n\n\n\n // Iterate through the existing Automations\n\n for workflow in automations.iter(AutomationsFilter::default()) {\n\n println!(\n\n \"\\nAutomation Workflow ID: {:?} \\n Title: {:?}\",\n\n workflow.id,\n\n workflow.settings.as_ref().unwrap().title\n\n );\n\n // Example that how to obtail susbscribers removed\n\n for usr in workflow.get_subscribers_removed() {\n\n println!(\"Susbscriber Removed \");\n\n println!(\"ID: {:?}\", &usr.id);\n\n println!(\"Email: {:?}\", &usr.email_address);\n\n }\n\n }\n\n}\n", "file_path": "examples/automation_unsuscribers.rs", "rank": 16, 
"score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n let mut lists_c = lists.iter(ListFilter::default());\n\n let lists_it = lists_c.next();\n\n\n\n if let Some(l) = lists_it {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n", "file_path": "examples/list_members.rs", "rank": 17, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n // Create instance of AuthorizedApps\n\n let authorized_apps = AuthorizedApps::new(api);\n\n // Iterate through the existing Authorized Apps\n\n let apps = authorized_apps.iter(AuthorizedFilter::default());\n\n let mut count = 0;\n\n\n\n for app in apps {\n\n count += 1;\n\n println!(\"\\nApp {:?}\", count);\n\n println!(\"ID {:?}\", app.id);\n\n println!(\"Name {:?}\", app.name);\n\n println!(\"Descriptions {:?}\", app.description);\n\n println!(\"Users {:?}\", app.users);\n\n println!(\"===========================\");\n\n }\n\n}\n", "file_path": "examples/authorized_apps.rs", "rank": 18, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = 
env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n let lists = Lists::new(api);\n\n // Get information about all lists in the account.\n\n let lists_c = lists.iter(ListFilter::default());\n\n let mut count = 0;\n\n\n\n for w in lists_c {\n\n count += 1;\n\n println!(\"\\n\\nList {:?}\", count);\n\n println!(\"\\tid {:?}\", w.id.as_ref().unwrap().to_string());\n\n println!(\"\\tName {:?}\", w.name.as_ref().unwrap());\n\n println!(\"\\tStats {:?}\", w.stats.as_ref().unwrap());\n", "file_path": "examples/list_activity.rs", "rank": 19, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n let lists_c = lists.iter(ListFilter::default());\n\n let mut count = 0;\n\n\n\n for w in lists_c {\n\n count += 1;\n\n println!(\"\\n\\nList {:?}\", count);\n", "file_path": "examples/list_locations.rs", "rank": 20, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Instance\n\n let lists = Lists::new(api);\n\n // Get information about all lists in the account.\n\n let lists_c = lists.iter(ListFilter::default());\n\n let mut count = 0;\n\n\n\n for w in lists_c {\n\n count += 1;\n\n println!(\"\\n\\nList {:?}\", count);\n\n println!(\"\\tName {:?}\", w.name.as_ref().unwrap());\n\n 
println!(\"=============================================\");\n\n\n\n for c in w.get_clients(None, None) {\n\n println!(\"\\nClients\");\n\n println!(\"Client {:?}\", c.client);\n\n println!(\"Emails Sent {:?}\", c.members);\n\n }\n\n }\n\n}\n", "file_path": "examples/list_clients.rs", "rank": 21, "score": 51551.765899081576 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n let mut lists_c = lists.iter(ListFilter::default());\n\n let lists_it = lists_c.next();\n\n\n\n if let Some(l) = lists_it {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n", "file_path": "examples/list_member_notes.rs", "rank": 22, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n for l in lists.iter(ListFilter::default()) {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n\n println!(\"=============================================\");\n\n\n\n for c in l.get_signup_forms(None) {\n\n println!(\"\\nInterest Category:\");\n\n println!(\"\\tHeader {:?}\", c.header);\n\n println!(\"\\tContent {:?}\", c.contents);\n\n println!(\"\\tStyle {:?}\", c.styles);\n\n 
println!(\"\\tSignup Form Url {:?}\", c.signup_form_url);\n\n println!(\"\\tList ID {:?}\", c.list_id);\n\n }\n\n }\n\n}\n", "file_path": "examples/list_signup_forms.rs", "rank": 23, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists Instance\n\n let lists = Lists::new(api);\n\n // Get information about all lists in the account.\n\n let lists_c = lists.iter(ListFilter::default());\n\n let mut count = 0;\n\n\n\n for w in lists_c {\n\n count += 1;\n\n println!(\"\\n\\nList {:?}\", count);\n\n println!(\"\\tName {:?}\", w.name.as_ref().unwrap());\n", "file_path": "examples/list_abuse_reports.rs", "rank": 24, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists Instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n let lists_c = lists.iter(ListFilter::default());\n\n let mut count = 0;\n\n\n\n for w in lists_c {\n\n count += 1;\n\n println!(\"\\n\\nList {:?}\", count);\n", "file_path": "examples/list_growth_history.rs", "rank": 25, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n 
let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n for l in lists.iter(ListFilter::default()) {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n\n println!(\"=============================================\");\n\n\n\n for c in l.get_interest_categories(None) {\n\n println!(\"\\nInterest Category:\");\n\n println!(\"\\tTitle {:?}\", c.title);\n\n println!(\"\\tType {:?}\", c.ic_type);\n\n println!(\"\\tDisplay Order {:?}\", c.display_order);\n\n }\n\n }\n\n}\n", "file_path": "examples/list_interest_categories.rs", "rank": 26, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n let mut lists_c = lists.iter(ListFilter::default());\n\n let lists_it = lists_c.next();\n\n\n\n if let Some(l) = lists_it {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n", "file_path": "examples/list_member_goals.rs", "rank": 27, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n let mut lists_c = 
lists.iter(ListFilter::default());\n\n let lists_it = lists_c.next();\n\n\n\n if let Some(l) = lists_it {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n", "file_path": "examples/list_member_activity.rs", "rank": 28, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n let mut lists_c = lists.iter(ListFilter::default());\n\n let lists_it = lists_c.next();\n\n\n\n if let Some(l) = lists_it {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n", "file_path": "examples/list_member_tags.rs", "rank": 29, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Automations Instance\n\n let automations = Automations::new(api);\n\n\n\n // Iterate through the existing Automations Workflows\n\n for workflow in automations.iter(AutomationsFilter::default()) {\n\n println!(\n\n \"\\nAutomation Workflow ID: {:?} \\n Title: {:?}\",\n\n workflow.id,\n\n workflow.settings.as_ref().unwrap().title\n\n );\n\n\n", "file_path": "examples/automation_email_queue.rs", "rank": 30, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp 
= env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n for l in lists.iter(ListFilter::default()) {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n\n println!(\"=============================================\");\n\n\n\n for c in l.get_interest_categories(None) {\n", "file_path": "examples/list_ic_interest.rs", "rank": 31, "score": 49929.35240765257 }, { "content": "fn main() {\n\n // Init dotenv\n\n dotenv().ok();\n\n // Filter the env vars to get the Mailchimp Credential\n\n // mailchimp\n\n let mut env_mailchimp = env::vars().filter(|e| e.0.to_string().contains(\"MAILCHIMP_\"));\n\n let apk = env_mailchimp.next().unwrap().1;\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n\n\n // Create Lists instance\n\n let lists = Lists::new(api);\n\n\n\n // Get information about all lists in the account.\n\n for l in lists.iter(ListFilter::default()) {\n\n println!(\"\\n\\nList\");\n\n println!(\"\\tName {:?}\", l.name.as_ref().unwrap());\n\n println!(\"=============================================\");\n\n\n\n for c in l.get_merge_fields(None) {\n", "file_path": "examples/list_merge_fields.rs", "rank": 32, "score": 49929.35240765257 }, { "content": "///\n\n/// Resource Filter\n\n///\n\npub trait ResourceFilter {\n\n ///\n\n /// Build request payload for this resource\n\n ///\n\n fn build_payload(&self) -> HashMap<String, String>;\n\n}\n\n\n\n///\n\n/// Struct Simple Filter\n\n///\n\n/// It filter have the the common fields for request filter\n\n///\n\n#[derive(Debug, Clone)]\n\npub struct SimpleFilter {\n\n /// A comma-separated list of fields to return. 
Reference\n\n /// parameters of sub-objects with dot notation.\n\n pub fields: Option<String>,\n\n /// A comma-separated list of fields to exclude. Reference\n\n /// parameters of sub-objects with dot notation.\n\n pub exclude_fields: Option<String>,\n", "file_path": "src/iter.rs", "rank": 33, "score": 49053.51471816271 }, { "content": "///\n\n/// Build Iter\n\n///\n\npub trait BuildIter {\n\n /// Define the type of the Filter\n\n type FilterItem;\n\n /// Define the type of Item inner of collection\n\n type Item;\n\n /// Define the type collection\n\n type Collection;\n\n\n\n ///\n\n /// Create a resource from the data passed by parameter\n\n ///\n\n fn update_item(&self, data: &Self::Item, api: Rc<MailchimpApi>) -> Self::Item;\n\n ///\n\n /// Update Filter Offset\n\n ///\n\n fn update_filter_offset(&self, filter: &Self::FilterItem) -> Self::FilterItem;\n\n}\n\n\n\n///\n\n/// Malchimp Iterator\n", "file_path": "src/iter.rs", "rank": 34, "score": 49053.51471816271 }, { "content": "#[derive(Serialize, Deserialize, Debug, Clone)]\n\nstruct UpdateParamsForWorkflowEmail {\n\n /// Settings for the campaign including the email subject, from name, and from email address.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub settings: Option<AutomationCampaignSettingsType>,\n\n /// The delay settings for an Automation email.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub delay: Option<AutomationDelayType>,\n\n}\n\n\n\nimpl AutomationWorkflowType {\n\n // ============== Actions ==============\n\n ///\n\n /// Detiene todos los emails para esta automatización\n\n ///\n\n /// En caso de ser satisfactoria la ejecución, devuelve None,\n\n /// en caso contrario devuelve el error, con su respectiva descripción\n\n ///\n\n pub fn pause_all_emails(&self) -> Option<MailchimpErrorType> {\n\n let mut b_endpoint = self.get_base_endpoint();\n\n b_endpoint.push_str(\"/actions/pause-all-emails\");\n", "file_path": 
"src/types/automation_campaign.rs", "rank": 35, "score": 48644.13225228025 }, { "content": "///\n\n/// Definición que deben cumplir para poder extaer datos mediante HTTP\n\n///\n\npub trait HttpReq {\n\n ///\n\n /// Función para leer los recursos desde el servidor\n\n ///\n\n /// Argumentos:\n\n /// url: Url\n\n /// headers: HeaderMap\n\n ///\n\n fn get(\n\n &self,\n\n url: Url,\n\n headers: HeaderMap,\n\n basic_auth: &Option<BasicAuth>,\n\n ) -> MailchimpResult<String>;\n\n ///\n\n /// Función para crear algún recurso en el servidor\n\n ///\n\n /// Argumentos:\n\n /// url: Url\n\n /// headers: Headers\n", "file_path": "src/internal/request.rs", "rank": 36, "score": 47567.2412805979 }, { "content": "///\n\n/// Mailchimp Collection\n\n///\n\npub trait MailchimpCollection<T> {\n\n /// Total Items\n\n fn get_total_items(&self) -> u64;\n\n\n\n /// Data\n\n fn get_values(&self) -> Vec<T>;\n\n}\n\n\n", "file_path": "src/iter.rs", "rank": 38, "score": 45334.89012288788 }, { "content": "fn print_report_type(report: &ReportType) {\n\n println!(\"\\n\\nReports\");\n\n println!(\"\\tid {:?}\", report.id);\n\n println!(\"\\tcampaign_title {:?}\", report.campaign_title);\n\n println!(\"\\treport_type {:?}\", report.report_type);\n\n println!(\"\\tlist_id {:?}\", report.list_id);\n\n println!(\"\\tlist_is_active {:?}\", report.list_is_active);\n\n println!(\"\\tlist_is_active {:?}\", report.list_is_active);\n\n println!(\"\\tlist_name {:?}\", report.list_name);\n\n println!(\"\\tsubject_line {:?}\", report.subject_line);\n\n println!(\"\\tpreview_text {:?}\", report.preview_text);\n\n println!(\"\\temails_sent {:?}\", report.emails_sent);\n\n println!(\"\\tabuse_reports {:?}\", report.abuse_reports);\n\n println!(\"\\tunsubscribed {:?}\", report.unsubscribed);\n\n println!(\"\\tsend_time {:?}\", report.send_time);\n\n println!(\"\\trss_last_send {:?}\", report.rss_last_send);\n\n println!(\"\\tbounces {:?}\", report.bounces);\n\n println!(\"\\tforwards {:?}\", 
report.forwards);\n\n println!(\"\\topens {:?}\", report.opens);\n\n println!(\"\\tclicks {:?}\", report.clicks);\n", "file_path": "examples/reports.rs", "rank": 39, "score": 41069.460769126905 }, { "content": " /// Stats for the list. Many of these are cached for at least five minutes.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub stats: Option<StatisticsType>,\n\n}\n\n\n\nimpl MailchimpListStats {\n\n pub fn create_stats<'a>(data: &ListType, account_name: &'a str) -> Self {\n\n MailchimpListStats {\n\n measurement_name: \"mailchimp_lists\".to_string(),\n\n client_name: account_name.to_string(),\n\n name: data.name.clone(),\n\n stats: data.stats.clone(),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// ====================================================================\n\n///\n\n///\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 40, "score": 36594.45332543232 }, { "content": " /// The total orders for a campaign.\n\n #[serde(default)]\n\n pub report_summary_ecommerce_total_orders: u64,\n\n /// The total spent for a campaign. Calculated as the sum of\n\n /// all order totals with no deductions.\n\n #[serde(default)]\n\n pub report_summary_ecommerce_total_spent: f32,\n\n /// The total revenue for a campaign. 
Calculated as the sum of\n\n /// all order totals minus shipping and tax totals.\n\n #[serde(default)]\n\n pub report_summary_ecommerce_total_revenue: f32,\n\n}\n\n\n\nimpl MailchimpCampaignStats {\n\n pub fn create_stats<'a>(data: &CampaignType, account_name: &'a str) -> Self {\n\n let mut settings = (Some(String::new()), None, Some(String::new()));\n\n if let Some(rc) = data.recipients.as_ref() {\n\n settings.0 = rc.list_name.clone();\n\n settings.1 = rc.recipient_count;\n\n }\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 41, "score": 36590.8169420954 }, { "content": " pub report_summary_subscriber_clicks: Option<u64>,\n\n /// The number of unique clicks, divided by the total number of successful deliveries.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub report_summary_click_rate: Option<f32>,\n\n}\n\n\n\nimpl MailchimpAutomationStats {\n\n pub fn create_stats<'a>(data: &AutomationWorkflowType, account_name: &'a str) -> Self {\n\n let r_list_name = if let Some(r) = &data.recipients {\n\n r.list_name.clone()\n\n } else {\n\n None\n\n };\n\n\n\n let s_title = if let Some(r) = &data.settings {\n\n r.title.clone()\n\n } else {\n\n None\n\n };\n\n\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 42, "score": 36590.69071686691 }, { "content": "}\n\n\n\n///\n\n/// ====================================================================\n\n///\n\n/// Lists\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct MailchimpListStats {\n\n // Name of measurement\n\n #[serde(default)]\n\n pub measurement_name: String,\n\n // Mailchimp client or account name\n\n #[serde(default)]\n\n pub client_name: String,\n\n\n\n // ============ STATS =============\n\n /// The name of the list.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub name: Option<String>,\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 43, "score": 36587.53847556697 }, { "content": "/// 
====================================================================\n\n///\n\n/// Automatizaciones\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct MailchimpAutomationStats {\n\n // Name of measurement\n\n #[serde(default)]\n\n pub measurement_name: String,\n\n // Mailchimp client or account name\n\n #[serde(default)]\n\n pub client_name: String,\n\n\n\n // ============ STATS =============\n\n ///The current status of the Automation.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub status: Option<String>,\n\n /// The total number of emails sent for the Automation.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub emails_sent: Option<u64>,\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 44, "score": 36587.09623571638 }, { "content": " pub client_name: String,\n\n\n\n // ============ STATS =============\n\n /// There are four types of campaigns you can create in Mailchimp. A/B Split\n\n /// campaigns have been deprecated and variate campaigns should be used instead.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\", rename = \"type\")]\n\n pub campaign_type: Option<String>,\n\n /// The current status of the campaign.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub status: Option<String>,\n\n /// The total number of emails sent for this campaign.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub emails_sent: Option<u64>,\n\n /// List Name.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub recipients_list_name: Option<String>,\n\n /// Count of the recipients on the associated list. 
Formatted as an integer..\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub recipients_recipient_count: Option<u64>,\n\n /// The title of the Automation.\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 45, "score": 36586.657504452116 }, { "content": " pub industry_stats_bounce_rate: f32,\n\n /// The average unique click rate for all campaigns in the account’s specified industry.\n\n #[serde(default)]\n\n pub industry_stats_click_rate: f32,\n\n}\n\n\n\nimpl MailchimpAccountStats {\n\n pub fn create_stats(ar: &ApiRootType) -> Self {\n\n MailchimpAccountStats {\n\n measurement_name: \"mailchimp_account\".to_string(),\n\n client_name: ar.account_name.clone(),\n\n total_subscribers: ar.total_subscribers,\n\n industry_stats_open_rate: ar.industry_stats.open_rate,\n\n industry_stats_bounce_rate: ar.industry_stats.bounce_rate,\n\n industry_stats_click_rate: ar.industry_stats.click_rate,\n\n }\n\n }\n\n}\n\n\n\n///\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 46, "score": 36584.65892351163 }, { "content": "/// ====================================================================\n\n/// Mailchimp user account Stats\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct MailchimpAccountStats {\n\n // Name of measurement\n\n #[serde(default)]\n\n pub measurement_name: String,\n\n // Mailchimp client or account name\n\n #[serde(default)]\n\n pub client_name: String,\n\n // ============ STATS =============\n\n /// The total number of subscribers across all lists in the account.\n\n #[serde(default)]\n\n pub total_subscribers: u64,\n\n /// The average unique open rate for all campaigns in the account’s specified industry.\n\n #[serde(default)]\n\n pub industry_stats_open_rate: f32,\n\n /// The average bounce rate for all campaigns in the account’s specified industry.\n\n #[serde(default)]\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 47, "score": 36584.27707863479 }, { "content": " report_summary_open_rate: 
Some(rs_v.2),\n\n report_summary_clicks: Some(rs_v.3),\n\n report_summary_subscriber_clicks: Some(rs_v.4),\n\n report_summary_click_rate: Some(rs_v.5),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// ====================================================================\n\n///\n\n/// Campaigns\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct MailchimpCampaignStats {\n\n // Name of measurement\n\n #[serde(default)]\n\n pub measurement_name: String,\n\n // Mailchimp client or account name\n\n #[serde(default)]\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 48, "score": 36584.20242304282 }, { "content": "\n\n println!(\"MQTT Settings: {:?}\", mqtt_settings);\n\n\n\n let mqtt_client = connect_mqtt(\n\n mqtt_settings.2.as_str(),\n\n mqtt_settings.0.as_str(),\n\n mqtt_settings.1.as_str(),\n\n );\n\n\n\n // Init API\n\n let api = MailchimpApi::new(&apk);\n\n // ========== Mailchimp Account Stats ========\n\n let api_root = ApiRoot::new(api.clone());\n\n\n\n // Ejemplo de como obtener todas la automatizaciones\n\n let account_info = api_root.get_info(HashMap::new());\n\n let mut account_name = String::new();\n\n\n\n match account_info {\n\n Ok(account) => {\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 49, "score": 36583.916098548 }, { "content": " // Mailchimp client or account name\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub recipients_list_name: Option<String>,\n\n /// The title of the Automation.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub title: Option<String>,\n\n /// The total number of opens for a campaign.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub report_summary_opens: Option<u64>,\n\n /// The number of unique opens.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub report_summary_unique_opens: Option<u64>,\n\n /// The number of unique opens divided by the total number of successful deliveries.\n\n #[serde(default, 
skip_serializing_if = \"Option::is_none\")]\n\n pub report_summary_open_rate: Option<f32>,\n\n /// The total number of clicks for an campaign.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub report_summary_clicks: Option<u64>,\n\n /// The number of unique clicks.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 50, "score": 36583.340283138044 }, { "content": " let mut rs_v = (0 as u64, 0 as u64, 0.0, 0 as u64, 0 as u64, 0.0);\n\n\n\n if let Some(rp) = &data.report_summary {\n\n rs_v.0 = rp.opens;\n\n rs_v.1 = rp.unique_opens;\n\n rs_v.2 = rp.open_rate;\n\n rs_v.3 = rp.clicks;\n\n rs_v.4 = rp.subscriber_clicks;\n\n rs_v.5 = rp.click_rate;\n\n }\n\n\n\n MailchimpAutomationStats {\n\n measurement_name: \"mailchimp_automation\".to_string(),\n\n client_name: account_name.to_string(),\n\n status: data.status.clone(),\n\n emails_sent: data.emails_sent.clone(),\n\n recipients_list_name: r_list_name,\n\n title: s_title,\n\n report_summary_opens: Some(rs_v.0),\n\n report_summary_unique_opens: Some(rs_v.1),\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 51, "score": 36580.8317680975 }, { "content": " #[serde(default, skip_serializing_if = \"Option::is_none\")]\n\n pub title: Option<String>,\n\n /// The total number of opens for a campaign.\n\n #[serde(default)]\n\n pub report_summary_opens: u64,\n\n /// The number of unique opens.\n\n #[serde(default)]\n\n pub report_summary_unique_opens: u64,\n\n /// The number of unique opens divided by the total number of successful deliveries.\n\n #[serde(default)]\n\n pub report_summary_open_rate: f32,\n\n /// The total number of clicks for an campaign.\n\n #[serde(default)]\n\n pub report_summary_clicks: u64,\n\n /// The number of unique clicks.\n\n #[serde(default)]\n\n pub report_summary_subscriber_clicks: u64,\n\n /// The number of unique clicks, divided by the total number of successful deliveries.\n\n #[serde(default)]\n\n pub 
report_summary_click_rate: f32,\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 52, "score": 36580.393788222325 }, { "content": " MailchimpCampaignStats {\n\n measurement_name: \"mailchimp_campaigns\".to_string(),\n\n client_name: account_name.to_string(),\n\n campaign_type: data.campaign_type.clone(),\n\n status: data.status.clone(),\n\n emails_sent: data.emails_sent,\n\n recipients_list_name: settings.0,\n\n recipients_recipient_count: settings.1,\n\n title: settings.2,\n\n report_summary_opens: report_summary.0,\n\n report_summary_unique_opens: report_summary.1,\n\n report_summary_open_rate: report_summary.2,\n\n report_summary_clicks: report_summary.3,\n\n report_summary_subscriber_clicks: report_summary.4,\n\n report_summary_click_rate: report_summary.5,\n\n report_summary_ecommerce_total_orders: report_summary.6,\n\n report_summary_ecommerce_total_spent: report_summary.7,\n\n report_summary_ecommerce_total_revenue: report_summary.8,\n\n }\n\n }\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 53, "score": 36577.52532252185 }, { "content": " if let Some(ss) = data.settings.as_ref() {\n\n settings.2 = ss.title.clone();\n\n }\n\n\n\n let mut report_summary = (0, 0, 0.0, 0, 0, 0.0, 0, 0.0, 0.0);\n\n if let Some(rs) = data.report_summary.as_ref() {\n\n report_summary.0 = rs.opens;\n\n report_summary.1 = rs.unique_opens;\n\n report_summary.2 = rs.open_rate;\n\n report_summary.3 = rs.clicks;\n\n report_summary.4 = rs.subscriber_clicks;\n\n report_summary.5 = rs.click_rate;\n\n\n\n if let Some(e) = &rs.ecommerce {\n\n report_summary.6 = e.total_orders;\n\n report_summary.7 = e.total_spent;\n\n report_summary.8 = e.total_revenue;\n\n }\n\n }\n\n\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 54, "score": 36576.81501254635 }, { "content": "extern crate paho_mqtt as mqtt;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\nuse dotenv::dotenv;\n\nuse std::env;\n\nuse std::process;\n\n\n\nuse mailchimp::types::{\n\n ApiRootType, 
AutomationWorkflowType, CampaignType, ListType, StatisticsType,\n\n};\n\nuse mailchimp::{\n\n ApiRoot, Automations, AutomationsFilter, CampaignFilter, Campaigns, ListFilter, Lists,\n\n MailchimpApi,\n\n};\n\n\n\nuse std::collections::HashMap;\n\nuse std::time::Duration;\n\n\n\n///\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 55, "score": 36576.728576603644 }, { "content": " let stats = MailchimpAccountStats::create_stats(&account);\n\n account_name = account.account_name.clone();\n\n send_message(&mqtt_client, \"mailchimp/stats/account\", &stats);\n\n }\n\n Err(e) => println!(\"Error: {:?}\", e),\n\n }\n\n\n\n // ========== Mailchimp Automations ========\n\n let automation = Automations::new(api.clone());\n\n\n\n for aut in automation.iter(AutomationsFilter::default()) {\n\n let stats = MailchimpAutomationStats::create_stats(&aut, &account_name);\n\n send_message(&mqtt_client, \"mailchimp/stats/automations\", &stats);\n\n }\n\n // ========== Mailchimp Campaigns ========\n\n let campaigns = Campaigns::new(api.clone());\n\n\n\n for data in campaigns.iter(CampaignFilter::default()) {\n\n let stats = MailchimpCampaignStats::create_stats(&data, &account_name);\n\n send_message(&mqtt_client, \"mailchimp/stats/campaigns\", &stats);\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 56, "score": 36575.042514986984 }, { "content": "///\n\n/// Example used to share statistics from a specific Mailchimp account through MQTT and be monitored by Grafana\n\n///\n\n/// Requirements:\n\n/// You need to have some MQTT Broker active, to send the information through it\n\n///\n\n/// Create archive named ``.env`` in the root of the directory with the following info and run the example\n\n/// MAILCHIMP_API_KEY=<API KEY>\n\n/// MQTT_USER=<MQTT_USER>\n\n/// MQTT_PASSWORD=<MQTT_PASSWORD>\n\n/// MQTT_HOST=<MQTT_HOST>\n\n///\n\n/// Dependencies\n\n///\n\n/// # This library is meant to be used on development or testing environments\n\n/// # in which setting environment variables 
is not practical.\n\n/// dotenv = \"^0.13\"\n\n/// # Eclipse Paho MQTT Rust Client Library\n\n/// paho-mqtt = {version=\"^0.5\", default-features=false}\n\n///\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 57, "score": 36575.01639421398 }, { "content": " }\n\n\n\n // ========== Mailchimp Lists ========\n\n let lists = Lists::new(api.clone());\n\n\n\n for data in lists.iter(ListFilter::default()) {\n\n let stats = MailchimpListStats::create_stats(&data, &account_name);\n\n send_message(&mqtt_client, \"mailchimp/stats/lists\", &stats);\n\n }\n\n\n\n disconnect(&mqtt_client);\n\n}\n", "file_path": "examples/pub_mqtt_stats.rs", "rank": 58, "score": 36574.450789725 }, { "content": " ///\n\n /// Argument:\n\n /// note: The content of the note. Note length is limited to 1,000 characters.\n\n ///\n\n pub fn create_note<'a>(&self, note: &'a str) -> MailchimpResult<ListMemberNote> {\n\n // POST /lists/{list_id}/members/{subscriber_hash}/notes\n\n let mut endpoint = self.get_base_endpoint();\n\n endpoint.push_str(\"/notes\");\n\n let mut payload = HashMap::new();\n\n payload.insert(\"note\".to_string(), note.to_string());\n\n self._api\n\n .post::<ListMemberNote, HashMap<String, String>>(&endpoint, payload)\n\n }\n\n}\n\n\n\n///\n\n/// Get information about members in a specific Mailchimp list.\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct CollectionListMembers {\n", "file_path": "src/types/list_members.rs", "rank": 59, "score": 27.36750878835596 }, { "content": " &self,\n\n email_address: &'a str,\n\n ) -> MailchimpResult<AutomationEmailQueueType> {\n\n // POST /automations/{workflow_id}/emails/{workflow_email_id}/queue\n\n let mut queue_endpoint = self._endpoint.clone();\n\n queue_endpoint.push_str(\"/queue\");\n\n let mut payload = HashMap::new();\n\n payload.insert(\"email_address\".to_string(), email_address.to_string());\n\n self._api\n\n .post::<AutomationEmailQueueType, HashMap<String, String>>(&queue_endpoint, payload)\n\n }\n\n\n\n 
///\n\n /// Obtiene el Endpoint de este recurso\n\n ///\n\n ///\n\n pub fn get_endpoint(&self) -> &String {\n\n &self._endpoint\n\n }\n\n\n", "file_path": "src/types/workflow_email.rs", "rank": 60, "score": 27.360275640878633 }, { "content": " pub fn set_api(&mut self, api: Rc<MailchimpApi>) {\n\n self._api = api;\n\n }\n\n ///\n\n /// Set Endpoint\n\n ///\n\n pub fn set_endpoint<'a>(&mut self, endpoint: &'a str) {\n\n self._endpoint = endpoint.to_string();\n\n }\n\n ///\n\n /// Get Base Endpoint\n\n ///\n\n fn get_base_endpoint(&self) -> String {\n\n let mut endpoint = self._endpoint.clone();\n\n endpoint.push_str(format!(\"/{}\", self.id).as_str());\n\n endpoint\n\n }\n\n}\n", "file_path": "src/types/landing_pages.rs", "rank": 61, "score": 26.411082697345226 }, { "content": " ///\n\n pub fn set_endpoint<'a>(&mut self, n_endpoint: &'a str) {\n\n self._endpoint = n_endpoint.to_string();\n\n }\n\n\n\n ///\n\n /// Get resource endpoint\n\n ///\n\n ///\n\n pub fn get_base_endpoint(&self) -> String {\n\n let mut endpoint = self._endpoint.clone() + \"/\";\n\n endpoint.push_str(&self.id);\n\n endpoint\n\n }\n\n\n\n fn build_list_endpoint(&self) -> String {\n\n format!(\"/list/{}/members/{}\", self.list_id, self.id)\n\n }\n\n\n\n ///\n", "file_path": "src/types/list_members.rs", "rank": 62, "score": 26.32969723361696 }, { "content": " #[serde(default)]\n\n pub archive_type: String,\n\n}\n\n\n\nimpl Default for UploadArchive {\n\n fn default() -> Self {\n\n UploadArchive {\n\n archive_content: \"\".to_string(),\n\n archive_type: \"\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Variate Content\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct VariateContent {\n\n /// Label used to identify the content option.\n\n #[serde(default)]\n", "file_path": "src/types/campaign_content.rs", "rank": 63, "score": 25.24013891743674 }, { "content": "\n\n ///\n\n /// Add a member to a static segment.\n\n ///\n\n /// Arguments:\n\n /// email_address: Email address 
for a subscriber.\n\n ///\n\n pub fn add_member<'a>(&self, email_address: &'a str) -> MailchimpResult<ListMember> {\n\n // POST /lists/{list_id}/segments/{segment_id}/members\n\n let mut endpoint = self.get_base_endpoint();\n\n endpoint.push_str(\"/members\");\n\n let mut payload = HashMap::new();\n\n payload.insert(\"email_address\".to_string(), email_address.to_string());\n\n\n\n match self\n\n ._api\n\n .post::<ListMember, HashMap<String, String>>(&endpoint, payload)\n\n {\n\n Ok(data) => {\n\n let mut n_data = data.clone();\n", "file_path": "src/types/list_segments.rs", "rank": 64, "score": 24.916667525240513 }, { "content": " ) -> MailchimpResult<ConversationMessage> {\n\n let mut endpoint = self.get_base_endpoint();\n\n endpoint.push_str(\"/messages/\");\n\n endpoint.push_str(message_id);\n\n\n\n let mut payload = HashMap::new();\n\n payload.insert(\"message_id\".to_string(), message_id.to_string());\n\n self._api.get::<ConversationMessage>(&endpoint, payload)\n\n }\n\n ///\n\n /// Update API\n\n ///\n\n pub fn set_api(&mut self, n_api: Rc<MailchimpApi>) {\n\n self._api = n_api;\n\n }\n\n\n\n fn get_base_endpoint(&self) -> String {\n\n let mut endpoint = \"conversations/\".to_string();\n\n endpoint.push_str(&self.id);\n\n endpoint\n", "file_path": "src/types/conversations.rs", "rank": 65, "score": 24.860758985016567 }, { "content": " /// email_address: The list member’s email address.\n\n ///\n\n pub fn add_subscriber_to_workflow<'a>(\n\n &self,\n\n email_address: &'a str,\n\n ) -> MailchimpResult<AutomationSubscriberType> {\n\n // POST /automations/{workflow_id}/removed-subscribers\n\n let mut queue_endpoint = self.get_base_endpoint() + \"/removed-subscribers\";\n\n queue_endpoint.push_str(\"/queue\");\n\n let mut payload = HashMap::new();\n\n payload.insert(\"email_address\".to_string(), email_address.to_string());\n\n self._api\n\n .post::<AutomationSubscriberType, HashMap<String, String>>(&queue_endpoint, payload)\n\n }\n\n\n\n // ============== Private 
Functions ==============\n\n fn get_base_endpoint(&self) -> String {\n\n // /automations/{workflow_id}\n\n let mut b_endpoint = String::from(\"automations/\");\n\n b_endpoint.push_str(self.id.as_ref().unwrap());\n", "file_path": "src/types/automation_campaign.rs", "rank": 66, "score": 24.850368741373554 }, { "content": " }\n\n }\n\n }\n\n ///\n\n ///\n\n /// Argumentos\n\n /// endpoint: Endpoint hacia donde se van a enviar los datos\n\n /// payload: Dato a enviar\n\n ///\n\n ///\n\n pub fn patch_edge<'a, T, P>(&self, endpoint: &'a str, payload: P) -> MailchimpResult<T>\n\n where\n\n T: DeserializeOwned,\n\n P: Serialize,\n\n {\n\n let api_url = self.build_url(endpoint, &HashMap::new());\n\n let headers = self.build_headers();\n\n let mut result = self\n\n .req\n\n .patch::<P>(api_url, headers, payload, &self.basic_auth)?;\n", "file_path": "src/internal/api.rs", "rank": 67, "score": 24.771688906362158 }, { "content": " let endpoint = self.get_base_endpoint();\n\n match self._api.delete::<EmptyType>(&endpoint, HashMap::new()) {\n\n Ok(_) => None,\n\n Err(e) => Some(e),\n\n }\n\n }\n\n\n\n ///\n\n /// Set API\n\n ///\n\n pub fn set_api(&mut self, api: Rc<MailchimpApi>) {\n\n self._api = api;\n\n }\n\n ///\n\n /// Set Endpoint\n\n ///\n\n pub fn set_endpoint<'a>(&mut self, endpoint: &'a str) {\n\n self._endpoint = endpoint.to_string();\n\n }\n\n ///\n\n /// Get Base Endpoint\n\n ///\n\n fn get_base_endpoint(&self) -> String {\n\n let mut endpoint = self._endpoint.clone();\n\n endpoint.push_str(format!(\"/{}\", self.id).as_str());\n\n endpoint\n\n }\n\n}\n", "file_path": "src/types/list_webhooks.rs", "rank": 68, "score": 24.628258405857814 }, { "content": " let endpoint = self.get_base_endpoint();\n\n match self._api.delete::<EmptyType>(&endpoint, HashMap::new()) {\n\n Ok(_) => None,\n\n Err(e) => Some(e),\n\n }\n\n }\n\n ///\n\n /// Set API\n\n ///\n\n pub fn set_api(&mut self, api: Rc<MailchimpApi>) {\n\n self._api = api;\n\n }\n\n ///\n\n /// Set Endpoint\n\n ///\n\n 
pub fn set_endpoint<'a>(&mut self, endpoint: &'a str) {\n\n self._endpoint = endpoint.to_string();\n\n }\n\n ///\n\n /// Get Base Endpoint\n\n ///\n\n fn get_base_endpoint(&self) -> String {\n\n let mut endpoint = self._endpoint.clone();\n\n endpoint.push_str(format!(\"/{}\", self.merge_id).as_str());\n\n endpoint\n\n }\n\n}\n", "file_path": "src/types/list_merge_fields.rs", "rank": 69, "score": 24.532436298212772 }, { "content": " ///\n\n ///\n\n pub fn post_edge<'a, T, P>(&self, endpoint: &'a str, payload: P) -> MailchimpResult<T>\n\n where\n\n T: DeserializeOwned,\n\n P: Serialize,\n\n {\n\n let api_url = self.build_url(endpoint, &HashMap::new());\n\n let headers = self.build_headers();\n\n let mut result = self\n\n .req\n\n .post::<P>(api_url, headers, payload, &self.basic_auth)?;\n\n if result.len() == 0 {\n\n result = \"{}\".to_string();\n\n }\n\n match serde_json::from_str(&result) {\n\n Ok(sr) => Ok(sr),\n\n Err(e) => {\n\n println!(\"Post Edge {:?}\", e);\n\n Err(MailchimpErrorType::default())\n", "file_path": "src/internal/api.rs", "rank": 70, "score": 23.877949300933757 }, { "content": " n_data.set_api(self._api.clone());\n\n n_data.set_endpoint(&endpoint);\n\n Ok(data)\n\n }\n\n Err(e) => Err(e),\n\n }\n\n }\n\n\n\n ///\n\n /// Set API\n\n ///\n\n pub fn set_api(&mut self, n_api: Rc<MailchimpApi>) {\n\n self._api = n_api;\n\n }\n\n\n\n /// Set Endpoint\n\n pub fn set_endpoint<'a>(&mut self, n_endpoint: &'a str) {\n\n self._endpoint = n_endpoint.to_string();\n\n }\n\n\n", "file_path": "src/types/list_segments.rs", "rank": 71, "score": 23.814422332772153 }, { "content": " }\n\n}\n\n\n\nimpl WorkflowEmailType {\n\n // ============== Actions ==============\n\n ///\n\n /// Detiene un email automatizado\n\n ///\n\n /// En caso de ser satisfactoria la ejecución, devuelve None,\n\n /// en caso contrario devuelve el error, con su respectiva descripción\n\n ///\n\n pub fn pause_all_emails(&self) -> Option<MailchimpErrorType> {\n\n let mut b_endpoint = 
self._endpoint.clone();\n\n b_endpoint.push_str(\"/actions/pause\");\n\n match self\n\n ._api\n\n .post::<EmptyType, HashMap<String, String>>(b_endpoint.as_str(), HashMap::new())\n\n {\n\n Ok(_) => None,\n\n Err(e) => Some(e),\n", "file_path": "src/types/workflow_email.rs", "rank": 72, "score": 23.386574712444318 }, { "content": " #[serde(default)]\n\n pub share_password: String,\n\n}\n\n\n\nimpl Default for ShareReportType {\n\n fn default() -> Self {\n\n Self {\n\n share_url: String::new(),\n\n share_password: String::new(),\n\n }\n\n }\n\n}\n\n\n\n// ============ Reports ==============\n\n///\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct ReportType {\n\n /// A string that uniquely identifies this campaign.\n\n #[serde(default)]\n", "file_path": "src/types/report.rs", "rank": 74, "score": 23.09792933494796 }, { "content": " ///\n\n pub fn update(&self, param: ListParam) -> MailchimpResult<ListType> {\n\n // PATCH /lists/{list_id}\n\n self._api\n\n .patch::<ListType, ListParam>(self.get_base_endpoint().as_str(), param)\n\n }\n\n\n\n ///\n\n /// Get Endpoint\n\n ///\n\n fn get_base_endpoint(&self) -> String {\n\n String::from(\"lists/\") + self.id.as_ref().unwrap().as_str()\n\n }\n\n}\n\n\n\n///\n\n/// List param for new List\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct ListParam {\n", "file_path": "src/types/list.rs", "rank": 75, "score": 22.55282271947132 }, { "content": " let mut endpoint = self.get_base_endpoint() + \"/feedback/\";\n\n endpoint = endpoint + feedback_id;\n\n let mut payload = HashMap::new();\n\n if let Some(f) = fields {\n\n payload.insert(\"fields\".to_string(), f.clone());\n\n }\n\n if let Some(ex) = exclude_fields {\n\n payload.insert(\"exclude_fields\".to_string(), ex.clone());\n\n }\n\n\n\n match self._api.get::<CampaignFeedbackType>(&endpoint, payload) {\n\n Ok(feedback) => {\n\n let mut n_f = feedback;\n\n n_f.set_api(self._api.clone());\n\n n_f.set_endpoint(&endpoint);\n\n Ok(n_f)\n\n 
}\n\n Err(e) => Err(e),\n\n }\n\n }\n\n\n\n ///\n\n /// Return the endpoint path\n\n ///\n\n fn get_base_endpoint(&self) -> String {\n\n String::from(\"campaigns/\") + &self.id.as_ref().unwrap()\n\n }\n\n}\n", "file_path": "src/types/campaign.rs", "rank": 77, "score": 22.31923645106906 }, { "content": "\n\nimpl Default for TemplateContent {\n\n fn default() -> Self {\n\n TemplateContent {\n\n id: \"\".to_string(),\n\n sections: HashMap::new(),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Use this template to generate the HTML content for the campaign.\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct UploadArchive {\n\n /// The base64-encoded representation of the archive file.\n\n #[serde(default)]\n\n pub archive_content: String,\n\n /// The type of encoded file. Defaults to zip.\n\n /// Possible Values: zip tar.gz tar.bz2 tar tgz tbz\n", "file_path": "src/types/campaign_content.rs", "rank": 78, "score": 22.254100344012414 }, { "content": "\n\n///\n\n/// Campaign Feedback Builder\n\n///\n\n#[derive(Debug)]\n\npub struct CampaignFeedbackBuilder {\n\n /// Endpoint\n\n pub endpoint: String,\n\n}\n\n\n\nimpl BuildIter for CampaignFeedbackBuilder {\n\n type Item = CampaignFeedbackType;\n\n type FilterItem = SimpleFilter;\n\n type Collection = CollectionCampaignFeedback;\n\n\n\n ///\n\n /// Crea un recurso a partir del dato pasado por parámetro\n\n ///\n\n fn update_item(&self, data: &Self::Item, api: Rc<MailchimpApi>) -> Self::Item {\n\n let mut in_data = data.clone();\n", "file_path": "src/types/campaign_feedback.rs", "rank": 79, "score": 22.100328806339963 }, { "content": " fn default() -> Self {\n\n AutomationDelayType {\n\n amount: Some(0),\n\n delay_type: Some(\"\".to_string()),\n\n direction: Some(\"\".to_string()),\n\n action: Some(\"\".to_string()),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Recipient Type\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct RecipientType {\n\n /// The unique list id.\n\n #[serde(default, 
skip_serializing_if = \"Option::is_none\")]\n\n pub list_id: Option<String>,\n\n /// Desc: The status of the list used, namely if it’s deleted or disabled.\n\n #[serde(default, skip_serializing_if = \"Option::is_none\")]\n", "file_path": "src/types/automation_campaign.rs", "rank": 80, "score": 21.983442563882278 }, { "content": " ///\n\n /// Actualiza el valor del Endpoint para este recurso\n\n ///\n\n /// Argumentos:\n\n /// n_endpoint: Nuevo Endpoint\n\n ///\n\n pub fn set_endpoint<'a>(&mut self, n_endpoint: &'a str) {\n\n self._endpoint = n_endpoint.to_string();\n\n }\n\n}\n\n\n\n/// Workflow Emails Type\n\n///\n\n/// Endpoint\n\n/// GET /automations/{workflow_id}/emails\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct WorkflowEmailsType {\n\n /// An array of objects, each representing an email in an Automation workflow.\n\n #[serde(default)]\n\n pub emails: Vec<WorkflowEmailType>,\n\n /// Desc: The total number of items matching the query regardless of pagination.\n\n #[serde(default)]\n\n pub total_items: u64,\n\n /// Desc: A list of link types and descriptions for the API schema documents.\n\n #[serde(default)]\n\n pub _links: Vec<LinkType>,\n\n}\n", "file_path": "src/types/workflow_email.rs", "rank": 81, "score": 21.936457326408025 }, { "content": " pub endpoint: String,\n\n}\n\n\n\nimpl BuildIter for ListInterestBuilder {\n\n type Item = ListInterest;\n\n type FilterItem = SimpleFilter;\n\n type Collection = CollectionListInterest;\n\n\n\n ///\n\n /// Crea un recurso a partir del dato pasado por parámetro\n\n ///\n\n fn update_item(&self, data: &Self::Item, api: Rc<MailchimpApi>) -> Self::Item {\n\n let mut in_data = data.clone();\n\n in_data.set_api(api);\n\n in_data.set_endpoint(&self.endpoint);\n\n in_data\n\n }\n\n ///\n\n /// Actualiza el offset\n\n ///\n\n fn update_filter_offset(&self, filter: &Self::FilterItem) -> Self::FilterItem {\n\n let mut f = filter.clone();\n\n f.offset = Some(f.count.unwrap() + f.offset.unwrap());\n\n f\n\n 
}\n\n}\n", "file_path": "src/types/list_interests.rs", "rank": 82, "score": 21.867580503020967 }, { "content": " if result.len() == 0 {\n\n result = \"{}\".to_string();\n\n }\n\n match serde_json::from_str(&result) {\n\n Ok(sr) => Ok(sr),\n\n Err(e) => {\n\n println!(\"Post Edge {:?}\", e);\n\n Err(MailchimpErrorType::default())\n\n }\n\n }\n\n }\n\n ///\n\n ///\n\n /// Argumentos\n\n /// endpoint: Endpoint hacia donde se van a enviar los datos\n\n /// payload: Dato a enviar\n\n ///\n\n ///\n\n pub fn put_edge<'a, T, P>(&self, endpoint: &'a str, payload: P) -> MailchimpResult<T>\n\n where\n", "file_path": "src/internal/api.rs", "rank": 83, "score": 21.784384586533157 }, { "content": "impl Default for TimeSerieType {\n\n fn default() -> Self {\n\n Self {\n\n timestamp: String::new(),\n\n emails_sent: 0,\n\n unique_opens: 0,\n\n recipients_clicks: 0,\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// The url and password for the VIP report.\n\n///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct ShareReportType {\n\n /// The URL for the VIP report.\n\n #[serde(default)]\n\n pub share_url: String,\n\n /// If password protected, the password for the VIP report.\n", "file_path": "src/types/report.rs", "rank": 84, "score": 21.543347584774025 }, { "content": " ///\n\n pub fn new<'a>(title: &'a str, ic_type: &'a str, display_order: Option<u64>) -> Self {\n\n InterestCategoryParam {\n\n title: Some(title.to_string()),\n\n display_order: display_order,\n\n ic_type: Some(ic_type.to_string()),\n\n }\n\n }\n\n}\n\n\n\nimpl ListInterestCategory {\n\n ///\n\n /// Update the api instance\n\n ///\n\n pub fn set_api(&mut self, n_api: Rc<MailchimpApi>) {\n\n self._api = n_api\n\n }\n\n ///\n\n /// Update the endpoint value\n\n ///\n", "file_path": "src/types/list_interest_categories.rs", "rank": 85, "score": 21.360982133479116 }, { "content": " pub endpoint: String,\n\n}\n\n\n\nimpl BuildIter for ListInterestCategoryBuilder {\n\n type Item = ListInterestCategory;\n\n type FilterItem 
= ListInterestCategoryFilter;\n\n type Collection = CollectionListInterestCategories;\n\n\n\n ///\n\n /// Crea un recurso a partir del dato pasado por parámetro\n\n ///\n\n fn update_item(&self, data: &Self::Item, api: Rc<MailchimpApi>) -> Self::Item {\n\n let mut in_data = data.clone();\n\n in_data.set_api(api);\n\n in_data.set_endpoint(&self.endpoint);\n\n in_data\n\n }\n\n ///\n\n /// Actualiza el offset\n\n ///\n\n fn update_filter_offset(&self, filter: &Self::FilterItem) -> Self::FilterItem {\n\n let mut f = filter.clone();\n\n f.offset = Some(f.count.unwrap() + f.offset.unwrap());\n\n f\n\n }\n\n}\n", "file_path": "src/types/list_interest_categories.rs", "rank": 86, "score": 21.349196064585684 }, { "content": " /// note: The content of the note. Note length is limited to 1,000 characters.\n\n ///\n\n pub fn update<'a>(&self, note: &'a str) -> MailchimpResult<ListMemberNote> {\n\n // PATCH /lists/{list_id}/members/{subscriber_hash}/notes/{note_id}\n\n let endpoint = self.get_base_endpoint();\n\n let mut payload = HashMap::new();\n\n payload.insert(\"note\".to_string(), note.to_string());\n\n self._api\n\n .patch::<ListMemberNote, HashMap<String, String>>(&endpoint, payload)\n\n }\n\n\n\n ///\n\n /// Private function to build endpoint string\n\n ///\n\n fn get_base_endpoint(&self) -> String {\n\n // /lists/{list_id}/members/{subscriber_hash}/notes\n\n format!(\"{:?}/{:?}\", self._endpoint, self.id)\n\n }\n\n}\n\n\n", "file_path": "src/types/list_member_notes.rs", "rank": 87, "score": 21.29781791874657 }, { "content": " /// Details about the specific feedback item.\n\n #[serde(default)]\n\n pub details: String,\n\n}\n\n\n\nimpl Default for ChecklistItem {\n\n fn default() -> Self {\n\n ChecklistItem {\n\n item_type: \"\".to_string(),\n\n id: 0,\n\n heading: \"\".to_string(),\n\n details: \"\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n///\n\n/// Review the send checklist for your campaign, and resolve any issues before sending.\n\n///\n\n#[derive(Serialize, 
Deserialize, Debug, Clone)]\n", "file_path": "src/types/campaign_send_checklist.rs", "rank": 88, "score": 21.17583272623427 }, { "content": " ///\n\n pub fn get_specific_interest<'a>(\n\n &self,\n\n interest_id: &'a str,\n\n ) -> MailchimpResult<ListInterestCategory> {\n\n // GET /lists/{list_id}/interest-categories/{interest_category_id}/interests/{interest_id}\n\n let mut endpoint = self.get_base_endpoint();\n\n endpoint.push_str(\"/interests/\");\n\n endpoint.push_str(interest_id);\n\n\n\n self._api\n\n .get::<ListInterestCategory>(&endpoint, HashMap::new())\n\n }\n\n\n\n ///\n\n /// Private function to build endpoint string\n\n ///\n\n fn get_base_endpoint(&self) -> String {\n\n // /lists/{list_id}/interest-categories/{interest_category_id}\n\n let mut endpoint = self._endpoint.clone();\n", "file_path": "src/types/list_interest_categories.rs", "rank": 89, "score": 21.154512719748418 }, { "content": "///\n\n#[derive(Serialize, Deserialize, Debug, Clone)]\n\npub struct ListNote {\n\n /// The note id.\n\n #[serde(default)]\n\n pub note_id: u64,\n\n /// The date and time the note was created in ISO 8601 format.\n\n #[serde(default)]\n\n pub created_at: String,\n\n /// The author of the note.\n\n #[serde(default)]\n\n pub created_by: String,\n\n /// The content of the note.\n\n #[serde(default)]\n\n pub note: String,\n\n}\n\n\n\nimpl Default for ListNote {\n\n fn default() -> Self {\n\n ListNote {\n", "file_path": "src/types/list_members.rs", "rank": 90, "score": 21.139490918920288 }, { "content": " ///\n\n /// Return de base endpoint for the resource\n\n ///\n\n fn get_base_endpoint(&self) -> String {\n\n let mut endpoint = self._endpoint.clone();\n\n endpoint.push_str(format!(\"/{}\", &self.id).as_str());\n\n endpoint\n\n }\n\n}\n", "file_path": "src/types/list_segments.rs", "rank": 91, "score": 21.10174605354453 }, { "content": " }\n\n }\n\n\n\n ///\n\n /// Inicia un email automatizado\n\n ///\n\n /// En caso de ser satisfactoria la ejecución, devuelve None,\n\n 
/// en caso contrario devuelve el error, con su respectiva descripción\n\n ///\n\n pub fn start_all_emails(&self) -> Option<MailchimpErrorType> {\n\n let mut b_endpoint = self._endpoint.clone();\n\n b_endpoint.push_str(\"/actions/start\");\n\n match self\n\n ._api\n\n .post::<EmptyType, HashMap<String, String>>(b_endpoint.as_str(), HashMap::new())\n\n {\n\n Ok(_) => None,\n\n Err(e) => Some(e),\n\n }\n\n }\n", "file_path": "src/types/workflow_email.rs", "rank": 92, "score": 21.073559813717726 }, { "content": " ///\n\n pub fn get_feedbacks(\n\n &self,\n\n fields: Option<String>,\n\n exclude_fields: Option<String>,\n\n ) -> MalchimpIter<CampaignFeedbackBuilder> {\n\n // GET /campaigns/{campaign_id}/feedback\n\n let endpoint = self.get_base_endpoint() + \"/feedback\";\n\n let mut filters = SimpleFilter::default();\n\n\n\n if let Some(f) = fields {\n\n filters.fields = Some(f.clone())\n\n }\n\n if let Some(ex) = exclude_fields {\n\n filters.exclude_fields = Some(ex.clone())\n\n }\n\n\n\n let payload = filters.build_payload();\n\n let response = self\n\n ._api\n", "file_path": "src/types/campaign.rs", "rank": 93, "score": 21.055367719143284 }, { "content": "\n\nimpl ResourceFilter for CampaignFilter {\n\n fn build_payload(&self) -> HashMap<String, String> {\n\n let mut payload = HashMap::new();\n\n\n\n if self.fields.is_some() {\n\n payload.insert(\"fields\".to_string(), self.fields.as_ref().unwrap().clone());\n\n }\n\n if self.exclude_fields.is_some() {\n\n payload.insert(\n\n \"exclude_fields\".to_string(),\n\n self.exclude_fields.as_ref().unwrap().clone(),\n\n );\n\n }\n\n if self.count.is_some() {\n\n payload.insert(\n\n \"count\".to_string(),\n\n format!(\"{:}\", self.count.as_ref().unwrap().clone()),\n\n );\n\n }\n", "file_path": "src/campaigns.rs", "rank": 94, "score": 20.90651879950199 }, { "content": "}\n\n\n\nimpl ResourceFilter for MessagesFilter {\n\n fn build_payload(&self) -> HashMap<String, String> {\n\n let mut payload = HashMap::new();\n\n\n\n if 
self.fields.is_some() {\n\n payload.insert(\"fields\".to_string(), self.fields.as_ref().unwrap().clone());\n\n }\n\n if self.exclude_fields.is_some() {\n\n payload.insert(\n\n \"exclude_fields\".to_string(),\n\n self.exclude_fields.as_ref().unwrap().clone(),\n\n );\n\n }\n\n if self.count.is_some() {\n\n payload.insert(\n\n \"count\".to_string(),\n\n format!(\"{:}\", self.count.as_ref().unwrap().clone()),\n\n );\n", "file_path": "src/types/conversation_messages.rs", "rank": 95, "score": 20.90651879950199 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl ResourceFilter for ReportsFilter {\n\n fn build_payload(&self) -> HashMap<String, String> {\n\n let mut payload = HashMap::new();\n\n\n\n if self.fields.is_some() {\n\n payload.insert(\"fields\".to_string(), self.fields.as_ref().unwrap().clone());\n\n }\n\n if self.exclude_fields.is_some() {\n\n payload.insert(\n\n \"exclude_fields\".to_string(),\n\n self.exclude_fields.as_ref().unwrap().clone(),\n\n );\n\n }\n\n if self.count.is_some() {\n\n payload.insert(\n\n \"count\".to_string(),\n", "file_path": "src/types/report.rs", "rank": 96, "score": 20.890013775687876 }, { "content": " let mut endpoint = self.get_base_endpoint().clone();\n\n endpoint.push_str(\"/emails/\");\n\n endpoint.push_str(workflow_email_id);\n\n\n\n let payload = UpdateParamsForWorkflowEmail {\n\n settings: Some(settings.clone()),\n\n delay: Some(delay.clone()),\n\n };\n\n\n\n let response = self\n\n ._api\n\n .patch::<WorkflowEmailType, UpdateParamsForWorkflowEmail>(endpoint.as_str(), payload);\n\n match response {\n\n Ok(workflow_email) => {\n\n let mut eml = workflow_email;\n\n eml.set_api(self._api.clone());\n\n eml.set_endpoint(&endpoint);\n\n Ok(eml)\n\n }\n\n Err(e) => Err(e),\n", "file_path": "src/types/automation_campaign.rs", "rank": 97, "score": 20.881788907357297 }, { "content": "impl ResourceFilter for AuthorizedFilter {\n\n fn build_payload(&self) -> HashMap<String, String> {\n\n let mut payload = HashMap::new();\n\n\n\n if 
self.fields.is_some() {\n\n payload.insert(\"fields\".to_string(), self.fields.as_ref().unwrap().clone());\n\n }\n\n if self.exclude_fields.is_some() {\n\n payload.insert(\n\n \"exclude_fields\".to_string(),\n\n self.exclude_fields.as_ref().unwrap().clone(),\n\n );\n\n }\n\n if self.count.is_some() {\n\n payload.insert(\n\n \"count\".to_string(),\n\n format!(\"{:}\", self.count.as_ref().unwrap().clone()),\n\n );\n\n }\n\n if self.offset.is_some() {\n", "file_path": "src/authorized_apps.rs", "rank": 98, "score": 20.818246109323447 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl ResourceFilter for ListMembersFilter {\n\n fn build_payload(&self) -> HashMap<String, String> {\n\n let mut payload = HashMap::new();\n\n\n\n if self.fields.is_some() {\n\n payload.insert(\"fields\".to_string(), self.fields.as_ref().unwrap().clone());\n\n }\n\n if self.exclude_fields.is_some() {\n\n payload.insert(\n\n \"exclude_fields\".to_string(),\n\n self.exclude_fields.as_ref().unwrap().clone(),\n\n );\n\n }\n\n if self.count.is_some() {\n\n payload.insert(\n\n \"count\".to_string(),\n", "file_path": "src/types/list_members.rs", "rank": 99, "score": 20.793444485824686 } ]
Rust
grader/src/submission/mod.rs
programming-in-th/rusty-grader
7538071915566577692cafc36ba7dadcc983654c
use crate::errors::{GraderError, GraderResult}; use crate::instance; use crate::instance::{Instance, RunVerdict}; use crate::submission::result::*; use crate::utils::{get_base_path, get_code_extension, get_env, get_message}; use manifest::Manifest; use std::{fs, io::Write, path::Path, path::PathBuf, process::Command}; pub mod manifest; pub mod result; #[cfg(test)] mod tests; #[derive(Debug)] pub enum SubmissionStatus { Initialized, Compiling, Compiled, CompilationError(String), Running(u64), Done(SubmissionResult), } impl Default for SubmissionStatus { fn default() -> Self { SubmissionStatus::Initialized } } #[derive(Debug)] pub enum SubmissionMessage { Status(SubmissionStatus), RunResult(RunResult), GroupResult(GroupResult), } impl Default for SubmissionMessage { fn default() -> Self { SubmissionMessage::Status(SubmissionStatus::Initialized) } } pub trait DisplayFnT: FnMut(SubmissionMessage) {} impl<F> DisplayFnT for F where F: FnMut(SubmissionMessage) {} impl std::fmt::Debug for dyn DisplayFnT { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "DisplayFunction") } } pub type DisplayFn<'a> = Box<dyn DisplayFnT + 'a>; #[derive(Default)] pub struct Submission<'a> { pub task_id: String, pub submission_id: String, pub language: String, pub code_path: Vec<PathBuf>, pub task_manifest: Manifest, pub tmp_path: PathBuf, pub task_path: PathBuf, pub bin_path: PathBuf, pub message_handler: Option<DisplayFn<'a>>, } impl<'a> std::fmt::Display for Submission<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "Submission {} {} {} {:?} {:?} {:?} {:?} {:?}", self.task_id, self.submission_id, self.language, self.code_path, self.task_manifest, self.tmp_path, self.task_path, self.bin_path ) } } impl<'a> Submission<'a> { pub fn from<T>( task_id: T, submission_id: T, language: T, code: &[String], message_handler: Option<DisplayFn<'a>>, ) -> GraderResult<Self> where T: Into<String>, { let task_id = task_id.into(); let 
submission_id = submission_id.into(); let language = language.into(); let tmp_path = PathBuf::from(get_env("TEMPORARY_PATH")).join(&submission_id); fs::create_dir(&tmp_path)?; let extension = get_code_extension(&language); let task_path = get_base_path().join("tasks").join(&task_id); if task_path.join("compile_files").is_dir() { let entries = fs::read_dir(task_path.join("compile_files"))?; for entry in entries { let path = entry?; fs::copy(&path.path(), tmp_path.join(&path.file_name()))?; } } Ok(Submission { task_id, submission_id, language, code_path: code .iter() .enumerate() .map(|(idx, val)| { let code_path = tmp_path.join(format!("code_{}.{}", &idx.to_string(), &extension)); let mut file = fs::File::create(&code_path)?; file.write_all(val.as_bytes())?; Ok(code_path) }) .collect::<GraderResult<Vec<_>>>()?, task_manifest: Manifest::from(task_path.join("manifest.yaml"))?, tmp_path, task_path, bin_path: PathBuf::new(), message_handler, }) } pub fn compile(&mut self) -> GraderResult<()> { if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::Status(SubmissionStatus::Compiling)) } let compiler_path = get_base_path() .join("scripts") .join("compile_scripts") .join(&self.language); let mut args = vec![&self.tmp_path]; self.code_path.iter().for_each(|path| { args.push(&path); }); let mut tmp_compile_files = vec![]; if let Some(compile_files) = &self.task_manifest.compile_files { for compile_file in compile_files .get(&self.language) .ok_or(GraderError::invalid_index())? { tmp_compile_files.push(self.tmp_path.join(&compile_file)); } } tmp_compile_files.iter().for_each(|path| { args.push(&path); }); let compile_output = Command::new(compiler_path).args(args).output()?; let compile_output_args = String::from_utf8(compile_output.stdout.clone())? .lines() .map(|s| s.to_string()) .collect::<Vec<String>>(); let return_code: i32 = compile_output_args .get(0) .ok_or(GraderError::invalid_index())? 
.parse()?; self.bin_path = PathBuf::from( compile_output_args .get(1) .ok_or(GraderError::invalid_index())?, ); if let Some(message_handler) = &mut self.message_handler { match return_code { 0 => message_handler(SubmissionMessage::Status(SubmissionStatus::Compiled)), _ => message_handler(SubmissionMessage::Status( SubmissionStatus::CompilationError(String::from_utf8(compile_output.stdout)?), )), } } Ok(()) } fn run_each(&mut self, checker: &Path, runner: &Path, index: u64) -> GraderResult<RunResult> { if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::Status(SubmissionStatus::Running(index))) } let input_path = self .task_path .join("testcases") .join(format!("{}.in", index)); let output_path = self.tmp_path.join(format!("output_{}", index)); let sol_path = self .task_path .join("testcases") .join(format!("{}.sol", index)); let mut instance = instance! { time_limit: self.task_manifest.time_limit.ok_or(GraderError::invalid_value())?, memory_limit: self.task_manifest.memory_limit.ok_or(GraderError::invalid_value())? * 1000, bin_path: self.bin_path.clone(), input_path: input_path.clone(), output_path: output_path.clone(), runner_path: runner.to_path_buf() }; instance.init()?; let instance_result = instance.run()?; let mut run_result = RunResult::from( self.submission_id.to_owned(), index, instance_result.time_usage, instance_result.memory_usage, ); run_result.status = match instance_result.status { RunVerdict::VerdictOK => { let args = vec![&input_path, &output_path, &sol_path]; let checker_result = Command::new(&checker).args(args).output()?; let checker_output = String::from_utf8(checker_result.stdout)? .trim_end_matches('\n') .lines() .map(|s| s.to_string()) .collect::<Vec<String>>(); run_result.score = checker_output .get(1) .ok_or(GraderError::invalid_index())? .parse()?; run_result.message = checker_output .get(2) .map_or(String::new(), |v| v.to_owned()); checker_output .get(0) .ok_or(GraderError::invalid_index())? 
.as_str() .to_owned() } RunVerdict::VerdictTLE => String::from("Time Limit Exceeded"), RunVerdict::VerdictMLE => String::from("Memory Limit Exceeded"), RunVerdict::VerdictRE => String::from("Runtime Error"), RunVerdict::VerdictSG => String::from("Signal Error"), _ => String::from("Judge Error"), }; if run_result.message.is_empty() { run_result.message = get_message(&run_result.status); } if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::RunResult(run_result.clone())) } Ok(run_result) } pub fn run(&mut self) -> GraderResult<SubmissionResult> { let checker = self.task_manifest .checker .as_ref() .map_or(self.task_path.join("checker"), |file| { get_base_path() .join("scripts") .join("checker_scripts") .join(&file) }); let grouper = self.task_manifest .grouper .as_ref() .map_or(self.task_path.join("grouper"), |file| { get_base_path() .join("scripts") .join("grouper_scripts") .join(&file) }); let runner = get_base_path() .join("scripts") .join("runner_scripts") .join(&self.language); let mut last_test = 1; let mut total_score: f64 = 0.0; let mut total_full_score: u64 = 0; let mut group_results = Vec::new(); for (group_index, (full_score, tests)) in self.task_manifest.groups.clone().iter().enumerate() { total_full_score += full_score; let mut skip = false; let mut args = vec![full_score.to_string()]; let mut group_result = GroupResult::from( *full_score, self.submission_id.to_owned(), (group_index + 1) as u64, ); for index in last_test..(last_test + tests) { let run_result = if skip { RunResult::from(self.submission_id.to_owned(), index, 0.0, 0) } else { self.run_each(&checker, &runner, index)? }; args.push(run_result.score.to_string()); skip = &run_result.status != "Correct" && &run_result.status != "Partially Correct"; group_result.run_result.push(run_result); } if !skip { let grouper_result = Command::new(&grouper).args(args).output()?; group_result.score = String::from_utf8(grouper_result.stdout)? 
.trim_end_matches('\n') .parse()?; total_score += group_result.score; } if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::GroupResult(group_result.clone())); } group_results.push(group_result); last_test += tests; } let submission_result = SubmissionResult { score: total_score, full_score: total_full_score, submission_id: self.submission_id.to_owned(), group_result: group_results, }; if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::Status(SubmissionStatus::Done( submission_result.clone(), ))); } Ok(submission_result) } } impl<'a> Drop for Submission<'a> { fn drop(&mut self) { fs::remove_dir_all(&self.tmp_path); } }
use crate::errors::{GraderError, GraderResult}; use crate::instance; use crate::instance::{Instance, RunVerdict}; use crate::submission::result::*; use crate::utils::{get_base_path, get_code_extension, get_env, get_message}; use manifest::Manifest; use std::{fs, io::Write, path::Path, path::PathBuf, process::Command}; pub mod manifest; pub mod result; #[cfg(test)] mod tests; #[derive(Debug)] pub enum SubmissionStatus { Initialized, Compiling, Compiled, CompilationError(String), Running(u64), Done(SubmissionResult), } impl Default for SubmissionStatus { fn default() -> Self { SubmissionStatus::Initialized } } #[derive(Debug)] pub enum SubmissionMessage { Status(SubmissionStatus), RunResult(RunResult), GroupResult(GroupResult), } impl Default for SubmissionMessage { fn default() -> Self { SubmissionMessage::Status(SubmissionStatus::Initialized) } } pub trait DisplayFnT: FnMut(SubmissionMessage) {} impl<F> DisplayFnT for F where F: FnMut(SubmissionMessage) {} impl std::fmt::Debug for dyn DisplayFnT { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "DisplayFunction") } } pub type DisplayFn<'a> = Box<dyn DisplayFnT + 'a>; #[derive(Default)] pub struct Submission<'a> { pub task_id: String, pub submission_id: String, pub language: String, pub code_path: Vec<PathBuf>, pub task_manifest: Manifest, pub tmp_path: PathBuf, pub task_path: PathBuf, pub bin_path: PathBuf, pub message_handler: Option<DisplayFn<'a>>, } impl<'a> std::fmt::Display for Submission<'a> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!( f, "Submission {} {} {} {:?} {:?} {:?} {:?} {:?}", self.task_id, self.submission_id, self.language, self.code_path, self.task_manifest, self.tmp_path, self.task_path, self.bin_path ) } } impl<'a> Submission<'a> { pub fn from<T>( task_id: T, submission_id: T, language: T, code: &[String], message_handler: Option<DisplayFn<'a>>, ) -> GraderResult<Self> where T: Into<String>, { let task_id = task_id.into(); let 
submission_id = submission_id.into(); let language = language.into(); let tmp_path = PathBuf::from(get_env("TEMPORARY_PATH")).join(&submission_id); fs::create_dir(&tmp_path)?; let extension = get_code_extension(&language); let task_path = get_base_path().join("tasks").join(&task_id); if task_path.join("compile_files").is_dir() { let entries = fs::read_dir(task_path.join("compile_files"))?; for entry in entries { let path = entry?; fs::copy(&path.path(), tmp_path.join(&path.file_name()))?; } } Ok(Submission { task_id, submission_id, language, code_path: code .iter() .enumerate() .map(|(idx, val)| { let code_path = tmp_path.join(format!("code_{}.{}", &idx.to_string(), &extension)); let mut file = fs::File::create(&code_path)?; file.write_all(val.as_bytes())?; Ok(code_path) }) .collect::<GraderResult<Vec<_>>>()?, task_manifest: Manifest::from(task_path.join("manifest.yaml"))?, tmp_path, task_path, bin_path: PathBuf::new(), message_handler, }) } pub fn compile(&mut self) -> GraderResult<()> { if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::Status(SubmissionStatus::Compiling)) } let compiler_path = get_base_path() .join("scripts") .join("compile_scripts") .join(&self.language); let mut args = vec![&self.tmp_path]; self.code_path.iter().for_each(|path| { args.push(&path); }); let mut tmp_compile_files = vec![]; if let Some(compile_files) = &self.task_manifest.compile_files { for compile_file in compile_files .get(&self.language) .ok_or(GraderError::invalid_index())? { tmp_compile_files.push(self.tmp_path.join(&compile_file)); } } tmp_compile_files.iter().for_each(|path| { args.push(&path); }); let compile_output = Command::new(compiler_path).args(args).output()?; let compile_output_args = String::from_utf8(compile_output.stdout.clone())? .lines() .map(|s| s.to_string()) .collect::<Vec<String>>(); let return_code: i32 = compile_output_args .get(0) .ok_or(GraderError::invalid_index())? 
.parse()?; self.bin_path = PathBuf::from( compile_output_args .get(1) .ok_or(GraderError::invalid_index())?, ); if let Some(message_handler) = &mut self.message_handler { match return_code { 0 => message_handler(SubmissionMessage::Status(SubmissionStatus::Compiled)), _ => message_handler(SubmissionMessage::Status( SubmissionStatus::CompilationError(String::from_utf8(compile_output.stdout)?), )), } } Ok(()) }
pub fn run(&mut self) -> GraderResult<SubmissionResult> { let checker = self.task_manifest .checker .as_ref() .map_or(self.task_path.join("checker"), |file| { get_base_path() .join("scripts") .join("checker_scripts") .join(&file) }); let grouper = self.task_manifest .grouper .as_ref() .map_or(self.task_path.join("grouper"), |file| { get_base_path() .join("scripts") .join("grouper_scripts") .join(&file) }); let runner = get_base_path() .join("scripts") .join("runner_scripts") .join(&self.language); let mut last_test = 1; let mut total_score: f64 = 0.0; let mut total_full_score: u64 = 0; let mut group_results = Vec::new(); for (group_index, (full_score, tests)) in self.task_manifest.groups.clone().iter().enumerate() { total_full_score += full_score; let mut skip = false; let mut args = vec![full_score.to_string()]; let mut group_result = GroupResult::from( *full_score, self.submission_id.to_owned(), (group_index + 1) as u64, ); for index in last_test..(last_test + tests) { let run_result = if skip { RunResult::from(self.submission_id.to_owned(), index, 0.0, 0) } else { self.run_each(&checker, &runner, index)? }; args.push(run_result.score.to_string()); skip = &run_result.status != "Correct" && &run_result.status != "Partially Correct"; group_result.run_result.push(run_result); } if !skip { let grouper_result = Command::new(&grouper).args(args).output()?; group_result.score = String::from_utf8(grouper_result.stdout)? 
.trim_end_matches('\n') .parse()?; total_score += group_result.score; } if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::GroupResult(group_result.clone())); } group_results.push(group_result); last_test += tests; } let submission_result = SubmissionResult { score: total_score, full_score: total_full_score, submission_id: self.submission_id.to_owned(), group_result: group_results, }; if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::Status(SubmissionStatus::Done( submission_result.clone(), ))); } Ok(submission_result) } } impl<'a> Drop for Submission<'a> { fn drop(&mut self) { fs::remove_dir_all(&self.tmp_path); } }
fn run_each(&mut self, checker: &Path, runner: &Path, index: u64) -> GraderResult<RunResult> { if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::Status(SubmissionStatus::Running(index))) } let input_path = self .task_path .join("testcases") .join(format!("{}.in", index)); let output_path = self.tmp_path.join(format!("output_{}", index)); let sol_path = self .task_path .join("testcases") .join(format!("{}.sol", index)); let mut instance = instance! { time_limit: self.task_manifest.time_limit.ok_or(GraderError::invalid_value())?, memory_limit: self.task_manifest.memory_limit.ok_or(GraderError::invalid_value())? * 1000, bin_path: self.bin_path.clone(), input_path: input_path.clone(), output_path: output_path.clone(), runner_path: runner.to_path_buf() }; instance.init()?; let instance_result = instance.run()?; let mut run_result = RunResult::from( self.submission_id.to_owned(), index, instance_result.time_usage, instance_result.memory_usage, ); run_result.status = match instance_result.status { RunVerdict::VerdictOK => { let args = vec![&input_path, &output_path, &sol_path]; let checker_result = Command::new(&checker).args(args).output()?; let checker_output = String::from_utf8(checker_result.stdout)? .trim_end_matches('\n') .lines() .map(|s| s.to_string()) .collect::<Vec<String>>(); run_result.score = checker_output .get(1) .ok_or(GraderError::invalid_index())? .parse()?; run_result.message = checker_output .get(2) .map_or(String::new(), |v| v.to_owned()); checker_output .get(0) .ok_or(GraderError::invalid_index())? 
.as_str() .to_owned() } RunVerdict::VerdictTLE => String::from("Time Limit Exceeded"), RunVerdict::VerdictMLE => String::from("Memory Limit Exceeded"), RunVerdict::VerdictRE => String::from("Runtime Error"), RunVerdict::VerdictSG => String::from("Signal Error"), _ => String::from("Judge Error"), }; if run_result.message.is_empty() { run_result.message = get_message(&run_result.status); } if let Some(message_handler) = &mut self.message_handler { message_handler(SubmissionMessage::RunResult(run_result.clone())) } Ok(run_result) }
function_block-full_function
[ { "content": "pub fn get_code_extension(language: &str) -> String {\n\n let config = load_yaml(get_base_path().join(\"scripts\").join(\"config.yaml\"));\n\n\n\n for lang in yaml_unwrap_hash(config, \"language\")\n\n .unwrap()\n\n .into_vec()\n\n .unwrap()\n\n {\n\n if Some(language) == lang[\"id\"].as_str() {\n\n return yaml_unwrap_hash(lang, \"extension\")\n\n .unwrap()\n\n .into_string()\n\n .unwrap();\n\n }\n\n }\n\n\n\n String::new()\n\n}\n\n\n", "file_path": "grader/src/utils.rs", "rank": 1, "score": 155540.05632385868 }, { "content": "pub fn get_base_path() -> PathBuf {\n\n PathBuf::from(env::var(\"BASE_PATH\").unwrap())\n\n}\n\n\n", "file_path": "grader/src/utils.rs", "rank": 2, "score": 149504.05042104292 }, { "content": "#[test]\n\nfn should_compile_python_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.py\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000002\", \"python\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 3, "score": 144338.36075021964 }, { "content": "#[test]\n\nfn should_compile_java_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.java\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000022\", \"java\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 4, "score": 144338.3607502196 }, { "content": "#[test]\n\nfn should_compile_go_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.go\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000020\", \"go\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n 
Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 5, "score": 144338.36075021964 }, { "content": "#[test]\n\nfn should_compile_rust_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.rs\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000003\", \"rust\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 6, "score": 144338.3607502196 }, { "content": "#[test]\n\nfn should_compile_cpp_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.cpp\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000001\", \"cpp\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 7, "score": 144338.36075021964 }, { "content": "pub fn load_yaml(path: PathBuf) -> Yaml {\n\n let file = fs::read_to_string(path).expect(\"Unable to read yaml file\");\n\n YamlLoader::load_from_str(&file)\n\n .unwrap()\n\n .into_iter()\n\n .next()\n\n .unwrap()\n\n}\n\n\n", "file_path": "grader/src/utils.rs", "rank": 8, "score": 135034.8623591428 }, { "content": "#[test]\n\nfn should_get_ok() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let base_dir = get_example_dir().join(\"etc\");\n\n let tmp_dir = TempDir::new(\"should_get_ok\");\n\n compile_cpp(&tmp_dir.0, &base_dir.join(\"a_plus_b.cpp\"));\n\n\n\n let mut instance = instance! 
{\n\n time_limit: 1.0,\n\n memory_limit: 512000,\n\n bin_path: tmp_dir.0.join(\"bin\"),\n\n input_path: get_example_dir().join(\"tasks\").join(\"a_plus_b\").join(\"testcases\").join(\"1.in\"),\n\n output_path: tmp_dir.0.join(\"output.txt\"),\n\n runner_path: get_example_dir().join(\"scripts\").join(\"runner_scripts\").join(\"cpp\")\n\n };\n\n\n\n instance.init()?;\n\n let result = instance.run()?;\n\n\n\n assert_eq!(result.status, RunVerdict::VerdictOK);\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 9, "score": 126207.60488000396 }, { "content": "#[test]\n\nfn should_handle_messaging() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.cpp\")).unwrap();\n\n let mut v: Vec<SubmissionMessage> = Vec::new();\n\n {\n\n let mut submission = Submission::from(\n\n \"a_plus_b\",\n\n \"000024\",\n\n \"cpp\",\n\n &vec![code],\n\n Some(Box::new(|msg| {\n\n v.push(msg);\n\n })),\n\n )?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n assert_eq!(_result.score, 100.0);\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "grader/src/submission/tests.rs", "rank": 10, "score": 124207.25540602443 }, { "content": "#[test]\n\nfn should_complete_initialize_instance() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let base_dir = get_example_dir().join(\"etc\");\n\n let tmp_dir = TempDir::new(\"initialize_instance\");\n\n\n\n compile_cpp(&tmp_dir.0, &base_dir.join(\"a_plus_b.cpp\"));\n\n\n\n let mut instance = instance! 
{\n\n time_limit: 1.0,\n\n memory_limit: 512000,\n\n bin_path: tmp_dir.0.join(\"bin\"),\n\n input_path: get_example_dir().join(\"tasks\").join(\"a_plus_b\").join(\"testcases\").join(\"1.in\"),\n\n output_path: tmp_dir.0.join(\"output.txt\"),\n\n runner_path: get_example_dir().join(\"scripts\").join(\"runner_scripts\").join(\"cpp\")\n\n };\n\n\n\n instance.init()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 11, "score": 122944.82667169839 }, { "content": "pub fn get_env(name: &'static str) -> String {\n\n env::var(name).unwrap()\n\n}\n\n\n", "file_path": "grader/src/utils.rs", "rank": 12, "score": 121868.84940810755 }, { "content": "#[test]\n\nfn should_run_go_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.go\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000021\", \"go\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n assert_eq!(_result.score, 100.0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 13, "score": 121015.07776738874 }, { "content": "#[test]\n\nfn should_run_python_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.py\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000011\", \"python\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n assert_eq!(_result.score, 100.0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 14, "score": 121015.07776738874 }, { "content": "#[test]\n\nfn should_run_java_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.java\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000023\", 
\"java\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n assert_eq!(_result.score, 100.0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 15, "score": 121015.07776738874 }, { "content": "#[test]\n\nfn should_run_cpp_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.cpp\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000005\", \"cpp\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n assert_eq!(_result.score, 100.0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 16, "score": 121015.07776738874 }, { "content": "#[test]\n\nfn should_run_rust_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.rs\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000015\", \"rust\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n assert_eq!(_result.score, 100.0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 17, "score": 121015.07776738874 }, { "content": "#[test]\n\nfn should_complete_initialize_submission() {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.cpp\")).unwrap();\n\n\n\n let _submission = Submission::from(\"a_plus_b\", \"000000\", \"cpp\", &[code], None);\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 18, "score": 120288.5523822789 }, { "content": "#[test]\n\nfn should_read_log_correctly_when_ok() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let test_log = get_example_dir().join(\"etc\").join(\"log_ok.txt\");\n\n let tmp_log = get_tmp_path().join(\"test_log_ok.txt\");\n\n fs::copy(&test_log, &tmp_log).unwrap();\n\n\n\n 
let instance = instance! {\n\n log_file: tmp_log,\n\n memory_limit: 4000\n\n };\n\n\n\n let result = instance.get_result()?;\n\n\n\n assert_eq!(\n\n result,\n\n InstanceResult {\n\n status: RunVerdict::VerdictOK,\n\n time_usage: 0.002,\n\n memory_usage: 480,\n\n }\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 19, "score": 119904.51061395914 }, { "content": "#[test]\n\nfn should_error_if_input_path_is_wrong() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let base_dir = get_example_dir().join(\"etc\");\n\n let tmp_dir = TempDir::new(\"test_input_path_is_wrong\");\n\n\n\n compile_cpp(&tmp_dir.0, &base_dir.join(\"a_plus_b.cpp\"));\n\n\n\n let mut instance = instance! {\n\n time_limit: 1.0,\n\n memory_limit: 512000,\n\n bin_path: tmp_dir.0.join(\"bin\"),\n\n input_path: base_dir.join(\"input_wrong_path\"),\n\n runner_path: base_dir.join(\"run_cpp\")\n\n };\n\n\n\n let _init_result = instance.init();\n\n assert_eq!(\n\n _init_result,\n\n Err(GraderError::InvalidIo {\n\n msg: String::from(\"No such file or directory (os error 2)\")\n\n })\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 20, "score": 119808.78972836585 }, { "content": "#[test]\n\nfn should_error_if_runner_path_is_wrong() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let base_dir = get_example_dir().join(\"etc\");\n\n let tmp_dir = TempDir::new(\"test_runner_path_is_wrong\");\n\n\n\n compile_cpp(&tmp_dir.0, &base_dir.join(\"a_plus_b.cpp\"));\n\n\n\n let mut instance = instance! 
{\n\n time_limit: 1.0,\n\n memory_limit: 512000,\n\n bin_path: tmp_dir.0.join(\"bin\"),\n\n input_path: get_example_dir().join(\"tasks\").join(\"a_plus_b\").join(\"testcases\").join(\"1.in\"),\n\n runner_path: base_dir.join(\"run_cpp_wrong_path\")\n\n };\n\n\n\n let _init_result = instance.init();\n\n assert_eq!(\n\n _init_result,\n\n Err(GraderError::InvalidIo {\n\n msg: String::from(\"No such file or directory (os error 2)\")\n\n })\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 21, "score": 119808.78972836585 }, { "content": "#[test]\n\nfn should_error_if_output_path_is_wrong() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let base_dir = get_example_dir().join(\"etc\");\n\n let tmp_dir = TempDir::new(\"test_output_path_is_wrong\");\n\n\n\n compile_cpp(&tmp_dir.0, &base_dir.join(\"a_plus_b.cpp\"));\n\n\n\n let mut instance = instance! {\n\n time_limit: 1.0,\n\n memory_limit: 512000,\n\n bin_path: tmp_dir.0.join(\"bin_wrong_path\"),\n\n input_path: get_example_dir().join(\"tasks\").join(\"a_plus_b\").join(\"testcases\").join(\"1.in\"),\n\n runner_path: base_dir.join(\"run_cpp\")\n\n };\n\n\n\n let _init_result = instance.init();\n\n assert_eq!(\n\n _init_result,\n\n Err(GraderError::InvalidIo {\n\n msg: String::from(\"No such file or directory (os error 2)\")\n\n })\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 22, "score": 119808.78972836585 }, { "content": "#[test]\n\nfn should_run_cpp_sg_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_SG.cpp\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000009\", \"cpp\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(_result.group_result[0].run_result[0].status, \"Signal 
Error\");\n\n assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(_result.group_result[1].run_result[0].status, \"Signal Error\");\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 23, "score": 118040.54860411456 }, { "content": "#[test]\n\nfn should_run_python_tle_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_TLE.py\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000012\", \"python\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Time Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Time Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 24, "score": 118040.54860411456 }, { "content": "#[test]\n\nfn should_run_python_mle_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_MLE.py\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000013\", \"python\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Memory Limit Exceeded\"\n\n );\n\n 
assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Memory Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 25, "score": 118040.54860411458 }, { "content": "#[test]\n\nfn should_run_rust_tle_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_TLE.rs\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000016\", \"rust\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Time Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Time Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 26, "score": 118040.54860411458 }, { "content": "#[test]\n\nfn should_run_rust_sg_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_SG.rs\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000019\", \"rust\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(_result.group_result[0].run_result[0].status, \"Signal Error\");\n\n 
assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(_result.group_result[1].run_result[0].status, \"Signal Error\");\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 27, "score": 118040.54860411458 }, { "content": "#[test]\n\nfn should_run_python_re_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_RE.py\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000014\", \"python\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Runtime Error\"\n\n );\n\n assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Runtime Error\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 28, "score": 118040.54860411456 }, { "content": "#[test]\n\nfn should_run_rust_re_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_RE.rs\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000018\", \"rust\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Runtime Error\"\n\n );\n\n assert_eq!(_result.group_result[0].run_result[1].status, 
\"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Runtime Error\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 29, "score": 118040.54860411456 }, { "content": "#[test]\n\nfn should_run_cpp_tle_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_TLE.cpp\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000006\", \"cpp\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Time Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Time Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 30, "score": 118040.54860411456 }, { "content": "#[test]\n\nfn should_run_cpp_mle_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_MLE.cpp\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000007\", \"cpp\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Memory Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n 
assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Memory Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 31, "score": 118040.54860411458 }, { "content": "#[test]\n\nfn should_run_rust_mle_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_MLE.rs\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000017\", \"rust\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Memory Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Memory Limit Exceeded\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 32, "score": 118040.54860411456 }, { "content": "#[test]\n\nfn should_run_cpp_with_header_successfully() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_h.cpp\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b_h\", \"000010\", \"cpp\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n assert_eq!(_result.score, 100.0);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 33, "score": 118040.54860411458 }, { "content": "#[test]\n\nfn should_remove_tmp_dir_after_out_of_scope() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let 
tmp_path;\n\n {\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b.cpp\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000004\", \"cpp\", &vec![code], None)?;\n\n submission.compile()?;\n\n tmp_path = submission.tmp_path.clone();\n\n }\n\n\n\n assert!(!tmp_path.exists());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 34, "score": 118040.54860411458 }, { "content": "#[test]\n\nfn should_run_cpp_re_skipped() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let code = fs::read_to_string(get_example_dir().join(\"etc\").join(\"a_plus_b_RE.cpp\")).unwrap();\n\n\n\n let mut submission = Submission::from(\"a_plus_b\", \"000008\", \"cpp\", &vec![code], None)?;\n\n submission.compile()?;\n\n\n\n let _result = submission.run()?;\n\n\n\n assert_eq!(_result.score, 0.0);\n\n\n\n assert_eq!(_result.group_result[0].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[0].run_result[0].status,\n\n \"Runtime Error\"\n\n );\n\n assert_eq!(_result.group_result[0].run_result[1].status, \"\");\n\n\n\n assert_eq!(_result.group_result[1].score, 0.0);\n\n assert_eq!(\n\n _result.group_result[1].run_result[0].status,\n\n \"Runtime Error\"\n\n );\n\n assert_eq!(_result.group_result[1].run_result[1].status, \"\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/submission/tests.rs", "rank": 35, "score": 118040.54860411456 }, { "content": "pub fn get_message(status: &str) -> String {\n\n let config = load_yaml(get_base_path().join(\"scripts\").join(\"config.yaml\"));\n\n yaml_unwrap_hash(yaml_unwrap_hash(config, \"message\").unwrap(), status)\n\n .map_or(String::new(), |value| value.into_string().unwrap())\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n use crate::utils::get_env;\n\n use std::{env, fs, path::PathBuf, process::Command};\n\n\n\n pub struct TempDir(pub PathBuf);\n\n\n\n impl Drop for TempDir {\n\n fn drop(&mut self) {\n\n fs::remove_dir_all(&self.0).expect(\"Unable to remove tmp 
directory\");\n\n }\n\n }\n\n\n\n impl TempDir {\n", "file_path": "grader/src/utils.rs", "rank": 36, "score": 112398.05422581665 }, { "content": "struct test {\n\n test *p;\n\n};\n\n\n\nlong a, b;\n\ntest *ptr;\n\n\n\nint main() {\n\n scanf(\"%lld %lld\", &a, &b);\n\n printf(\"%lld\\n\", a + b);\n\n a = (long) (ptr->p->p);\n\n\n\n return 0;\n\n}\n", "file_path": "example/etc/a_plus_b_SG.cpp", "rank": 37, "score": 102133.51087935327 }, { "content": "#[test]\n\nfn should_get_mle() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let base_dir = get_example_dir().join(\"etc\");\n\n let tmp_dir = TempDir::new(\"should_get_mle\");\n\n\n\n compile_cpp(&tmp_dir.0, &base_dir.join(\"a_plus_b_MLE.cpp\"));\n\n\n\n let mut instance = instance! {\n\n time_limit: 1.0,\n\n memory_limit: 32,\n\n bin_path: tmp_dir.0.join(\"bin\"),\n\n input_path: get_example_dir().join(\"tasks\").join(\"a_plus_b\").join(\"testcases\").join(\"1.in\"),\n\n runner_path: get_example_dir().join(\"scripts\").join(\"runner_scripts\").join(\"cpp\")\n\n };\n\n\n\n instance.init()?;\n\n let result = instance.run()?;\n\n\n\n assert_eq!(result.status, RunVerdict::VerdictMLE);\n\n Ok(())\n\n}\n", "file_path": "grader/src/instance/tests.rs", "rank": 38, "score": 101791.56737868855 }, { "content": "#[test]\n\nfn should_get_re() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let base_dir = get_example_dir().join(\"etc\");\n\n let tmp_dir = TempDir::new(\"should_get_re\");\n\n\n\n compile_cpp(&tmp_dir.0, &base_dir.join(\"a_plus_b_RE.cpp\"));\n\n\n\n let mut instance = instance! 
{\n\n time_limit: 1.0,\n\n memory_limit: 512000,\n\n bin_path: tmp_dir.0.join(\"bin\"),\n\n input_path: get_example_dir().join(\"tasks\").join(\"a_plus_b\").join(\"testcases\").join(\"1.in\"),\n\n runner_path: get_example_dir().join(\"scripts\").join(\"runner_scripts\").join(\"cpp\")\n\n };\n\n\n\n instance.init()?;\n\n let result = instance.run()?;\n\n\n\n assert_eq!(result.status, RunVerdict::VerdictRE);\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 39, "score": 101791.56737868855 }, { "content": "#[test]\n\nfn should_get_tle() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let base_dir = get_example_dir().join(\"etc\");\n\n let tmp_dir = TempDir::new(\"should_get_tle\");\n\n\n\n compile_cpp(&tmp_dir.0, &base_dir.join(\"a_plus_b_TLE.cpp\"));\n\n\n\n let mut instance = instance! {\n\n time_limit: 0.1,\n\n memory_limit: 512000,\n\n bin_path: tmp_dir.0.join(\"bin\"),\n\n input_path: get_example_dir().join(\"tasks\").join(\"a_plus_b\").join(\"testcases\").join(\"1.in\"),\n\n runner_path: get_example_dir().join(\"scripts\").join(\"runner_scripts\").join(\"cpp\")\n\n };\n\n\n\n instance.init()?;\n\n let result = instance.run()?;\n\n\n\n assert_eq!(result.status, RunVerdict::VerdictTLE);\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 40, "score": 101791.56737868855 }, { "content": "#[test]\n\nfn should_trigger_when_read_log_with_to() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let test_log = get_example_dir().join(\"etc\").join(\"log_to.txt\");\n\n let tmp_log = get_tmp_path().join(\"test_log_to.txt\");\n\n fs::copy(&test_log, &tmp_log).unwrap();\n\n\n\n let instance = instance! 
{\n\n log_file: tmp_log,\n\n memory_limit: 4000\n\n };\n\n\n\n let result = instance.get_result()?;\n\n\n\n assert_eq!(\n\n result,\n\n InstanceResult {\n\n status: RunVerdict::VerdictTLE,\n\n time_usage: 2.099,\n\n memory_usage: 448,\n\n }\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 41, "score": 99390.5316704294 }, { "content": "#[test]\n\nfn should_trigger_when_read_log_with_re() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let test_log = get_example_dir().join(\"etc\").join(\"log_re.txt\");\n\n let tmp_log = get_tmp_path().join(\"test_log_re.txt\");\n\n fs::copy(&test_log, &tmp_log).unwrap();\n\n\n\n let instance = instance! {\n\n log_file: tmp_log,\n\n memory_limit: 4000\n\n };\n\n\n\n let result = instance.get_result()?;\n\n\n\n assert_eq!(\n\n result,\n\n InstanceResult {\n\n status: RunVerdict::VerdictRE,\n\n time_usage: 0.002,\n\n memory_usage: 460,\n\n }\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 42, "score": 97153.20294227885 }, { "content": "#[test]\n\nfn should_trigger_when_read_log_with_xx() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let test_log = get_example_dir().join(\"etc\").join(\"log_xx.txt\");\n\n let tmp_log = get_tmp_path().join(\"test_log_xx.txt\");\n\n fs::copy(&test_log, &tmp_log).unwrap();\n\n\n\n let instance = instance! 
{\n\n log_file: tmp_log,\n\n memory_limit: 4000\n\n };\n\n\n\n let result = instance.get_result()?;\n\n\n\n assert_eq!(\n\n result,\n\n InstanceResult {\n\n status: RunVerdict::VerdictXX,\n\n ..Default::default()\n\n }\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 43, "score": 97153.20294227885 }, { "content": "#[test]\n\nfn should_trigger_when_read_log_with_sg() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let test_log = get_example_dir().join(\"etc\").join(\"log_sg.txt\");\n\n let tmp_log = get_tmp_path().join(\"test_log_sg.txt\");\n\n fs::copy(&test_log, &tmp_log).unwrap();\n\n\n\n let instance = instance! {\n\n log_file: tmp_log,\n\n memory_limit: 4000\n\n };\n\n\n\n let result = instance.get_result()?;\n\n\n\n assert_eq!(\n\n result,\n\n InstanceResult {\n\n status: RunVerdict::VerdictSG,\n\n time_usage: 0.006,\n\n memory_usage: 448,\n\n }\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 44, "score": 97153.20294227885 }, { "content": "#[test]\n\nfn should_trigger_when_read_log_with_mle() -> GraderResult<()> {\n\n dotenv().ok();\n\n\n\n let test_log = get_example_dir().join(\"etc\").join(\"log_mle.txt\");\n\n let tmp_log = get_tmp_path().join(\"test_log_mle.txt\");\n\n fs::copy(&test_log, &tmp_log).unwrap();\n\n\n\n let instance = instance! 
{\n\n log_file: tmp_log,\n\n memory_limit: 1000\n\n };\n\n\n\n let result = instance.get_result()?;\n\n\n\n assert_eq!(\n\n result,\n\n InstanceResult {\n\n status: RunVerdict::VerdictMLE,\n\n time_usage: 0.090,\n\n memory_usage: 1000,\n\n }\n\n );\n\n Ok(())\n\n}\n\n\n", "file_path": "grader/src/instance/tests.rs", "rank": 45, "score": 97153.20294227885 }, { "content": "#define long long long\n\n\n", "file_path": "example/tasks/a_plus_b_h/compile_files/code_0.h", "rank": 46, "score": 65664.06132823312 }, { "content": "long a_plus_b(long a, long b);\n", "file_path": "example/tasks/a_plus_b_h/compile_files/code_0.h", "rank": 47, "score": 65664.06132823312 }, { "content": "using namespace std;\n", "file_path": "example/tasks/a_plus_b_h/compile_files/code_0.h", "rank": 48, "score": 65664.06132823312 }, { "content": "fn yaml_unwrap_hash(yaml: Yaml, arg: &str) -> Option<Yaml> {\n\n yaml.into_hash().unwrap().remove(&Yaml::String(s!(arg)))\n\n}\n\n\n", "file_path": "grader/src/utils.rs", "rank": 49, "score": 64639.22159224562 }, { "content": "use crate::errors::{GraderError, GraderResult};\n\nuse crate::utils::load_yaml;\n\nuse std::{collections::BTreeMap, path::PathBuf};\n\n\n\n#[derive(Default, Debug)]\n\npub struct Manifest {\n\n pub task_id: String,\n\n pub output_only: bool,\n\n pub time_limit: Option<f64>,\n\n pub memory_limit: Option<u64>,\n\n pub limit: Option<BTreeMap<String, (f64, u64)>>,\n\n pub compile_files: Option<BTreeMap<String, Vec<String>>>,\n\n pub checker: Option<String>,\n\n pub grouper: Option<String>,\n\n pub groups: Vec<(u64, u64)>,\n\n}\n\n\n\nimpl Manifest {\n\n pub fn from(path: PathBuf) -> GraderResult<Self> {\n\n let yaml = load_yaml(path);\n", "file_path": "grader/src/submission/manifest.rs", "rank": 50, "score": 54760.089322843174 }, { "content": " limit[\"time_limit\"]\n\n .as_f64()\n\n .ok_or(GraderError::invalid_value())?,\n\n limit[\"memory_limit\"]\n\n .as_i64()\n\n .ok_or(GraderError::invalid_value())?\n\n as u64,\n\n ),\n\n ))\n\n 
})\n\n .collect::<GraderResult<BTreeMap<_, _>>>()?)\n\n })\n\n .transpose()?,\n\n compile_files: yaml[\"compile_files\"]\n\n .as_hash()\n\n .map(|compile_files| {\n\n compile_files\n\n .iter()\n\n .map(|(language, files)| -> GraderResult<(String, Vec<String>)> {\n\n Ok((\n", "file_path": "grader/src/submission/manifest.rs", "rank": 51, "score": 54758.305239062785 }, { "content": " language\n\n .as_str()\n\n .ok_or(GraderError::invalid_value())?\n\n .to_owned(),\n\n files\n\n .as_vec()\n\n .ok_or(GraderError::invalid_value())?\n\n .iter()\n\n .map(|file| {\n\n Ok(file\n\n .as_str()\n\n .ok_or(GraderError::invalid_value())?\n\n .to_owned())\n\n })\n\n .collect::<GraderResult<Vec<_>>>()?,\n\n ))\n\n })\n\n .collect()\n\n })\n\n .transpose()?,\n", "file_path": "grader/src/submission/manifest.rs", "rank": 52, "score": 54751.598665772864 }, { "content": " Ok(Manifest {\n\n task_id: yaml[\"task_id\"]\n\n .as_str()\n\n .ok_or(GraderError::invalid_value())?\n\n .to_owned(),\n\n output_only: yaml[\"output_only\"].as_bool().unwrap_or(false),\n\n time_limit: yaml[\"time_limit\"].as_f64(),\n\n memory_limit: yaml[\"memory_limit\"].as_i64().map(|limit| limit as u64),\n\n limit: yaml[\"limit\"]\n\n .as_hash()\n\n .map(|limits| -> GraderResult<BTreeMap<String, (f64, u64)>> {\n\n Ok(limits\n\n .iter()\n\n .map(|(language, limit)| {\n\n Ok((\n\n language\n\n .as_str()\n\n .ok_or(GraderError::invalid_value())?\n\n .to_owned(),\n\n (\n", "file_path": "grader/src/submission/manifest.rs", "rank": 53, "score": 54750.87484987572 }, { "content": " checker: yaml[\"checker\"].as_str().map(|checker| checker.to_owned()),\n\n grouper: yaml[\"grouper\"].as_str().map(|grouper| grouper.to_owned()),\n\n groups: yaml[\"groups\"]\n\n .as_vec()\n\n .map(|groups| {\n\n groups\n\n .iter()\n\n .map(|group| {\n\n Ok((\n\n group[\"full_score\"]\n\n .as_i64()\n\n .ok_or(GraderError::invalid_value())?\n\n as u64,\n\n group[\"tests\"]\n\n .as_i64()\n\n .ok_or(GraderError::invalid_value())?\n\n as u64,\n\n 
))\n\n })\n\n .collect::<GraderResult<Vec<_>>>()\n\n })\n\n .ok_or(GraderError::invalid_value())??,\n\n })\n\n }\n\n}\n", "file_path": "grader/src/submission/manifest.rs", "rank": 54, "score": 54750.308124731135 }, { "content": " }\n\n}\n\n\n\n#[derive(Default, Debug, Clone)]\n\npub struct GroupResult {\n\n pub score: f64,\n\n pub full_score: u64,\n\n pub submission_id: String,\n\n pub group_index: u64,\n\n pub run_result: Vec<RunResult>,\n\n}\n\n\n\nimpl GroupResult {\n\n pub fn from(full_score: u64, submission_id: String, index: u64) -> Self {\n\n GroupResult {\n\n full_score,\n\n submission_id,\n\n group_index: index,\n\n ..Default::default()\n\n }\n", "file_path": "grader/src/submission/result.rs", "rank": 72, "score": 53134.904030461425 }, { "content": "#[derive(Default, Debug, PartialEq, Clone)]\n\npub struct RunResult {\n\n pub submission_id: String,\n\n pub test_index: u64,\n\n pub status: String,\n\n pub time_usage: f64,\n\n pub memory_usage: u64,\n\n pub score: f64,\n\n pub message: String,\n\n}\n\n\n\nimpl RunResult {\n\n pub fn from(submission_id: String, index: u64, time_usage: f64, memory_usage: u64) -> Self {\n\n RunResult {\n\n submission_id,\n\n test_index: index,\n\n time_usage,\n\n memory_usage,\n\n ..Default::default()\n\n }\n", "file_path": "grader/src/submission/result.rs", "rank": 73, "score": 53134.52975409814 }, { "content": " }\n\n}\n\n\n\n#[derive(Default, Debug, Clone)]\n\npub struct SubmissionResult {\n\n pub score: f64,\n\n pub full_score: u64,\n\n pub submission_id: String,\n\n pub group_result: Vec<GroupResult>,\n\n}\n", "file_path": "grader/src/submission/result.rs", "rank": 74, "score": 53133.71078822168 }, { "content": "use super::*;\n\n\n\nuse crate::errors::GraderResult;\n\nuse crate::utils::tests::get_example_dir;\n\nuse dotenv::dotenv;\n\nuse std::fs;\n\n\n\n#[test]\n", "file_path": "grader/src/submission/tests.rs", "rank": 75, "score": 53074.85375808791 }, { "content": "struct Reader {\n\n buf: Vec<u8>,\n\n pos: usize,\n\n x: 
*mut Stdin,\n\n q: StdinLock<'static>,\n\n} //'\n\n\n\n#[allow(dead_code)]\n\nimpl Reader {\n\n fn new() -> Self {\n\n let x = Box::into_raw(Box::new(stdin()));\n\n let q = unsafe { &*x }.lock();\n\n Self {\n\n x,\n\n q,\n\n buf: v!([]),\n\n pos: 0,\n\n }\n\n }\n\n\n", "file_path": "example/etc/a_plus_b.rs", "rank": 76, "score": 52807.654067653835 }, { "content": "#include \"code_0.h\"\n\n#include <bits/stdc++.h>\n\n\n\n#define long long long\n\n\n\nusing namespace std;\n\n\n\nlong a, b;\n\n\n\nint main() {\n\n scanf(\"%lld %lld\", &a, &b);\n\n printf(\"%lld\\n\", a_plus_b(a, b));\n\n\n\n return 0;\n\n}\n", "file_path": "example/tasks/a_plus_b_h/compile_files/grader.cpp", "rank": 77, "score": 52358.71206845846 }, { "content": "struct Reader {\n\n buf: Vec<u8>,\n\n pos: usize,\n\n x: *mut Stdin,\n\n q: StdinLock<'static>,\n\n} //'\n\n\n\n#[allow(dead_code)]\n\nimpl Reader {\n\n fn new() -> Self {\n\n let x = Box::into_raw(Box::new(stdin()));\n\n let q = unsafe { &*x }.lock();\n\n Self {\n\n x,\n\n q,\n\n buf: v!([]),\n\n pos: 0,\n\n }\n\n }\n\n\n", "file_path": "example/etc/a_plus_b_RE.rs", "rank": 78, "score": 51638.82492345066 }, { "content": "struct Reader {\n\n buf: Vec<u8>,\n\n pos: usize,\n\n x: *mut Stdin,\n\n q: StdinLock<'static>,\n\n} //'\n\n\n\n#[allow(dead_code)]\n\nimpl Reader {\n\n fn new() -> Self {\n\n let x = Box::into_raw(Box::new(stdin()));\n\n let q = unsafe { &*x }.lock();\n\n Self {\n\n x,\n\n q,\n\n buf: v!([]),\n\n pos: 0,\n\n }\n\n }\n\n\n", "file_path": "example/etc/a_plus_b_MLE.rs", "rank": 79, "score": 51638.82492345066 }, { "content": "struct Reader {\n\n buf: Vec<u8>,\n\n pos: usize,\n\n x: *mut Stdin,\n\n q: StdinLock<'static>,\n\n} //'\n\n\n\n#[allow(dead_code)]\n\nimpl Reader {\n\n fn new() -> Self {\n\n let x = Box::into_raw(Box::new(stdin()));\n\n let q = unsafe { &*x }.lock();\n\n Self {\n\n x,\n\n q,\n\n buf: v!([]),\n\n pos: 0,\n\n }\n\n }\n\n\n", "file_path": "example/etc/a_plus_b_SG.rs", "rank": 80, "score": 51638.82492345066 
}, { "content": "struct Reader {\n\n buf: Vec<u8>,\n\n pos: usize,\n\n x: *mut Stdin,\n\n q: StdinLock<'static>,\n\n} //'\n\n\n\n#[allow(dead_code)]\n\nimpl Reader {\n\n fn new() -> Self {\n\n let x = Box::into_raw(Box::new(stdin()));\n\n let q = unsafe { &*x }.lock();\n\n Self {\n\n x,\n\n q,\n\n buf: v!([]),\n\n pos: 0,\n\n }\n\n }\n\n\n", "file_path": "example/etc/a_plus_b_TLE.rs", "rank": 81, "score": 51638.82492345066 }, { "content": "fn main() {\n\n let (mut rin, mut rout) = rio();\n\n l!(a, b = rin.u());\n\n writeln!(rout, \"{}\", a + b).unwrap();\n\n}\n", "file_path": "example/etc/a_plus_b.rs", "rank": 82, "score": 43260.375523017305 }, { "content": "fn main() {\n\n let (mut rin, mut rout) = rio();\n\n l!(a, b = rin.u());\n\n writeln!(rout, \"{}\", a + b).unwrap();\n\n let mut vec = vec![];\n\n for i in 0..10000000 {\n\n vec.push(i);\n\n }\n\n}\n", "file_path": "example/etc/a_plus_b_MLE.rs", "rank": 83, "score": 42190.95007319364 }, { "content": "fn main() {\n\n let (mut rin, mut rout) = rio();\n\n l!(a, b = rin.u());\n\n writeln!(rout, \"{}\", a + b).unwrap();\n\n unsafe { std::ptr::null_mut::<i32>().write(42) };\n\n}\n", "file_path": "example/etc/a_plus_b_SG.rs", "rank": 84, "score": 42190.95007319364 }, { "content": "fn main() {\n\n let (mut rin, mut rout) = rio();\n\n l!(a, b = rin.u());\n\n writeln!(rout, \"{}\", a + b).unwrap();\n\n while true {\n\n \n\n }\n\n}\n", "file_path": "example/etc/a_plus_b_TLE.rs", "rank": 85, "score": 42190.95007319364 }, { "content": "fn main() {\n\n let (mut rin, mut rout) = rio();\n\n l!(a, b = rin.u());\n\n writeln!(rout, \"{}\", a + b).unwrap();\n\n panic!();\n\n}\n", "file_path": "example/etc/a_plus_b_RE.rs", "rank": 86, "score": 42190.95007319364 }, { "content": "fn rio() -> (Reader, BufWriter<Stdout>) {\n\n (Reader::new(), BufWriter::new(stdout()))\n\n}\n\n\n", "file_path": "example/etc/a_plus_b.rs", "rank": 87, "score": 34722.58354301366 }, { "content": "fn rio() -> (Reader, BufWriter<Stdout>) {\n\n (Reader::new(), 
BufWriter::new(stdout()))\n\n}\n\n\n", "file_path": "example/etc/a_plus_b_SG.rs", "rank": 88, "score": 33938.542789986546 }, { "content": "fn rio() -> (Reader, BufWriter<Stdout>) {\n\n (Reader::new(), BufWriter::new(stdout()))\n\n}\n\n\n", "file_path": "example/etc/a_plus_b_RE.rs", "rank": 89, "score": 33938.542789986546 }, { "content": "fn rio() -> (Reader, BufWriter<Stdout>) {\n\n (Reader::new(), BufWriter::new(stdout()))\n\n}\n\n\n", "file_path": "example/etc/a_plus_b_TLE.rs", "rank": 90, "score": 33938.542789986546 }, { "content": "fn rio() -> (Reader, BufWriter<Stdout>) {\n\n (Reader::new(), BufWriter::new(stdout()))\n\n}\n\n\n", "file_path": "example/etc/a_plus_b_MLE.rs", "rank": 91, "score": 33938.542789986546 }, { "content": "\n\n pub fn run(&self) -> GraderResult<InstanceResult> {\n\n let args = self.get_run_arguments()?;\n\n Command::new(get_env(\"ISOLATE_PATH\")).args(args).output()?;\n\n\n\n let result = self.get_result()?;\n\n if result.status == RunVerdict::VerdictOK {\n\n fs::copy(&self.box_path.join(\"output\"), &self.output_path)?;\n\n }\n\n Ok(result)\n\n }\n\n}\n\n\n\nimpl Drop for Instance {\n\n fn drop(&mut self) {\n\n Command::new(get_env(\"ISOLATE_PATH\"))\n\n .args(&[\"--cleanup\", \"--cg\", \"-b\"])\n\n .arg(self.box_id.to_string())\n\n .output();\n\n\n\n if self.log_file.is_file() {\n\n fs::remove_file(&self.log_file);\n\n }\n\n }\n\n}\n", "file_path": "grader/src/instance/mod.rs", "rank": 92, "score": 27607.63894503703 }, { "content": "use crate::combine_argument;\n\nuse crate::errors::{GraderError, GraderResult};\n\nuse crate::utils::get_env;\n\nuse std::{fs, path::PathBuf, process::Command};\n\n\n\n#[cfg(test)]\n\nmod tests;\n\n\n\n/// Instance define a single test case to run in isolated environment\n\n#[derive(Default, Debug)]\n\npub struct Instance {\n\n pub box_path: PathBuf,\n\n pub log_file: PathBuf,\n\n pub box_id: u64,\n\n pub bin_path: PathBuf,\n\n pub time_limit: f64,\n\n pub memory_limit: u64,\n\n pub input_path: PathBuf,\n\n 
pub output_path: PathBuf,\n\n pub runner_path: PathBuf,\n", "file_path": "grader/src/instance/mod.rs", "rank": 93, "score": 27605.371226591637 }, { "content": " if memory_limit_exceeded\n\n || result.memory_usage >= self.memory_limit && result.status == Default::default()\n\n {\n\n result.status = RunVerdict::VerdictMLE;\n\n }\n\n Ok(result)\n\n }\n\n\n\n pub fn init(&mut self) -> GraderResult<()> {\n\n for tmp_box_idx in 1..=1000 {\n\n let box_path = Command::new(get_env(\"ISOLATE_PATH\"))\n\n .args(&[\"--init\", \"--cg\", \"-b\"])\n\n .arg(tmp_box_idx.to_string())\n\n .output()?;\n\n\n\n if box_path.status.success() {\n\n let box_path = String::from_utf8(box_path.stdout)?;\n\n self.box_path = PathBuf::from(box_path.trim_end_matches('\\n')).join(\"box\");\n\n self.box_id = tmp_box_idx;\n\n break;\n", "file_path": "grader/src/instance/mod.rs", "rank": 94, "score": 27604.838020507887 }, { "content": " pub status: RunVerdict,\n\n pub time_usage: f64,\n\n pub memory_usage: u64,\n\n}\n\n\n\nimpl Instance {\n\n fn get_run_arguments(&self) -> GraderResult<Vec<String>> {\n\n Ok(combine_argument![\n\n \"-b\",\n\n self.box_id.to_string(),\n\n \"-M\",\n\n self.log_file\n\n .to_str()\n\n .ok_or(GraderError::invalid_to_str())?\n\n .to_string(),\n\n \"-t\",\n\n self.time_limit.to_string(),\n\n \"-w\",\n\n (self.time_limit + 5.0).to_string(),\n\n \"-x\",\n", "file_path": "grader/src/instance/mod.rs", "rank": 95, "score": 27604.42066937389 }, { "content": "}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum RunVerdict {\n\n VerdictOK,\n\n VerdictTLE,\n\n VerdictMLE,\n\n VerdictRE,\n\n VerdictXX,\n\n VerdictSG,\n\n}\n\n\n\nimpl Default for RunVerdict {\n\n fn default() -> Self {\n\n Self::VerdictOK\n\n }\n\n}\n\n\n\n#[derive(Default, PartialEq, Debug)]\n\npub struct InstanceResult {\n", "file_path": "grader/src/instance/mod.rs", "rank": 96, "score": 27603.447593664005 }, { "content": " (self.time_limit + 1.0).to_string(),\n\n \"-i\",\n\n \"input\",\n\n \"-o\",\n\n \"output\",\n\n 
\"--processes=128\",\n\n \"--cg\",\n\n \"--cg-timing\",\n\n format!(\"--cg-mem={}\", self.memory_limit),\n\n format!(\"--dir={}\", get_env(\"ALTERNATIVE_PATH\")),\n\n \"--run\",\n\n \"--\",\n\n \"runner\"\n\n ])\n\n }\n\n\n\n pub fn get_result(&self) -> GraderResult<InstanceResult> {\n\n let log_content = fs::read_to_string(&self.log_file)?;\n\n let mut result: InstanceResult = Default::default();\n\n let mut memory_limit_exceeded = false;\n", "file_path": "grader/src/instance/mod.rs", "rank": 97, "score": 27603.310901708126 }, { "content": " for log_line in log_content.lines() {\n\n let args: Vec<&str> = log_line.split(':').collect();\n\n if args.len() >= 2 {\n\n match &*args[0] {\n\n \"status\" => {\n\n result.status = match &*args[1] {\n\n \"RE\" => RunVerdict::VerdictRE,\n\n \"SG\" => RunVerdict::VerdictSG,\n\n \"TO\" => RunVerdict::VerdictTLE,\n\n \"XX\" => RunVerdict::VerdictXX,\n\n _ => RunVerdict::VerdictSG,\n\n }\n\n }\n\n \"time\" => result.time_usage = args[1].parse()?,\n\n \"cg-mem\" => result.memory_usage = args[1].parse()?,\n\n \"cg-oom-killed\" => memory_limit_exceeded = args[1].trim() == \"1\",\n\n _ => (),\n\n }\n\n }\n\n }\n", "file_path": "grader/src/instance/mod.rs", "rank": 98, "score": 27601.439335962335 }, { "content": " }\n\n }\n\n\n\n let tmp_path = get_env(\"TEMPORARY_PATH\");\n\n self.log_file = PathBuf::from(tmp_path).join(format!(\"tmp_log_{}.txt\", self.box_id));\n\n\n\n fs::copy(&self.input_path, &self.box_path.join(\"input\"))?;\n\n\n\n fs::copy(\n\n &self.bin_path,\n\n &self.box_path.join(\n\n self.bin_path\n\n .file_name()\n\n .ok_or(GraderError::invalid_to_str())?,\n\n ),\n\n )?;\n\n\n\n fs::copy(&self.runner_path, &self.box_path.join(\"runner\"))?;\n\n Ok(())\n\n }\n", "file_path": "grader/src/instance/mod.rs", "rank": 99, "score": 27598.022167928935 } ]
Rust
api/swim_form_derive/src/structural/mod.rs
swimos/swim-rust
f1a2be7bb4eb8f38e6ecc19bba4a8c876016183c
use crate::structural::model::enumeration::{EnumDef, EnumModel, SegregatedEnumModel}; use crate::structural::model::record::{SegregatedStructModel, StructDef, StructModel}; use crate::structural::model::StructLike; use crate::structural::model::ValidateFrom; use crate::structural::read::DeriveStructuralReadable; use crate::structural::write::DeriveStructuralWritable; use proc_macro2::TokenStream; use quote::ToTokens; use swim_utilities::errors::Errors; use syn::{Data, DeriveInput, Generics}; pub mod model; pub mod read; pub mod write; pub fn build_derive_structural_form(input: DeriveInput) -> Result<TokenStream, Errors<syn::Error>> { match &input.data { Data::Struct(ds) => { let def = StructDef::new(&input.ident, &input, &input.attrs, ds); struct_derive_structural_form(def, &input.generics) } Data::Enum(de) => { let def = EnumDef::new(&input.ident, &input, &input.attrs, de); enum_derive_structural_form(def, &input.generics) } _ => Err(Errors::of(syn::Error::new_spanned( input, "Union types are not supported.", ))), } } pub fn build_derive_structural_writable( input: DeriveInput, ) -> Result<TokenStream, Errors<syn::Error>> { match &input.data { Data::Struct(ds) => { let def = StructDef::new(&input.ident, &input, &input.attrs, ds); struct_derive_structural_writable(def, &input.generics) } Data::Enum(de) => { let def = EnumDef::new(&input.ident, &input, &input.attrs, de); enum_derive_structural_writable(def, &input.generics) } _ => Err(Errors::of(syn::Error::new_spanned( input, "Union types are not supported.", ))), } } fn struct_derive_structural_writable<'a, Flds: StructLike>( input: StructDef<'a, Flds>, generics: &'a Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = StructModel::validate(input).into_result()?; let segregated = SegregatedStructModel::from(&model); let derive = DeriveStructuralWritable(segregated, generics); Ok(derive.into_token_stream()) } fn struct_derive_structural_form<'a, Flds: StructLike>( input: StructDef<'a, Flds>, 
generics: &'a Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = StructModel::validate(input).into_result()?; let segregated = SegregatedStructModel::from(&model); let derive_writable = DeriveStructuralWritable(segregated.clone(), generics); let derive_readable = DeriveStructuralReadable(segregated, generics); Ok(quote! { #derive_writable #derive_readable }) } fn enum_derive_structural_form<'a>( input: EnumDef<'a>, generics: &'a Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = EnumModel::validate(input).into_result()?; let segregated = SegregatedEnumModel::from(&model); let derive_writable = DeriveStructuralWritable(segregated.clone(), generics); let derive_readable = DeriveStructuralReadable(segregated, generics); Ok(quote! { #derive_writable #derive_readable }) } fn enum_derive_structural_writable<'a>( input: EnumDef<'a>, generics: &'a Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = EnumModel::validate(input).into_result()?; let segregated = SegregatedEnumModel::from(&model); let derive = DeriveStructuralWritable(segregated, generics); Ok(derive.into_token_stream()) } pub fn build_derive_structural_readable( input: DeriveInput, ) -> Result<TokenStream, Errors<syn::Error>> { match &input.data { Data::Struct(ds) => { let def = StructDef::new(&input.ident, &input, &input.attrs, ds); struct_derive_structural_readable(def, &input.generics) } Data::Enum(de) => { let def = EnumDef::new(&input.ident, &input, &input.attrs, de); enum_derive_structural_readable(def, &input.generics) } _ => Err(Errors::of(syn::Error::new_spanned( input, "Union types are not supported.", ))), } } fn struct_derive_structural_readable<Flds: StructLike>( input: StructDef<'_, Flds>, generics: &Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = StructModel::validate(input).into_result()?; let segregated = SegregatedStructModel::from(&model); let derive = DeriveStructuralReadable(segregated, generics); 
Ok(derive.into_token_stream()) } fn enum_derive_structural_readable( input: EnumDef<'_>, generics: &Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = EnumModel::validate(input).into_result()?; let segregated = SegregatedEnumModel::from(&model); let derive = DeriveStructuralReadable(segregated, generics); Ok(derive.into_token_stream()) } fn add_bounds(original: &Generics, generics: &mut Generics, bound: syn::TraitBound) { let bounds = original.type_params().map(|param| { let id = &param.ident; parse_quote!(#id: #bound) }); let where_clause = generics.make_where_clause(); for bound in bounds.into_iter() { where_clause.predicates.push(bound); } }
use crate::structural::model::enumeration::{EnumDef, EnumModel, SegregatedEnumModel}; use crate::structural::model::record::{SegregatedStructModel, StructDef, StructModel}; use crate::structural::model::StructLike; use crate::structural::model::ValidateFrom; use crate::structural::read::DeriveStructuralReadable; use crate::structural::write::DeriveStructuralWritable; use proc_macro2::TokenStream; use quote::ToTokens; use swim_utilities::errors::Errors; use syn::{Data, DeriveInput, Generics}; pub mod model; pub mod read; pub mod write; pub fn build_derive_structural_form(input: DeriveInput) -> Result<TokenStream, Errors<syn::Error>> { match &input.data { Data::Struct(ds) => { let def = StructDef::new(&input.ident, &input, &input.attrs, ds); struct_derive_structural_form(def, &input.generics) } Data::Enum(de) => { let def = EnumDef::new(&input.ident, &input, &input.attrs, de); enum_derive_structural_form(def, &input.generics) } _ => Err(Errors::of(syn::Error::new_spanned( input, "Union types are not supported.", ))), } } pub fn build_derive_structural_writable( input: DeriveInput, ) -> Result<TokenStream, Errors<syn::Error>> { match &input.data { Data::Struct(ds) => { let def = StructDef::new(&input.ident, &input, &input.attrs, ds); struct_derive_structural_writable(def, &input.generics) } Data::Enum(de) => { let def = EnumDef::new(&input.ident, &input, &input.attrs, de); enum_derive_structural_writable(def, &input.generics) } _ => Err(Errors::of(syn::Error::new_spanned( input, "Union types are not supported.", ))), } } fn struct_derive_structural_writable<'a, Flds: StructLike>( input: StructDef<'a, Flds>, generics: &'a Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = StructModel::validate(input).into_result()?; let segregated = SegregatedStructModel::from(&model); let derive = DeriveStructuralWritable(segregated, generics); Ok(derive.into_token_stream()) } fn struct_derive_structural_form<'a, Flds: StructLike>( input: StructDef<'a, Flds>, 
generics: &'a Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = StructModel::validate(input).into_result()?; let segregated = SegregatedStructModel::from(&model); let derive_writable = DeriveStructuralWritable(segregated.clone(), generics); let derive_readable = DeriveStructuralReadable(segregated, generics); Ok(quote! { #derive_writable #derive_readable }) } fn enum_derive_structural_form<'a>( input: EnumDef<'a>, generics: &'a Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = EnumModel::validate(input).into_result()?; let segregated = SegregatedEnumModel::from(&model); let derive_writable = DeriveStructuralWritable(segregated.clone(), generics); let derive_readable = DeriveStructuralReadable(segregated, generics); Ok(quote! { #derive_writable #derive_readable }) } fn enum_derive_structural_writable<'a>( input: EnumDef<'a>, generics: &'a Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = EnumModel::validate(input).into_result()?; let segregated = SegregatedEnumModel::from(&model); let derive = DeriveStructuralWritable(segregated, generics); Ok(derive.into_token_stream()) } pub fn build_derive_structural_readable( input: DeriveInput, ) -> Result<TokenStream, Errors<syn::Error>> { match &input.data { Data::Struct(ds) => { let def = StructDef::new(&input.ident, &input, &input.attrs, ds); struct_derive_structural_readable(def, &input.generics) } Data::Enum(de) => { let def = EnumDef::new(&input.ident, &input, &input.attrs, de); enum_derive_structural_readable(def, &input.generics) } _ => Err(Errors::of(syn::Error::new_spanned( input, "Union types are not supported.", ))), } } fn struct_derive_structural_readable<Flds: StructLike>( input: StructDef<'_, Flds>, generics: &Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = StructModel:
nerics, generics: &mut Generics, bound: syn::TraitBound) { let bounds = original.type_params().map(|param| { let id = &param.ident; parse_quote!(#id: #bound) }); let where_clause = generics.make_where_clause(); for bound in bounds.into_iter() { where_clause.predicates.push(bound); } }
:validate(input).into_result()?; let segregated = SegregatedStructModel::from(&model); let derive = DeriveStructuralReadable(segregated, generics); Ok(derive.into_token_stream()) } fn enum_derive_structural_readable( input: EnumDef<'_>, generics: &Generics, ) -> Result<TokenStream, Errors<syn::Error>> { let model = EnumModel::validate(input).into_result()?; let segregated = SegregatedEnumModel::from(&model); let derive = DeriveStructuralReadable(segregated, generics); Ok(derive.into_token_stream()) } fn add_bounds(original: &Ge
random
[ { "content": "fn recognize_item(input: ReadEvent<'_>) -> ItemEvent {\n\n match input {\n\n ReadEvent::Extant => ItemEvent::Primitive(Value::Extant),\n\n ReadEvent::Number(NumericValue::Int(n)) => {\n\n ItemEvent::Primitive(if let Ok(m) = i32::try_from(n) {\n\n Value::Int32Value(m)\n\n } else {\n\n Value::Int64Value(n)\n\n })\n\n }\n\n ReadEvent::Number(NumericValue::UInt(n)) => {\n\n ItemEvent::Primitive(if let Ok(m) = i32::try_from(n) {\n\n Value::Int32Value(m)\n\n } else if let Ok(m) = i64::try_from(n) {\n\n Value::Int64Value(m)\n\n } else if let Ok(m) = u32::try_from(n) {\n\n Value::UInt32Value(m)\n\n } else {\n\n Value::UInt64Value(n)\n\n })\n", "file_path": "api/swim_form/src/structural/read/from_model/mod.rs", "rank": 1, "score": 338020.2756923876 }, { "content": "pub fn build_derive_tag(input: syn::DeriveInput) -> Result<TokenStream, Errors<syn::Error>> {\n\n match &input.data {\n\n syn::Data::Enum(enum_ty) => {\n\n if enum_ty.variants.iter().any(|var| !var.fields.is_empty()) {\n\n Err(Errors::of(syn::Error::new_spanned(\n\n input,\n\n ENUM_WITH_FIELDS_ERR,\n\n )))\n\n } else {\n\n let validated = enum_ty\n\n .variants\n\n .iter()\n\n .validate_collect(true, |v| {\n\n let rename = crate::modifiers::fold_attr_meta(\n\n FORM_PATH,\n\n v.attrs.iter(),\n\n None,\n\n crate::modifiers::acc_rename,\n\n );\n\n rename.map(move |rename| (&v.ident, rename))\n", "file_path": "api/swim_form_derive/src/tag/mod.rs", "rank": 2, "score": 337764.9734215977 }, { "content": "fn num_attributes<'a>(model: &'a SegregatedStructModel<'a>) -> TokenStream {\n\n let base_attrs = model.fields.header.attributes.len() + 1;\n\n if let BodyFields::ReplacedBody(fld) = model.fields.body {\n\n let body_fld = match &fld.selector {\n\n FieldSelector::Named(id) => quote!(&self.#id),\n\n FieldSelector::Ordinal(i) => {\n\n let idx = syn::Index::from(*i);\n\n quote!(&self.#idx)\n\n }\n\n };\n\n quote!(#base_attrs + swim_form::structural::write::StructuralWritable::num_attributes(#body_fld))\n\n } 
else {\n\n quote!(#base_attrs)\n\n }\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 3, "score": 335446.53555878333 }, { "content": "pub fn validate_input_ast(input_ast: &DeriveInput, ty: InputAstType) -> Result<(), InputAstError> {\n\n match input_ast.data {\n\n Data::Enum(_) => Err(InputAstError::Enum(ty, input_ast.ident.span())),\n\n Data::Union(_) => Err(InputAstError::Union(ty, input_ast.ident.span())),\n\n _ => {\n\n if !input_ast.generics.params.is_empty() {\n\n Err(InputAstError::Generic(ty, input_ast.ident.span()))\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "swim_server/agent_derive/src/utils.rs", "rank": 4, "score": 327872.52262785117 }, { "content": "pub fn derive_swim_agent(input: DeriveInput) -> Result<TokenStream, TokenStream> {\n\n if let Err(error) = validate_input_ast(&input, InputAstType::Agent) {\n\n return Err(TokenStream::from(quote! {#error}));\n\n }\n\n\n\n let args = match SwimAgentAttrs::from_derive_input(&input) {\n\n Ok(args) => args,\n\n Err(e) => {\n\n return Err(TokenStream::from(e.write_errors()));\n\n }\n\n };\n\n\n\n let (agent_name, config_type, agent_fields) = get_agent_data(args)?;\n\n\n\n let lanes = agent_fields\n\n .iter()\n\n .map(|agent_field| &agent_field.lane_name);\n\n let tasks = agent_fields\n\n .iter()\n\n .map(|agent_field| &agent_field.task_name);\n", "file_path": "swim_server/agent_derive/src/agent.rs", "rank": 5, "score": 326152.20578414295 }, { "content": "pub fn derive_agent_lifecycle(args: AttributeArgs, input: DeriveInput) -> TokenStream {\n\n if let Err(error) = validate_input_ast(&input, InputAstType::Lifecycle) {\n\n return TokenStream::from(quote! 
{#error});\n\n }\n\n\n\n let args = match AgentAttrs::from_list(&args) {\n\n Ok(args) => args,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let lifecycle_name = input.ident.clone();\n\n let agent_name = &args.agent;\n\n let on_start_callback =\n\n parse_callback(&args.on_start, lifecycle_name.clone(), CallbackKind::Start);\n\n\n\n let start_body = match on_start_callback {\n\n Callback::Default { .. } => {\n\n quote! {ready(()).boxed()}\n", "file_path": "swim_server/agent_derive/src/agent.rs", "rank": 6, "score": 326152.20578414295 }, { "content": "fn num_attributes_case<'a>(model: &'a SegregatedStructModel<'a>, by_ref: bool) -> TokenStream {\n\n let base_attrs = model.fields.header.attributes.len() + 1;\n\n if let BodyFields::ReplacedBody(fld) = model.fields.body {\n\n let name = &fld.selector;\n\n let body_fld = if by_ref {\n\n quote!(#name)\n\n } else {\n\n quote!(&#name)\n\n };\n\n quote!(#base_attrs + swim_form::structural::write::StructuralWritable::num_attributes(#body_fld))\n\n } else {\n\n quote!(#base_attrs)\n\n }\n\n}\n\n\n\npub struct NumAttrsEnum<'a>(&'a SegregatedEnumModel<'a>);\n\n\n\nimpl<'a> ToTokens for NumAttrsEnum<'a> {\n\n fn to_tokens(&self, tokens: &mut TokenStream) {\n\n let NumAttrsEnum(SegregatedEnumModel { inner, variants }) = self;\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 7, "score": 315142.1984915576 }, { "content": "#[proc_macro_derive(Form, attributes(form))]\n\npub fn derive_form(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n build_derive_structural_form(input)\n\n .unwrap_or_else(errs_to_compile_errors)\n\n .into()\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/lib.rs", "rank": 8, "score": 298092.27690646786 }, { "content": "#[proc_macro_derive(Tag, attributes(form))]\n\npub fn derive_tag(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n 
build_derive_tag(input)\n\n .unwrap_or_else(errs_to_compile_errors)\n\n .into()\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/lib.rs", "rank": 9, "score": 298092.27690646786 }, { "content": "struct WriteWithFn<'a>(&'a SegregatedStructModel<'a>);\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 10, "score": 294645.5866819885 }, { "content": "struct WriteIntoFn<'a>(&'a SegregatedStructModel<'a>);\n\n\n\nimpl<'a> ToTokens for DeriveStructuralWritable<'a, SegregatedEnumModel<'a>> {\n\n fn to_tokens(&self, tokens: &mut TokenStream) {\n\n let DeriveStructuralWritable(model, generics) = self;\n\n let SegregatedEnumModel { inner, variants } = model;\n\n let EnumModel { name, .. } = inner;\n\n let writer_trait = make_writer_trait();\n\n\n\n let mut new_generics = (*generics).clone();\n\n super::add_bounds(\n\n *generics,\n\n &mut new_generics,\n\n parse_quote!(swim_form::structural::write::StructuralWritable),\n\n );\n\n\n\n let (impl_lst, ty_params, where_clause) = new_generics.split_for_impl();\n\n\n\n let impl_block = if variants.is_empty() {\n\n quote! 
{\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 11, "score": 294645.58668198844 }, { "content": "#[proc_macro_derive(ValueSchema, attributes(form))]\n\npub fn derive_validated_form(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n build_validated_form(input)\n\n .unwrap_or_else(to_compile_errors)\n\n .into()\n\n}\n", "file_path": "api/swim_schema_derive/src/lib.rs", "rank": 12, "score": 294598.5338348759 }, { "content": "#[proc_macro_derive(StructuralReadable, attributes(form))]\n\npub fn derive_structural_readable(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n build_derive_structural_readable(input)\n\n .unwrap_or_else(errs_to_compile_errors)\n\n .into()\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/lib.rs", "rank": 13, "score": 294598.5338348759 }, { "content": "#[proc_macro_derive(StructuralWritable, attributes(form))]\n\npub fn derive_structural_writable(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n build_derive_structural_writable(input)\n\n .unwrap_or_else(errs_to_compile_errors)\n\n .into()\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/lib.rs", "rank": 14, "score": 294598.5338348759 }, { "content": "#[allow(clippy::too_many_arguments)]\n\npub fn derive_lane(\n\n trait_name: &str,\n\n typ: Ident,\n\n gen_lifecycle: bool,\n\n task_name: Ident,\n\n agent_name: Ident,\n\n input_ast: DeriveInput,\n\n lane_type: TokenStream,\n\n item_type: TokenStream,\n\n lane_tasks_impl: LaneTasksImpl,\n\n imports: TokenStream,\n\n field: Option<TokenStream>,\n\n lane_kind: TokenStream,\n\n) -> proc_macro::TokenStream {\n\n let public_derived = quote! 
{\n\n #input_ast\n\n\n\n struct #task_name<T, S>\n\n where\n\n T: core::ops::Fn(&#agent_name) -> &#lane_type + Send + Sync + 'static,\n", "file_path": "swim_server/agent_derive/src/lanes/mod.rs", "rank": 15, "score": 291716.9101274343 }, { "content": "pub fn derive_command_lifecycle(attr_args: AttributeArgs, input_ast: DeriveInput) -> TokenStream {\n\n if let Err(error) = validate_input_ast(&input_ast, InputAstType::Lifecycle) {\n\n return TokenStream::from(quote! {#error});\n\n }\n\n\n\n let args = match CommandAttrs::from_list(&attr_args) {\n\n Ok(args) => args,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let lifecycle_name = input_ast.ident.clone();\n\n let gen_lifecycle = args\n\n .gen_lifecycle\n\n .unwrap_or_else(|| !has_fields(&input_ast.data));\n\n let task_name = get_task_struct_name(&input_ast.ident.to_string());\n\n let agent_name = args.agent.clone();\n\n let command_type = &args.command_type;\n\n let on_command_callback =\n", "file_path": "swim_server/agent_derive/src/lanes/command.rs", "rank": 16, "score": 286991.39939394966 }, { "content": "pub fn derive_demand_lifecycle(attr_args: AttributeArgs, input_ast: DeriveInput) -> TokenStream {\n\n if let Err(error) = validate_input_ast(&input_ast, InputAstType::Lifecycle) {\n\n return TokenStream::from(quote! 
{#error});\n\n }\n\n\n\n let args = match DemandAttrs::from_list(&attr_args) {\n\n Ok(args) => args,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let lifecycle_name = input_ast.ident.clone();\n\n let gen_lifecycle = args\n\n .gen_lifecycle\n\n .unwrap_or_else(|| !has_fields(&input_ast.data));\n\n let task_name = get_task_struct_name(&input_ast.ident.to_string());\n\n let agent_name = args.agent.clone();\n\n let event_type = &args.event_type;\n\n let on_cue_callback = parse_callback(&args.on_cue, task_name.clone(), CallbackKind::Cue);\n", "file_path": "swim_server/agent_derive/src/lanes/demand.rs", "rank": 17, "score": 286991.39939394966 }, { "content": "pub fn derive_map_lifecycle(attr_args: AttributeArgs, input_ast: DeriveInput) -> TokenStream {\n\n if let Err(error) = validate_input_ast(&input_ast, InputAstType::Lifecycle) {\n\n return TokenStream::from(quote! {#error});\n\n }\n\n\n\n let args = match MapAttrs::from_list(&attr_args) {\n\n Ok(args) => args,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let lifecycle_name = input_ast.ident.clone();\n\n let gen_lifecycle = args\n\n .gen_lifecycle\n\n .unwrap_or_else(|| !has_fields(&input_ast.data));\n\n let task_name = get_task_struct_name(&input_ast.ident.to_string());\n\n let agent_name = args.agent.clone();\n\n let key_type = &args.key_type;\n\n let value_type = &args.value_type;\n", "file_path": "swim_server/agent_derive/src/lanes/map.rs", "rank": 18, "score": 286991.39939394966 }, { "content": "pub fn derive_action_lifecycle(attr_args: AttributeArgs, input_ast: DeriveInput) -> TokenStream {\n\n if let Err(error) = validate_input_ast(&input_ast, InputAstType::Lifecycle) {\n\n return TokenStream::from(quote! 
{#error});\n\n }\n\n\n\n let args = match ActionAttrs::from_list(&attr_args) {\n\n Ok(args) => args,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let lifecycle_name = input_ast.ident.clone();\n\n let gen_lifecycle = args\n\n .gen_lifecycle\n\n .unwrap_or_else(|| !has_fields(&input_ast.data));\n\n let task_name = get_task_struct_name(&input_ast.ident.to_string());\n\n let agent_name = args.agent.clone();\n\n let command_type = &args.command_type;\n\n let response_type = &args.response_type;\n", "file_path": "swim_server/agent_derive/src/lanes/action.rs", "rank": 19, "score": 286991.39939394966 }, { "content": "pub fn derive_value_lifecycle(attr_args: AttributeArgs, input_ast: DeriveInput) -> TokenStream {\n\n if let Err(error) = validate_input_ast(&input_ast, InputAstType::Lifecycle) {\n\n return TokenStream::from(quote! {#error});\n\n }\n\n\n\n let args: ValueAttrs = match ValueAttrs::from_list(&attr_args) {\n\n Ok(args) => args,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let lifecycle_name = input_ast.ident.clone();\n\n let gen_lifecycle = args\n\n .gen_lifecycle\n\n .unwrap_or_else(|| !has_fields(&input_ast.data));\n\n\n\n let task_name = get_task_struct_name(&input_ast.ident.to_string());\n\n let agent_name = args.agent.clone();\n\n let event_type = &args.event_type;\n", "file_path": "swim_server/agent_derive/src/lanes/value.rs", "rank": 20, "score": 286991.39939394966 }, { "content": "#[proc_macro_derive(SwimAgent, attributes(lifecycle, agent))]\n\npub fn swim_agent(input: TokenStream) -> TokenStream {\n\n let input_ast = parse_macro_input!(input as DeriveInput);\n\n let ident = input_ast.ident.clone();\n\n let derived = match derive_swim_agent(input_ast) {\n\n Ok(derived) => derived,\n\n Err(ts) => return ts,\n\n };\n\n\n\n as_const(\"SwimAgent\", ident, derived.into()).into()\n\n}\n\n\n\n/// An attribute for creating agent lifecycles for swim agents.\n\n///\n\n/// The attribute 
requires the name of the swim agent with which this lifecycle will be used.\n\n///\n\n/// The `on_start` attribute can be used to register a callback.\n\n/// It will resolve to a method with the same name if no value is provided, or to a method\n\n/// matching the custom value, if one is provided.\n\n/// If the `on_start` attribute is ommitted, no callback function will be used.\n\n///\n", "file_path": "swim_server/agent_derive/src/lib.rs", "rank": 23, "score": 283654.075931887 }, { "content": "/// Create a new supply lane model. Returns a new supply lane model and a stream that events can be\n\n/// received from.\n\npub fn make_lane_model<Event>(\n\n buffer_size: NonZeroUsize,\n\n) -> (SupplyLane<Event>, ReceiverStream<Event>)\n\nwhere\n\n Event: Send + Sync + Clone + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel(buffer_size.get());\n\n let lane = SupplyLane::new(tx);\n\n\n\n (lane, ReceiverStream::new(rx))\n\n}\n\n\n\nimpl Lane for StatelessLifecycleTasks {\n\n fn name(&self) -> &str {\n\n self.name.as_str()\n\n }\n\n\n\n fn kind(&self) -> LaneKind {\n\n self.kind\n\n }\n", "file_path": "swim_server/src/agent/lane/model/supply/mod.rs", "rank": 24, "score": 273789.552243451 }, { "content": "/// Create a new demand lane model. 
Returns a new demand lane model and a stream of unit values that\n\n/// represent a cue request.\n\npub fn make_lane_model<Event>(\n\n buffer_size: NonZeroUsize,\n\n) -> (DemandLane<Event>, impl Stream<Item = ()> + Send + 'static)\n\nwhere\n\n Event: Send + Sync + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel(buffer_size.get());\n\n let lane = DemandLane::new(tx);\n\n (lane, ReceiverStream::new(rx))\n\n}\n", "file_path": "swim_server/src/agent/lane/model/demand/mod.rs", "rank": 25, "score": 273789.47027562576 }, { "content": "pub fn default_config() -> ConfigType {\n\n ConfigType::Unit\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ConfigType {\n\n Struct(Ident),\n\n Unit,\n\n}\n\n\n\nimpl ToTokens for ConfigType {\n\n fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {\n\n match self {\n\n ConfigType::Struct(ident) => ident.to_tokens(tokens),\n\n ConfigType::Unit => {\n\n Group::new(Delimiter::Parenthesis, TokenStream2::new()).to_tokens(tokens)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "swim_server/agent_derive/src/agent.rs", "rank": 26, "score": 270450.74133224715 }, { "content": "/// Create a new private command lane model and a stream of the received commands.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `buffer_size` - Buffer size for the MPSC channel that transmits the commands.\n\npub fn make_private_lane_model<T>(\n\n buffer_size: NonZeroUsize,\n\n) -> (CommandLane<T>, ReceiverStream<Command<T>>)\n\nwhere\n\n T: Send + Sync + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel(buffer_size.get());\n\n let lane = CommandLane::new(tx);\n\n (lane, ReceiverStream::new(rx))\n\n}\n\n\n\nimpl<T> LaneModel for CommandLane<T> {\n\n type Event = T;\n\n\n\n fn same_lane(this: &Self, other: &Self) -> bool {\n\n Arc::ptr_eq(&this.id, &other.id)\n\n }\n\n}\n\n\n\nimpl<T> Debug for CommandLane<T> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n f.debug_tuple(\"CommandLane\")\n\n .field(&type_of::<fn(T) -> T>())\n\n .finish()\n\n }\n\n}\n", "file_path": 
"swim_server/src/agent/lane/model/command/mod.rs", "rank": 27, "score": 270226.86443899584 }, { "content": "/// Create a new public command lane model and a stream of the received commands.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `buffer_size` - Buffer size for the MPSC channel that transmits the commands.\n\npub fn make_public_lane_model<T>(\n\n buffer_size: NonZeroUsize,\n\n) -> (\n\n CommandLane<T>,\n\n CommandStream<T>,\n\n Commander<T>,\n\n CommandStream<T>,\n\n)\n\nwhere\n\n T: Send + Sync + 'static,\n\n{\n\n let (event_tx, event_rx) = mpsc::channel(buffer_size.get());\n\n let (local_commands_tx, local_commands_rx) = mpsc::channel(buffer_size.get());\n\n let lane = CommandLane::new(local_commands_tx);\n\n (\n\n lane,\n\n ReceiverStream::new(event_rx),\n\n Commander(event_tx),\n\n ReceiverStream::new(local_commands_rx),\n\n )\n\n}\n\n\n", "file_path": "swim_server/src/agent/lane/model/command/mod.rs", "rank": 28, "score": 270226.86443899584 }, { "content": "pub fn ungroup(mut ty: &Type) -> &Type {\n\n while let Type::Group(group) = ty {\n\n ty = &group.elem;\n\n }\n\n ty\n\n}\n", "file_path": "macro_utilities/src/generics.rs", "rank": 29, "score": 270121.0377415477 }, { "content": "// Builds the ValueSchema implementation from the DeriveInput.\n\npub fn build_validated_form(\n\n input: DeriveInput,\n\n) -> Result<proc_macro2::TokenStream, Vec<syn::Error>> {\n\n let mut context = Context::default();\n\n let type_contents = match build_type_contents(&mut context, &input) {\n\n Some(cont) => type_contents_to_validated(&mut context, cont),\n\n None => return Err(context.check().unwrap_err()),\n\n };\n\n\n\n context.check()?;\n\n\n\n let generics = build_generics(&type_contents, &input.generics);\n\n\n\n let structure_name = &input.ident;\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n let type_contents = type_contents_to_tokens(&type_contents);\n\n\n\n let ts = quote! 
{\n\n impl #impl_generics swim_schema::ValueSchema for #structure_name #ty_generics #where_clause\n\n {\n\n fn schema() -> swim_schema::schema::StandardSchema {\n\n #type_contents\n\n }\n\n }\n\n };\n\n\n\n Ok(ts)\n\n}\n\n\n", "file_path": "api/swim_schema_derive/src/validated_form/mod.rs", "rank": 30, "score": 269746.36337158806 }, { "content": "fn write_value_into(field: &FieldModel) -> TokenStream {\n\n let field_index = &field.selector;\n\n quote! {\n\n body_writer = body_writer.write_value_into(#field_index)?;\n\n }\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 31, "score": 269699.43719398696 }, { "content": "fn write_attr_into(field: &FieldModel) -> TokenStream {\n\n let field_index = &field.selector;\n\n let literal_name = field.resolve_name();\n\n quote! {\n\n rec_writer = rec_writer.write_attr_into(#literal_name, #field_index)?;\n\n }\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 32, "score": 269699.43719398696 }, { "content": "fn write_slot_into(field: &FieldModel) -> TokenStream {\n\n let field_index = &field.selector;\n\n let literal_name = field.resolve_name();\n\n quote! {\n\n if !swim_form::structural::write::StructuralWritable::omit_as_field(&#field_index) {\n\n body_writer = body_writer.write_slot_into(#literal_name, #field_index)?;\n\n }\n\n }\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 33, "score": 269699.43719398696 }, { "content": "/// Build a [`TypeContents`] input from an abstract syntax tree. Returns [`Option::None`] if\n\n/// there was an error that was encountered while parsing the tree. The underlying error is\n\n/// added to the [`Context]`. 
If [`derive_valid`] is `true`, then [`[form(valid(..))]`]\n\n/// attributes are parsed into the [`TypeContents`] representation.\n\npub fn build_type_contents<'t>(\n\n context: &mut Context,\n\n input: &'t syn::DeriveInput,\n\n) -> Option<TypeContents<'t, FormDescriptor, FormField<'t>>> {\n\n let type_contents = match &input.data {\n\n Data::Enum(data) => {\n\n if !input.attrs.get_attributes(context, FORM_PATH).is_empty() {\n\n context.error_spanned_by(input, \"Tags are only supported on enumeration variants.\");\n\n return None;\n\n }\n\n\n\n let variants = data\n\n .variants\n\n .iter()\n\n .map(|variant| {\n\n let attributes = variant.attrs.get_attributes(context, FORM_PATH);\n\n let mut container_label =\n\n parse_container_tag(context, &variant.ident, attributes);\n\n let (compound_type, fields, manifest) = parse_struct(\n\n context,\n", "file_path": "api/swim_schema_derive/src/form/form_parser.rs", "rank": 34, "score": 266387.6568931552 }, { "content": "fn write_slot_ref(field: &FieldModel) -> TokenStream {\n\n let field_index = &field.selector;\n\n let literal_name = field.resolve_name();\n\n quote! {\n\n if !swim_form::structural::write::StructuralWritable::omit_as_field(#field_index) {\n\n body_writer = body_writer.write_slot(&#literal_name, #field_index)?;\n\n }\n\n }\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 35, "score": 265517.90521895076 }, { "content": "fn write_value_ref(field: &FieldModel) -> TokenStream {\n\n let field_index = &field.selector;\n\n quote! {\n\n body_writer = body_writer.write_value(#field_index)?;\n\n }\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 36, "score": 265517.90521895076 }, { "content": "fn write_attr_ref(field: &FieldModel) -> TokenStream {\n\n let field_index = &field.selector;\n\n let literal_name = field.resolve_name();\n\n quote! 
{\n\n rec_writer = rec_writer.write_attr(std::borrow::Cow::Borrowed(#literal_name), #field_index)?;\n\n }\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 37, "score": 265517.90521895076 }, { "content": "/// Create a new action lane model and a stream of the received commands.\n\n///\n\n/// #Arguments\n\n///\n\n/// * `buffer_size` - Buffer size for the MPSC channel that transmits the commands.\n\npub fn make_lane_model<Command, Response>(\n\n buffer_size: NonZeroUsize,\n\n) -> (\n\n ActionLane<Command, Response>,\n\n impl Stream<Item = Action<Command, Response>> + Send + 'static,\n\n)\n\nwhere\n\n Command: Send + Sync + 'static,\n\n Response: Send + Sync + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel(buffer_size.get());\n\n let lane = ActionLane::new(tx);\n\n (lane, ReceiverStream::new(rx))\n\n}\n\n\n\nimpl<Command, Response> LaneModel for ActionLane<Command, Response> {\n\n type Event = Command;\n\n\n\n fn same_lane(this: &Self, other: &Self) -> bool {\n\n Arc::ptr_eq(&this.id, &other.id)\n", "file_path": "swim_server/src/agent/lane/model/action/mod.rs", "rank": 38, "score": 264683.04101014516 }, { "content": "pub fn feed_field<R>(\n\n name: &'static str,\n\n field: &mut Option<R::Target>,\n\n recognizer: &mut R,\n\n event: ReadEvent<'_>,\n\n) -> Option<Result<(), ReadError>>\n\nwhere\n\n R: Recognizer,\n\n{\n\n if field.is_some() {\n\n Some(Err(ReadError::DuplicateField(Text::new(name))))\n\n } else {\n\n match recognizer.feed_event(event) {\n\n Some(Ok(t)) => {\n\n *field = Some(t);\n\n Some(Ok(()))\n\n }\n\n Some(Err(e)) => Some(Err(e)),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n", "file_path": "api/swim_form/src/structural/read/recognizer/mod.rs", "rank": 39, "score": 263513.6431183905 }, { "content": "/// Converts between `TypeContents<'f, FormDescriptor, FormField<'f>>` and\n\n/// `TypeContents<'f, ValueSchemaDescriptor, ValidatedField<'f>>`. 
Parsing any attributes of the\n\n/// path `#[form(schema(..))]`.\n\npub fn type_contents_to_validated<'f>(\n\n ctx: &mut Context,\n\n type_contents: TypeContents<'f, FormDescriptor, FormField<'f>>,\n\n) -> TypeContents<'f, ValueSchemaDescriptor, ValidatedField<'f>> {\n\n match type_contents {\n\n TypeContents::Struct(repr) => TypeContents::Struct({\n\n let attrs = repr.input.attrs.get_attributes(ctx, FORM_PATH);\n\n let descriptor =\n\n ValueSchemaDescriptor::from(ctx, attrs, repr.compound_type, repr.descriptor);\n\n\n\n StructRepr {\n\n input: repr.input,\n\n compound_type: repr.compound_type,\n\n fields: map_fields_to_validated(&repr.input, ctx, repr.fields, &descriptor),\n\n descriptor,\n\n }\n\n }),\n\n TypeContents::Enum(EnumRepr { input, variants }) => {\n\n let variants = variants\n\n .into_iter()\n", "file_path": "api/swim_schema_derive/src/validated_form/vf_parser.rs", "rank": 40, "score": 262464.3436991179 }, { "content": "/// Create a new demand map lane model. Returns a demand map lane instance and a topic containing a\n\n/// stream of cued values.\n\n///\n\n/// # Arguments\n\n/// `buffer_size`: the size of the topic's buffer.\n\n/// `lifecycle_sender`: a sender to the `DemandMapLaneLifecycle`.\n\npub fn make_lane_model<Key, Value>(\n\n buffer_size: NonZeroUsize,\n\n lifecycle_sender: mpsc::Sender<DemandMapLaneCommand<Key, Value>>,\n\n) -> (\n\n DemandMapLane<Key, Value>,\n\n mpsc::Receiver<DemandMapLaneEvent<Key, Value>>,\n\n)\n\nwhere\n\n Key: Send + Clone + Form + Sync + 'static,\n\n Value: Send + Clone + Form + Sync + 'static,\n\n{\n\n let (tx, rx) = mpsc::channel(buffer_size.get());\n\n let lane = DemandMapLane::new(tx, lifecycle_sender);\n\n (lane, rx)\n\n}\n", "file_path": "swim_server/src/agent/lane/model/demand_map/mod.rs", "rank": 41, "score": 261249.84481948146 }, { "content": "#[proc_macro_attribute]\n\npub fn value_lifecycle(args: TokenStream, input: TokenStream) -> TokenStream {\n\n derive(args, input, 
derive_value_lifecycle)\n\n}\n\n\n\n/// An attribute for creating lifecycles for map lanes on swim agents.\n\n///\n\n/// The attribute requires the name of the swim agent with which this lifecycle will be used and the\n\n/// type of the `MapLane` to which it will be applied.\n\n///\n\n/// The `on_start` and `on_event` attributes can be used to register callbacks.\n\n/// They will resolve to methods with the same name if no value is provided, or to methods\n\n/// matching the custom values, if any are provided.\n\n/// If the `on_start` or `on_event` attribute is ommitted, no callback function will be used for it.\n\n///\n\n/// # Example\n\n/// Map lifecycle for a `MapLane` with types [`String`] and [`i32`] on the `TestAgent`, created with\n\n/// the default names for the `on_start` and `on_event` callbacks.\n\n///\n\n/// ```rust\n\n/// use swim_server::map_lifecycle;\n", "file_path": "swim_server/agent_derive/src/lib.rs", "rank": 42, "score": 261133.23829266374 }, { "content": "#[proc_macro_attribute]\n\npub fn agent_lifecycle(args: TokenStream, input: TokenStream) -> TokenStream {\n\n derive(args, input, derive_agent_lifecycle)\n\n}\n\n\n\n/// An attribute for creating lifecycles for command lanes on swim agents.\n\n///\n\n/// The attribute requires the name of the swim agent with which this lifecycle will be used and the\n\n/// type of the `CommandLane` to which it will be applied.\n\n///\n\n/// The `on_command` attribute can be used to register a callback.\n\n/// It will resolve to a method with the same name if no value is provided, or to a method\n\n/// matching the custom value, if one is provided.\n\n/// If the `on_command` attribute is ommitted, no callback function will be used.\n\n///\n\n/// # Example\n\n/// Command lifecycle for a `CommandLane` with type [`String`] on the `TestAgent`, created with the\n\n/// default name for the `on_command` callback.\n\n///\n\n/// ```\n\n/// use swim_server::command_lifecycle;\n", "file_path": 
"swim_server/agent_derive/src/lib.rs", "rank": 43, "score": 261133.23829266377 }, { "content": "#[proc_macro_attribute]\n\npub fn demand_lifecycle(args: TokenStream, input: TokenStream) -> TokenStream {\n\n derive(args, input, derive_demand_lifecycle)\n\n}\n\n\n\n/// An attribute for creating lifecycles for demand map lanes on swim agents.\n\n///\n\n/// The attribute requires the name of the swim agent with which this lifecycle will be used and the\n\n/// type of the `DemandMapLane` to which it will be applied.\n\n///\n\n/// The `on_sync` and `on_cue` attributes can be used to register callbacks.\n\n/// They will resolve to methods with the same name if no value is provided, or to methods\n\n/// matching the custom values, if any are provided.\n\n/// If the `on_sync` or `on_cue` attribute is ommitted, no callback function will be used for it.\n\n///\n\n/// # Example\n\n/// Demand lifecycle for a `DemandMapLane` with types [`String`] and [`i32`] on the `TestAgent`,\n\n/// created with default names for the `on_sync` and `on_cue` callbacks.\n\n///\n\n/// ```rust\n\n/// use swim_server::demand_map_lifecycle;\n", "file_path": "swim_server/agent_derive/src/lib.rs", "rank": 44, "score": 261133.23829266377 }, { "content": "#[proc_macro_attribute]\n\npub fn map_lifecycle(args: TokenStream, input: TokenStream) -> TokenStream {\n\n derive(args, input, derive_map_lifecycle)\n\n}\n\n\n\n/// An attribute for creating lifecycles for demand lanes on swim agents.\n\n///\n\n/// The attribute requires the name of the swim agent with which this lifecycle will be used and the\n\n/// type of the `DemandLane` to which it will be applied.\n\n///\n\n/// The `on_cue` attribute can be used to register a callback.\n\n/// It will resolve to a method with the same name if no value is provided, or to a method\n\n/// matching the custom value, if one is provided.\n\n/// If the `on_cue` attribute is ommitted, no callback function will be used.\n\n///\n\n/// # Example\n\n/// Demand lifecycle for a 
`DemandLane` with type [`i32`] on the `TestAgent`, created with the\n\n/// default name for the `on_cue` callback.\n\n///\n\n/// ```rust\n\n/// use swim_server::demand_lifecycle;\n", "file_path": "swim_server/agent_derive/src/lib.rs", "rank": 45, "score": 261133.23829266377 }, { "content": "#[proc_macro_attribute]\n\npub fn command_lifecycle(args: TokenStream, input: TokenStream) -> TokenStream {\n\n derive(args, input, derive_command_lifecycle)\n\n}\n\n\n\n/// An attribute for creating lifecycles for action lanes on swim agents.\n\n///\n\n/// The attribute requires the name of the swim agent with which this lifecycle will be used and the\n\n/// types of the `ActionLane` to which it will be applied.\n\n///\n\n/// The `on_command` attribute can be used to register a callback.\n\n/// It will resolve to a method with the same name if no value is provided, or to a method\n\n/// matching the custom value, if one is provided.\n\n/// If the `on_command` attribute is ommitted, no callback function will be used.\n\n///\n\n/// # Example\n\n/// Action lifecycle for an `ActionLane` with types [`String`] and [`i32`] on the `TestAgent`,\n\n/// created with the default name for the `on_command` callback.\n\n///\n\n/// ```rust\n\n/// use swim_server::action_lifecycle;\n", "file_path": "swim_server/agent_derive/src/lib.rs", "rank": 46, "score": 261133.23829266377 }, { "content": "#[proc_macro_attribute]\n\npub fn action_lifecycle(args: TokenStream, input: TokenStream) -> TokenStream {\n\n derive(args, input, derive_action_lifecycle)\n\n}\n\n\n\n/// An attribute for creating lifecycles for value lanes on swim agents.\n\n///\n\n/// The attribute requires the name of the swim agent with which this lifecycle will be used and the\n\n/// type of the `ValueLane` to which it will be applied.\n\n///\n\n/// The `on_start` and `on_event` attributes can be used to register callbacks.\n\n/// They will resolve to methods with the same name if no value is provided, or to methods\n\n/// matching 
the custom values, if any are provided.\n\n/// If the `on_start` or `on_event` attribute is ommitted, no callback function will be used for it.\n\n///\n\n/// # Example\n\n/// Value lifecycle for a `ValueLane` with type [`i32`] on the `TestAgent`, created with the default\n\n/// names for the `on_start` and `on_event` callbacks.\n\n///\n\n/// ```rust\n\n/// use swim_server::value_lifecycle;\n", "file_path": "swim_server/agent_derive/src/lib.rs", "rank": 47, "score": 261133.23829266377 }, { "content": "#[proc_macro_attribute]\n\npub fn demand_map_lifecycle(args: TokenStream, input: TokenStream) -> TokenStream {\n\n derive(args, input, derive_demand_map_lifecycle)\n\n}\n", "file_path": "swim_server/agent_derive/src/lib.rs", "rank": 48, "score": 257912.96357640915 }, { "content": "pub fn separator(input: Span<'_>) -> IResult<Span<'_>, char> {\n\n use nom::character::streaming as character;\n\n character::one_of(\",;\")(input)\n\n}\n\n\n\nmacro_rules! token_mod {\n\n ($name:ident, $submod:ident) => {\n\n pub mod $name {\n\n\n\n use super::*;\n\n use nom::bytes::$submod::tag_no_case;\n\n use nom::character::$submod as character;\n\n use nom::character::$submod::not_line_ending;\n\n use nom::multi::many0;\n\n use nom::number::$submod as number;\n\n\n\n pub fn identifier(input: Span<'_>) -> IResult<Span<'_>, &str> {\n\n map(\n\n recognize(pair(\n\n character::satisfy(is_identifier_start),\n", "file_path": "api/formats/swim_recon/src/parser/tokens.rs", "rank": 49, "score": 256618.09195283114 }, { "content": "#[proc_macro_attribute]\n\npub fn stringify_attr(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let args = parse_macro_input!(args as AttributeArgs);\n\n\n\n let mut context = Context::default();\n\n let container_attrs = stringify_container_attrs(&mut context, args).unwrap_or_default();\n\n\n\n stringify_data(context, input, quote!(#(#container_attrs)*))\n\n}\n\n\n", "file_path": 
"macro_utilities/stringify_attr/attr_derive/src/lib.rs", "rank": 50, "score": 254809.57239574 }, { "content": "/// Convert a map lane observer into a stream of events.\n\npub fn summaries_to_events<K, V>(\n\n observer: Observer<TransactionSummary<Value, V>>,\n\n) -> impl Stream<Item = MapLaneEvent<K, V>> + Send\n\nwhere\n\n K: Form + Send + 'static,\n\n V: Send + Sync + 'static,\n\n{\n\n observer\n\n .into_stream()\n\n .transform_flat_map(DecomposeSummary::default())\n\n}\n\n\n\nimpl<K, V> DeferredSubscription<MapLaneEvent<K, V>> for MapSubscriber<K, V>\n\nwhere\n\n K: Form + Send + 'static,\n\n V: Send + Sync + 'static,\n\n{\n\n type View = FlatmapStream<ObserverStream<TransactionSummary<Value, V>>, DecomposeSummary<K, V>>;\n\n\n\n fn subscribe(&self) -> Option<Self::View> {\n\n self.inner.subscribe().ok().map(|obs| {\n\n obs.into_stream()\n\n .transform_flat_map(DecomposeSummary::default())\n\n })\n\n }\n\n}\n\n\n", "file_path": "swim_server/src/agent/lane/model/map/mod.rs", "rank": 51, "score": 253910.0132378286 }, { "content": "pub fn type_event_ref<K: Form>(event: &MapEvent<Value>) -> Result<MapEvent<K>, ReadError> {\n\n match event {\n\n MapEvent::Initial => Ok(MapEvent::Initial),\n\n MapEvent::Update(k) => K::try_from_value(k).map(MapEvent::Update),\n\n MapEvent::Remove(k) => K::try_from_value(k).map(MapEvent::Remove),\n\n MapEvent::Take(n) => Ok(MapEvent::Take(*n)),\n\n MapEvent::Drop(n) => Ok(MapEvent::Drop(*n)),\n\n MapEvent::Clear => Ok(MapEvent::Clear),\n\n }\n\n}\n\n\n", "file_path": "swim_client/src/downlink/typed/map/mod.rs", "rank": 52, "score": 253818.2604861546 }, { "content": "pub fn parse_config(value: String) -> ConfigType {\n\n ConfigType::Struct(string_to_ident(value))\n\n}\n\n\n", "file_path": "swim_server/agent_derive/src/agent.rs", "rank": 53, "score": 252140.5605106744 }, { "content": "#[proc_macro_attribute]\n\npub fn stringify_attr_raw(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as 
DeriveInput);\n\n let args = parse_macro_input!(args as AttributeArgs);\n\n\n\n let mut context = Context::default();\n\n let container_opt = stringify_container_attrs_raw(&mut context, args);\n\n let container_ts = match container_opt {\n\n Some(attr) => {\n\n quote!(#attr)\n\n }\n\n None => TokenStream2::new(),\n\n };\n\n\n\n stringify_data(context, input, container_ts)\n\n}\n\n\n", "file_path": "macro_utilities/stringify_attr/attr_derive/src/lib.rs", "rank": 54, "score": 251816.8145483481 }, { "content": "pub fn streamed_value_lane<T, Store>(\n\n name: impl Into<String>,\n\n buffer_size: NonZeroUsize,\n\n transient: bool,\n\n store: Store,\n\n) -> (ValueLane<T>, Observer<T>, Option<Box<dyn StoreIo>>)\n\nwhere\n\n Store: NodeStore,\n\n T: Default + Send + Sync + Serialize + DeserializeOwned + 'static,\n\n{\n\n let lane_id = store\n\n .lane_id_of(&name.into())\n\n .expect(\"Failed to fetch lane id\");\n\n let model = ValueDataModel::new(store, lane_id);\n\n\n\n let (lane, observer) = ValueLane::store_observable(&model, buffer_size, Default::default());\n\n\n\n let store_io: Option<Box<dyn StoreIo>> = if transient {\n\n None\n\n } else {\n\n Some(Box::new(ValueLaneStoreIo::new(\n\n observer.clone().into_stream(),\n\n model,\n\n )))\n\n };\n\n\n\n (lane, observer, store_io)\n\n}\n", "file_path": "swim_server/src/agent/lane/model/value/mod.rs", "rank": 55, "score": 250212.68535204575 }, { "content": "pub fn string_literal(input: Span<'_>) -> IResult<Span<'_>, Cow<'_, str>> {\n\n use nom::character::streaming as character;\n\n map_res(\n\n delimited(\n\n character::char('\"'),\n\n recognize(many0_count(alt((\n\n recognize(character::satisfy(|c| c != '\\\\' && c != '\\\"')),\n\n recognize(escape),\n\n )))),\n\n character::char('\"'),\n\n ),\n\n resolve_escapes,\n\n )(input)\n\n}\n\n\n", "file_path": "api/formats/swim_recon/src/parser/tokens.rs", "rank": 56, "score": 248552.0125978609 }, { "content": "pub fn attr(name: &'static str) -> ReadEvent<'static> {\n\n 
ReadEvent::StartAttribute(Cow::Borrowed(name))\n\n}\n\n\n", "file_path": "api/swim_form/src/structural/read/from_model/tests.rs", "rank": 57, "score": 248251.9035973145 }, { "content": "/// Create a recognizer for the body of the header attribute of a record.\n\n///\n\n/// #Arguments\n\n/// * `has_body` - Whehter there is a field lifted to be the body of the header.\n\n/// * `make_fields` - Factory to construct the state of the recognizer.\n\n/// * `num_slots` - The number of slots lifted into the header.\n\n/// * `vtable` - Functions that are generated by the macro that determine how incoming events\n\n/// modify the state.\n\npub fn header_recognizer<T, Flds, MkFlds>(\n\n has_body: bool,\n\n make_fields: MkFlds,\n\n num_slots: u32,\n\n vtable: HeaderVTable<T, Flds>,\n\n) -> FirstOf<HeaderRecognizer<T, Flds>, HeaderRecognizer<T, Flds>>\n\nwhere\n\n MkFlds: Fn() -> Flds,\n\n{\n\n let simple = HeaderRecognizer::new(has_body, true, num_slots, make_fields(), vtable);\n\n let flattened = HeaderRecognizer::new(has_body, false, num_slots, make_fields(), vtable);\n\n FirstOf::new(simple, flattened)\n\n}\n\n\n\n/// #Arguments\n\n/// * `has_body` - Whehter there is a field lifted to be the body of the header.\n\n/// * `flattened` - Whether the record containing the fields has been flattened into the attribute\n\n/// body (and so does not have explicit record body delimiting).\n\n/// * `num_slots` - The number of slots lifted into the header.\n\n/// * `fields` - The state of the recognizer.\n", "file_path": "api/swim_form/src/structural/read/recognizer/mod.rs", "rank": 58, "score": 242083.37105285385 }, { "content": "/// Create a new map lane with an attached observer that is split into a stream of events\n\n/// and a subscription handle.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `buffer_size` - The size of the buffer for the observer.\n\n/// * `transient` - Whether to persist the lane's state\n\n/// * `store` - A node store which for persisting data, if the lane is not 
transient.\n\npub fn streamed_map_lane<K, V, Store>(\n\n name: impl Into<String>,\n\n buffer_size: NonZeroUsize,\n\n transient: bool,\n\n store: Store,\n\n) -> StreamedMapLane<K, V, impl Stream<Item = MapLaneEvent<K, V>>>\n\nwhere\n\n K: Form + Send + Sync + Serialize + DeserializeOwned + Debug + 'static,\n\n V: Send + Sync + Debug + Serialize + DeserializeOwned + 'static,\n\n Store: NodeStore,\n\n{\n\n let (lane, observer) = MapLane::observable(buffer_size);\n\n let subscriber = MapSubscriber::new(observer.subscriber());\n\n let stream = summaries_to_events::<K, V>(observer.clone());\n\n\n\n let store_io: Option<Box<dyn StoreIo>> = if transient {\n\n None\n\n } else {\n\n let lane_id = store\n\n .lane_id_of(&name.into())\n", "file_path": "swim_server/src/agent/lane/model/map/mod.rs", "rank": 59, "score": 241886.92552117785 }, { "content": "/// Deconstructs a structure or enumeration into its fields. For example:\n\n/// ```\n\n/// struct S {\n\n/// a: i32,\n\n/// b: i32\n\n/// }\n\n/// ```\n\n///\n\n/// Will produce the following:\n\n/// ```compile_fail\n\n/// { a, b }\n\n/// ```\n\npub fn deconstruct_type(\n\n compound_type: &CompoundTypeKind,\n\n fields: &[&Label],\n\n as_ref: bool,\n\n) -> TokenStream {\n\n let fields: Vec<_> = fields\n\n .iter()\n\n .map(|name| match &name {\n\n Label::Unmodified(ident) => {\n\n quote! { #ident }\n\n }\n\n Label::Renamed { old_label, .. } => {\n\n quote! { #old_label }\n\n }\n\n Label::Foreign(ident, ..) => {\n\n quote! { #ident }\n\n }\n\n un @ Label::Anonymous(_) => {\n\n let binding = &un.as_ident();\n\n quote! 
{ #binding }\n", "file_path": "macro_utilities/src/utilities.rs", "rank": 60, "score": 241508.97864195908 }, { "content": "pub fn derive_events_body(\n\n task_name: &Ident,\n\n on_event_func_name: &Ident,\n\n) -> proc_macro2::TokenStream {\n\n quote!(\n\n let #task_name {\n\n mut lifecycle,\n\n event_stream,\n\n projection,\n\n ..\n\n } = *self;\n\n\n\n let model = projection(context.agent()).clone();\n\n let mut events = event_stream.take_until(context.agent_stop_event());\n\n\n\n let mut scan_stream = events.owning_scan(None, |prev_val, event| async move {\n\n Some((\n\n Some(event.clone()),\n\n ValueLaneEvent {\n\n previous: prev_val,\n", "file_path": "swim_server/agent_derive/src/lanes/value.rs", "rank": 61, "score": 241432.1177893289 }, { "content": "pub fn derive<F>(\n\n args: proc_macro::TokenStream,\n\n input: proc_macro::TokenStream,\n\n f: F,\n\n) -> proc_macro::TokenStream\n\nwhere\n\n F: Fn(AttributeArgs, DeriveInput) -> proc_macro::TokenStream,\n\n{\n\n let input = parse_macro_input!(input as DeriveInput);\n\n let args = parse_macro_input!(args as AttributeArgs);\n\n\n\n f(args, input)\n\n}\n", "file_path": "swim_server/agent_derive/src/utils.rs", "rank": 62, "score": 241011.87000431938 }, { "content": "pub fn write_string_literal(literal: &str, f: &mut Formatter<'_>) -> Result<(), std::fmt::Error> {\n\n if identifier::is_identifier(literal) {\n\n f.write_str(literal)\n\n } else if needs_escape(literal) {\n\n write!(f, \"\\\"{}\\\"\", escape_text(literal))\n\n } else {\n\n write!(f, \"\\\"{}\\\"\", literal)\n\n }\n\n}\n\n\n\nstatic DIGITS: [char; 16] = [\n\n '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f',\n\n];\n\n\n", "file_path": "api/swim_model/src/lib.rs", "rank": 63, "score": 240830.14688472258 }, { "content": "pub fn derive_events_body(\n\n on_sync: &Callback,\n\n on_cue: &Callback,\n\n on_remove: &Callback,\n\n) -> Option<TokenStream2> {\n\n match (on_sync, on_cue, on_remove) {\n\n (\n\n Callback::Custom {\n\n 
task_name,\n\n func_name: sync_func_name,\n\n },\n\n Callback::Custom {\n\n func_name: cue_func_name,\n\n ..\n\n },\n\n Callback::Custom {\n\n func_name: remove_func_name,\n\n ..\n\n },\n\n ) => {\n", "file_path": "swim_server/agent_derive/src/lanes/demand_map.rs", "rank": 64, "score": 238554.06725922972 }, { "content": "fn compute_num_slots(fields: &[&FieldModel], by_ref: bool) -> TokenStream {\n\n let increments = fields.iter().map(|field| {\n\n let field_index = &field.selector;\n\n let fld = if by_ref {\n\n quote!(&#field_index)\n\n } else {\n\n field_index.to_token_stream()\n\n };\n\n quote! {\n\n if !swim_form::structural::write::StructuralWritable::omit_as_field(#fld) {\n\n num_slots += 1;\n\n }\n\n }\n\n });\n\n quote! {\n\n let mut num_slots: usize = 0;\n\n #(#increments)*\n\n }\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 65, "score": 238501.13489487988 }, { "content": "type MakeArc<T> = fn(T) -> Arc<T>;\n\n\n\nimpl<T: RecognizerReadable> RecognizerReadable for Arc<T> {\n\n type Rec = MappedRecognizer<T::Rec, MakeArc<T>>;\n\n type AttrRec = MappedRecognizer<T::AttrRec, MakeArc<T>>;\n\n type BodyRec = MappedRecognizer<T::BodyRec, MakeArc<T>>;\n\n\n\n fn make_recognizer() -> Self::Rec {\n\n MappedRecognizer::new(T::make_recognizer(), Arc::new)\n\n }\n\n\n\n fn make_attr_recognizer() -> Self::AttrRec {\n\n MappedRecognizer::new(T::make_attr_recognizer(), Arc::new)\n\n }\n\n\n\n fn make_body_recognizer() -> Self::BodyRec {\n\n MappedRecognizer::new(T::make_body_recognizer(), Arc::new)\n\n }\n\n\n\n fn on_absent() -> Option<Self> {\n", "file_path": "api/swim_form/src/structural/read/recognizer/mod.rs", "rank": 66, "score": 238492.85430914423 }, { "content": "fn make_header(\n\n tag_body: &Option<&FieldModel>,\n\n header_fields: &[&FieldModel],\n\n by_ref: bool,\n\n) -> TokenStream {\n\n let prepend = if by_ref {\n\n quote!(prepend_ref)\n\n } else {\n\n quote!(prepend)\n\n };\n\n\n\n let base_expr = 
quote!(swim_form::structural::generic::header::NoSlots);\n\n let header_expr = header_fields.iter().rev().fold(base_expr, |expr, field| {\n\n let field_index = &field.selector;\n\n let literal_name = field.resolve_name();\n\n quote! {\n\n #expr.#prepend(#literal_name, #field_index)\n\n }\n\n });\n\n if let Some(body) = tag_body {\n", "file_path": "api/swim_form_derive/src/structural/write/mod.rs", "rank": 67, "score": 238080.33091906275 }, { "content": "fn compound_recognizer(\n\n model: &SegregatedStructModel<'_>,\n\n target: &syn::Type,\n\n builder: &syn::Type,\n\n) -> (syn::Type, syn::Type) {\n\n let (recog_ty_name, v_table_name) = if matches!(&model.fields.body, BodyFields::ReplacedBody(_))\n\n {\n\n (\n\n syn::Ident::new(\"DelegateStructRecognizer\", Span::call_site()),\n\n syn::Ident::new(\"OrdinalVTable\", Span::call_site()),\n\n )\n\n } else {\n\n let (r, v) = match (\n\n model.inner.fields_model.body_kind,\n\n model.inner.newtype_selector(),\n\n ) {\n\n (CompoundTypeKind::Labelled, Some(_)) => {\n\n (\"LabelledNewtypeRecognizer\", \"LabelledVTable\")\n\n }\n\n (CompoundTypeKind::Labelled, None) => (\"LabelledStructRecognizer\", \"LabelledVTable\"),\n", "file_path": "api/swim_form_derive/src/structural/read/mod.rs", "rank": 68, "score": 237889.07506903398 }, { "content": "/// A transaction to clear a summary.\n\npub fn clear_summary<V: Any + Send + Sync>(\n\n summary: &TVar<TransactionSummary<Value, V>>,\n\n) -> impl Stm<Result = ()> {\n\n summary.put(TransactionSummary::clear())\n\n}\n\n\n", "file_path": "swim_server/src/agent/lane/model/map/summary/mod.rs", "rank": 69, "score": 237734.2495689908 }, { "content": "/// A transaction to apply a removal to a summary.\n\npub fn remove_summary<V: Any + Send + Sync>(\n\n summary: &TVar<TransactionSummary<Value, V>>,\n\n key: Value,\n\n) -> impl Stm<Result = ()> + '_ {\n\n summary\n\n .get()\n\n .and_then(move |sum| summary.put(sum.remove(key.clone())))\n\n}\n\n\n\nimpl<V> TransactionSummary<Value, V> {\n\n /// 
Create an empty summary with the clear flag set.\n\n pub fn clear() -> Self {\n\n TransactionSummary {\n\n coordination_id: 0,\n\n clear: true,\n\n changes: Default::default(),\n\n }\n\n }\n\n\n\n /// Create a summary containing a single update.\n", "file_path": "swim_server/src/agent/lane/model/map/summary/mod.rs", "rank": 70, "score": 237734.2495689908 }, { "content": "/// A transaction to apply an update to a summary.\n\npub fn update_summary<V: Any + Send + Sync>(\n\n summary: &TVar<TransactionSummary<Value, V>>,\n\n key: Value,\n\n value: Arc<V>,\n\n) -> impl Stm<Result = ()> + '_ {\n\n summary\n\n .get()\n\n .and_then(move |sum| summary.put(sum.update(key.clone(), value.clone())))\n\n}\n\n\n", "file_path": "swim_server/src/agent/lane/model/map/summary/mod.rs", "rank": 71, "score": 237734.2495689908 }, { "content": "pub trait StructLike {\n\n fn fields(&self) -> &Fields;\n\n}\n\n\n\nimpl<T: StructLike> StructLike for &T {\n\n fn fields(&self) -> &Fields {\n\n (*self).fields()\n\n }\n\n}\n\n\n\nimpl StructLike for DataStruct {\n\n fn fields(&self) -> &Fields {\n\n &self.fields\n\n }\n\n}\n\n\n\nimpl StructLike for Variant {\n\n fn fields(&self) -> &Fields {\n\n &self.fields\n\n }\n\n}\n", "file_path": "api/swim_form_derive/src/structural/model/mod.rs", "rank": 72, "score": 237645.7189794274 }, { "content": "pub fn deserialize<'de, D: Deserialize<'de>>(obj: &'de [u8]) -> Result<D, StoreError> {\n\n bincode::deserialize(obj).map_err(|e| StoreError::Decoding(e.to_string()))\n\n}\n\n\n", "file_path": "swim_store/store_common/src/utils.rs", "rank": 73, "score": 236024.3462633046 }, { "content": "pub fn derive_demand_map_lifecycle(\n\n attr_args: AttributeArgs,\n\n input_ast: DeriveInput,\n\n) -> TokenStream {\n\n if let Err(error) = validate_input_ast(&input_ast, InputAstType::Lifecycle) {\n\n return TokenStream::from(quote! 
{#error});\n\n }\n\n\n\n let args = match DemandMapAttrs::from_list(&attr_args) {\n\n Ok(args) => args,\n\n Err(e) => {\n\n return TokenStream::from(e.write_errors());\n\n }\n\n };\n\n\n\n let lifecycle_name = input_ast.ident.clone();\n\n let gen_lifecycle = args\n\n .gen_lifecycle\n\n .unwrap_or_else(|| !has_fields(&input_ast.data));\n\n let task_name = get_task_struct_name(&input_ast.ident.to_string());\n", "file_path": "swim_server/agent_derive/src/lanes/demand_map.rs", "rank": 74, "score": 235781.20583695645 }, { "content": "type BoxU8Vec = fn(Vec<u8>) -> Box<[u8]>;\n\n\n\nimpl RecognizerReadable for Box<[u8]> {\n\n type Rec = MappedRecognizer<DataRecognizer, BoxU8Vec>;\n\n type AttrRec = SimpleAttrBody<Self::Rec>;\n\n type BodyRec = SimpleRecBody<Self::Rec>;\n\n\n\n fn make_recognizer() -> Self::Rec {\n\n MappedRecognizer::new(DataRecognizer, Vec::into_boxed_slice)\n\n }\n\n\n\n fn make_attr_recognizer() -> Self::AttrRec {\n\n SimpleAttrBody::new(Self::make_recognizer())\n\n }\n\n\n\n fn make_body_recognizer() -> Self::BodyRec {\n\n SimpleRecBody::new(Self::make_recognizer())\n\n }\n\n\n\n fn is_simple() -> bool {\n", "file_path": "api/swim_form/src/structural/read/recognizer/mod.rs", "rank": 75, "score": 235071.84856010825 }, { "content": "/// Attempt to read a [`StructuralReadable`] type from MessagePack data in a buffer.\n\npub fn read_from_msg_pack<T: StructuralReadable, R: Buf>(\n\n input: &mut R,\n\n) -> Result<T, MsgPackReadError> {\n\n let mut str_buf = BytesMut::new();\n\n let marker = read_marker(input)?;\n\n match marker {\n\n Marker::Null => Ok(T::read_extant()?),\n\n Marker::True => Ok(T::read_bool(true)?),\n\n Marker::False => Ok(T::read_bool(false)?),\n\n Marker::FixPos(n) => Ok(T::read_i32(n as i32)?),\n\n Marker::FixNeg(n) => Ok(T::read_i32(n as i32)?),\n\n Marker::I8 => compose_simple(input, Buf::get_i8, T::read_i32),\n\n Marker::I16 => compose_simple(input, Buf::get_i16, T::read_i32),\n\n Marker::I32 => compose_simple(input, Buf::get_i32, 
T::read_i32),\n\n Marker::I64 => compose_simple(input, Buf::get_i64, T::read_i64),\n\n Marker::U8 => compose_simple(input, Buf::get_u8, T::read_i32),\n\n Marker::U16 => compose_simple(input, Buf::get_u16, T::read_i32),\n\n Marker::U32 => compose_simple(input, Buf::get_u32, T::read_u32),\n\n Marker::U64 => compose_simple(input, Buf::get_u64, T::read_u64),\n\n Marker::F32 => compose_simple(input, Buf::get_f32, T::read_f64),\n", "file_path": "api/formats/swim_msgpack/src/reader/mod.rs", "rank": 76, "score": 234522.8519032304 }, { "content": "/// Enumerates the fields in a descriptor in the order in which the implementation exepects to\n\n/// receive them.\n\nfn enumerate_fields<'a>(\n\n model: &'a SegregatedFields<'a>,\n\n) -> impl Iterator<Item = FieldGroup<'a>> + Clone + 'a {\n\n let SegregatedFields { header, body } = model;\n\n let HeaderFields {\n\n tag_name,\n\n tag_body,\n\n header_fields,\n\n attributes,\n\n } = header;\n\n\n\n let body_fields = match body {\n\n BodyFields::StdBody(vec) => Either::Left(vec.iter().copied().map(FieldGroup::Item)),\n\n BodyFields::ReplacedBody(fld) => {\n\n Either::Right(std::iter::once(FieldGroup::DelegateBody(fld)))\n\n }\n\n };\n\n\n\n let header = if tag_body.is_none() && header_fields.is_empty() {\n\n None\n", "file_path": "api/swim_form_derive/src/structural/read/mod.rs", "rank": 77, "score": 234312.3327101883 }, { "content": "/// Fold operation to extract a name transform from the attributes on a type or field.\n\npub fn acc_rename(\n\n mut state: Option<NameTransform>,\n\n nested_meta: syn::NestedMeta,\n\n) -> SynValidation<Option<NameTransform>> {\n\n let err = match NameTransform::try_from(&nested_meta) {\n\n Ok(rename) => {\n\n if state.is_some() {\n\n Some(syn::Error::new_spanned(nested_meta, \"Duplicate tag\"))\n\n } else {\n\n state = Some(rename);\n\n None\n\n }\n\n }\n\n Err(NameTransformError::UnknownAttributeName(name, _)) if name == SCHEMA_PATH => None, //Overlap with other macros which we can ignore.\n\n 
Err(e) => Some(e.into()),\n\n };\n\n Validation::Validated(state, err.into())\n\n}\n\n\n\npub enum StructTransform<'a> {\n\n Rename(NameTransform),\n\n Newtype(Option<FieldSelector<'a>>),\n\n}\n\n\n", "file_path": "api/swim_form_derive/src/modifiers.rs", "rank": 78, "score": 233967.9903183725 }, { "content": "pub fn parse_callback(\n\n callback: &Option<darling::Result<String>>,\n\n task_name: Ident,\n\n kind: CallbackKind,\n\n) -> Callback {\n\n if let Some(name) = callback {\n\n if let Ok(name) = name {\n\n Callback::Custom {\n\n task_name,\n\n func_name: str_to_ident(name),\n\n }\n\n } else {\n\n match kind {\n\n CallbackKind::Start => Callback::Custom {\n\n task_name,\n\n func_name: default_on_start(),\n\n },\n\n CallbackKind::Command => Callback::Custom {\n\n task_name,\n\n func_name: default_on_command(),\n", "file_path": "swim_server/agent_derive/src/utils.rs", "rank": 79, "score": 233961.82754185837 }, { "content": "fn read_marker<R>(input: &mut R) -> Result<Marker, MsgPackReadError>\n\nwhere\n\n R: Buf,\n\n{\n\n if !input.has_remaining() {\n\n Err(MsgPackReadError::Incomplete)\n\n } else {\n\n let marker = Marker::from_u8(input.get_u8());\n\n Ok(marker)\n\n }\n\n}\n\n\n", "file_path": "api/formats/swim_msgpack/src/reader/mod.rs", "rank": 80, "score": 233817.79574274353 }, { "content": "fn type_contents_to_tokens(\n\n type_contents: &TypeContents<'_, ValueSchemaDescriptor, ValidatedField>,\n\n) -> TokenStream {\n\n match type_contents {\n\n TypeContents::Struct(repr) => {\n\n let schema = derive_compound_schema(\n\n &repr.fields,\n\n &repr.compound_type,\n\n &repr.descriptor,\n\n &repr.descriptor.label,\n\n );\n\n\n\n quote!(#schema)\n\n }\n\n TypeContents::Enum(EnumRepr { variants, .. 
}) => {\n\n let schemas = variants.iter().fold(TokenStream2::new(), |ts, variant| {\n\n let schema = derive_compound_schema(\n\n &variant.fields,\n\n &variant.compound_type,\n\n &variant.descriptor,\n", "file_path": "api/swim_schema_derive/src/validated_form/mod.rs", "rank": 81, "score": 233799.1036453248 }, { "content": "fn type_of<T: ?Sized>() -> TypeOf<T> {\n\n TypeOf(PhantomData)\n\n}\n\n\n\nimpl<T: ?Sized> Debug for TypeOf<T> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", type_name::<T>())\n\n }\n\n}\n", "file_path": "swim_server/src/agent/lane/model/mod.rs", "rank": 82, "score": 232814.90048654753 }, { "content": "#[test]\n\npub fn clear_to_value() {\n\n let expected = Value::of_attr(\"clear\");\n\n assert_eq!(\n\n Form::into_value(UntypedMapModification::<Value>::Clear),\n\n expected\n\n );\n\n assert_eq!(\n\n Form::as_value(&UntypedMapModification::<Value>::Clear),\n\n expected\n\n );\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 83, "score": 230730.71324305597 }, { "content": "#[test]\n\npub fn skip_from_value() {\n\n let rep = Value::of_attr((\"drop\", 5));\n\n let result1: MapModResult = Form::try_from_value(&rep);\n\n assert_eq!(result1, Ok(UntypedMapModification::Drop(5)));\n\n let result2: MapModResult = Form::try_convert(rep);\n\n assert_eq!(result2, Ok(UntypedMapModification::Drop(5)));\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 84, "score": 230730.71324305597 }, { "content": "#[test]\n\npub fn take_from_value() {\n\n let rep = Value::of_attr((\"take\", 3));\n\n let result1: MapModResult = Form::try_from_value(&rep);\n\n assert_eq!(result1, Ok(UntypedMapModification::Take(3)));\n\n let result2: MapModResult = Form::try_convert(rep);\n\n assert_eq!(result2, Ok(UntypedMapModification::Take(3)));\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 85, "score": 230730.71324305597 }, { "content": "#[test]\n\npub fn 
remove_to_value() {\n\n let expected = Value::of_attr((\"remove\", Value::record(vec![Item::slot(\"key\", \"hello\")])));\n\n assert_eq!(\n\n Form::into_value(UntypedMapModification::<Value>::Remove(Value::text(\n\n \"hello\"\n\n ))),\n\n expected\n\n );\n\n assert_eq!(\n\n Form::as_value(&UntypedMapModification::<Value>::Remove(Value::text(\n\n \"hello\"\n\n ))),\n\n expected\n\n );\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 86, "score": 230730.71324305597 }, { "content": "#[test]\n\npub fn remove_from_value() {\n\n let rep = Value::of_attr((\"remove\", Value::record(vec![Item::slot(\"key\", \"hello\")])));\n\n let result1: MapModResult = Form::try_from_value(&rep);\n\n assert_eq!(\n\n result1,\n\n Ok(UntypedMapModification::<Value>::Remove(Value::text(\n\n \"hello\"\n\n )))\n\n );\n\n let result2: MapModResult = Form::try_convert(rep);\n\n assert_eq!(\n\n result2,\n\n Ok(UntypedMapModification::Remove(Value::text(\"hello\")))\n\n );\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 87, "score": 230730.71324305597 }, { "content": "#[test]\n\npub fn clear_from_value() {\n\n let rep = Value::of_attr(\"clear\");\n\n let result1: MapModResult = Form::try_from_value(&rep);\n\n assert_eq!(result1, Ok(UntypedMapModification::<Value>::Clear));\n\n let result2: MapModResult = Form::try_convert(rep);\n\n assert_eq!(result2, Ok(UntypedMapModification::<Value>::Clear));\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 88, "score": 230730.71324305597 }, { "content": "#[test]\n\npub fn take_to_value() {\n\n let expected = Value::of_attr((\"take\", 3));\n\n assert_eq!(\n\n Form::into_value(UntypedMapModification::<Value>::Take(3)),\n\n expected\n\n );\n\n assert_eq!(\n\n Form::as_value(&UntypedMapModification::<Value>::Take(3)),\n\n expected\n\n );\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 89, "score": 230730.71324305597 }, { "content": 
"#[test]\n\npub fn skip_to_value() {\n\n let expected = Value::of_attr((\"drop\", 5));\n\n assert_eq!(\n\n Form::into_value(UntypedMapModification::<Value>::Drop(5)),\n\n expected\n\n );\n\n assert_eq!(\n\n Form::as_value(&UntypedMapModification::<Value>::Drop(5)),\n\n expected\n\n );\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 90, "score": 230730.71324305597 }, { "content": "/// Fold operation to extract a struct transform from the attributes on a type.\n\npub fn acc_struct_transform(\n\n mut state: Option<StructTransform>,\n\n nested_meta: syn::NestedMeta,\n\n) -> SynValidation<Option<StructTransform>> {\n\n let err = match StructTransform::try_from(&nested_meta) {\n\n Ok(transform) => match (&mut state, transform) {\n\n (Some(StructTransform::Rename(_)), StructTransform::Rename(_)) => Some(\n\n syn::Error::new_spanned(nested_meta, \"Duplicate `rename` tag\"),\n\n ),\n\n (Some(StructTransform::Newtype(_)), StructTransform::Newtype(_)) => Some(\n\n syn::Error::new_spanned(nested_meta, \"Duplicate `newtype` tag\"),\n\n ),\n\n (None, transform) => match transform {\n\n StructTransform::Rename(rename) => {\n\n state = Some(StructTransform::Rename(rename));\n\n None\n\n }\n\n StructTransform::Newtype(_) => {\n\n state = Some(StructTransform::Newtype(None));\n\n None\n", "file_path": "api/swim_form_derive/src/modifiers.rs", "rank": 91, "score": 230644.25510637515 }, { "content": "pub fn get_agent_data(\n\n args: SwimAgentAttrs,\n\n) -> Result<(AgentName, ConfigType, Vec<AgentField>), TokenStream> {\n\n let SwimAgentAttrs {\n\n ident: agent_name,\n\n data: fields,\n\n config: config_type,\n\n ..\n\n } = args;\n\n\n\n let mut agent_fields = Vec::new();\n\n\n\n match fields {\n\n Data::Enum(_) => {\n\n unimplemented!()\n\n }\n\n Data::Struct(fields) => {\n\n for field in fields {\n\n if let (Some(lane_type), Some(lane_name), Some(lifecycle_name)) =\n\n (field.get_lane_type(), field.ident, field.name)\n", "file_path": 
"swim_server/agent_derive/src/agent.rs", "rank": 92, "score": 230638.0267164699 }, { "content": "fn attr_final(input: Span<'_>) -> IResult<Span<'_>, ParseEvents<'_>> {\n\n map(preceded(char_comp::char('@'), attr_name_final), |name| {\n\n ParseEvents::TerminateWithAttr(FinalAttrStage::Start(name))\n\n })(input)\n\n}\n\n\n", "file_path": "api/formats/swim_recon/src/parser/record/mod.rs", "rank": 93, "score": 229211.05079836206 }, { "content": "fn assess_kind<'a, It>(definition: &'a Fields, fields: It) -> SynValidation<CompoundTypeKind>\n\nwhere\n\n It: Iterator<Item = &'a TaggedFieldModel<'a>> + 'a,\n\n{\n\n let mut kind = Some(CompoundTypeKind::Unit);\n\n for field in fields {\n\n let TaggedFieldModel { directive, .. } = field;\n\n match *directive {\n\n FieldKind::Item => match kind {\n\n Some(CompoundTypeKind::Labelled) => {\n\n if !field.is_labelled() {\n\n let err = syn::Error::new_spanned(definition, BAD_FIELDS);\n\n return Validation::fail(err);\n\n }\n\n }\n\n Some(CompoundTypeKind::Tuple) => {\n\n if field.is_labelled() {\n\n let err = syn::Error::new_spanned(definition, BAD_FIELDS);\n\n return Validation::fail(err);\n\n }\n", "file_path": "api/swim_form_derive/src/structural/model/record/mod.rs", "rank": 94, "score": 228921.50949487917 }, { "content": "/// Create an iterator that will parse a sequence of events from a complete string.\n\n///\n\n/// * `input` - The input to parse.\n\n/// * `allow_comments` - Boolean flag indicating whether or not the parsing should fail on comments.\n\npub fn parse_iterator(\n\n input: Span<'_>,\n\n allow_comments: bool,\n\n) -> impl Iterator<Item = Result<ReadEvent<'_>, nom::error::Error<Span<'_>>>> + '_ {\n\n record::ParseIterator::new(input, allow_comments)\n\n}\n\n\n", "file_path": "api/formats/swim_recon/src/parser/mod.rs", "rank": 95, "score": 227761.36571623792 }, { "content": "/// Convert a downlink [`Command`], from a map downlink, into a Warp [`RequestEnvelope`].\n\npub fn map_envelope(\n\n path: RelativePath,\n\n 
command: Command<UntypedMapModification<Value>>,\n\n) -> RequestEnvelope {\n\n envelope_for(map::envelope_body, path, command)\n\n}\n\n\n", "file_path": "swim_client/src/downlink/subscription/envelopes/mod.rs", "rank": 96, "score": 227754.63613881636 }, { "content": "pub fn uplink_aggregator(\n\n config: AggregatorConfig,\n\n stop_rx: trigger::Receiver,\n\n uplink_pulse_lanes: HashMap<RelativePath, SupplyLane<WarpUplinkPulse>>,\n\n lane_tx: mpsc::Sender<(RelativePath, WarpUplinkProfile)>,\n\n uplink_to_lane_tx: trigger::Sender,\n\n) -> (\n\n impl Future<Output = Result<(), AggregatorError>>,\n\n mpsc::Sender<TaggedWarpUplinkProfile>,\n\n) {\n\n let AggregatorConfig {\n\n sample_rate,\n\n buffer_size,\n\n yield_after,\n\n backpressure_config,\n\n } = config;\n\n\n\n let (uplink_tx, uplink_rx) = mpsc::channel(buffer_size.get());\n\n let metric_stream = ReceiverStream::new(uplink_rx);\n\n let (sink, bp_stream) = mpsc::channel(buffer_size.get());\n", "file_path": "swim_server/swim_metrics/src/uplink/mod.rs", "rank": 97, "score": 227754.63613881636 }, { "content": "#[test]\n\npub fn simple_insert_from_value() {\n\n let attr = Attr::of((\"update\", Value::record(vec![Item::slot(\"key\", \"hello\")])));\n\n let body = Item::ValueItem(Value::Int32Value(2));\n\n let rep = Value::Record(vec![attr], vec![body]);\n\n let result1: MapModResult = Form::try_from_value(&rep);\n\n assert_eq!(\n\n result1,\n\n Ok(UntypedMapModification::Update(\n\n Value::text(\"hello\"),\n\n Arc::new(Value::Int32Value(2))\n\n ))\n\n );\n\n let result2: MapModResult = Form::try_convert(rep);\n\n assert_eq!(\n\n result2,\n\n Ok(UntypedMapModification::Update(\n\n Value::text(\"hello\"),\n\n Arc::new(Value::Int32Value(2))\n\n ))\n\n );\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 98, "score": 227546.07585036417 }, { "content": "#[test]\n\npub fn simple_insert_to_value() {\n\n let attr = Attr::of((\"update\", Value::record(vec![Item::slot(\"key\", \"hello\")])));\n\n 
let body = Item::ValueItem(Value::Int32Value(2));\n\n let expected = Value::Record(vec![attr], vec![body]);\n\n assert_eq!(\n\n Form::into_value(UntypedMapModification::Update(\n\n Value::text(\"hello\"),\n\n Arc::new(Value::Int32Value(2))\n\n )),\n\n expected\n\n );\n\n assert_eq!(\n\n Form::as_value(&UntypedMapModification::Update(\n\n Value::text(\"hello\"),\n\n Arc::new(Value::Int32Value(2))\n\n )),\n\n expected\n\n );\n\n}\n\n\n", "file_path": "swim_client/src/downlink/model/map/tests.rs", "rank": 99, "score": 227546.07585036417 } ]
Rust
src/sys/component_manager/src/vmex.rs
winksaville/Fuchsia
a0ec86f1d51ae8d2538ff3404dad46eb302f9b4f
use { crate::{ capability::*, model::{error::*, hooks::*}, }, cm_rust::CapabilityPath, failure::Error, fidl::endpoints::ServerEnd, fidl_fuchsia_boot as fboot, fidl_fuchsia_security_resource as fsec, fuchsia_async as fasync, fuchsia_component::client::connect_to_service, fuchsia_zircon::{self as zx, HandleBased}, futures::{future::BoxFuture, prelude::*}, lazy_static::lazy_static, log::warn, std::{convert::TryInto, sync::Arc}, }; lazy_static! { pub static ref VMEX_CAPABILITY_PATH: CapabilityPath = "/svc/fuchsia.process.Vmex".try_into().unwrap(); } pub struct VmexService { inner: Arc<VmexServiceInner>, } impl VmexService { pub fn new() -> Self { Self { inner: Arc::new(VmexServiceInner::new()) } } pub fn hooks(&self) -> Vec<HookRegistration> { vec![HookRegistration { event_type: EventType::RouteBuiltinCapability, callback: self.inner.clone(), }] } pub async fn serve(mut stream: fsec::VmexRequestStream) -> Result<(), Error> { let root_resource_provider = connect_to_service::<fboot::RootResourceMarker>()?; let root_resource = root_resource_provider.get().await?; while let Some(fsec::VmexRequest::Get { responder }) = stream.try_next().await? 
{ let vmex_handle = root_resource.create_child(zx::ResourceKind::VMEX, None, 0, 0, b"vmex")?; let restricted_vmex_handle = vmex_handle.replace_handle( zx::Rights::TRANSFER | zx::Rights::DUPLICATE | zx::Rights::INSPECT, )?; responder.send(zx::Resource::from(restricted_vmex_handle))?; } Ok(()) } } struct VmexServiceInner; impl VmexServiceInner { pub fn new() -> Self { Self {} } async fn on_route_builtin_capability_async<'a>( self: Arc<Self>, capability: &'a ComponentManagerCapability, capability_provider: Option<Box<dyn ComponentManagerCapabilityProvider>>, ) -> Result<Option<Box<dyn ComponentManagerCapabilityProvider>>, ModelError> { match capability { ComponentManagerCapability::LegacyService(capability_path) if *capability_path == *VMEX_CAPABILITY_PATH => { Ok(Some(Box::new(VmexCapabilityProvider::new()) as Box<dyn ComponentManagerCapabilityProvider>)) } _ => Ok(capability_provider), } } } impl Hook for VmexServiceInner { fn on<'a>(self: Arc<Self>, event: &'a Event) -> BoxFuture<'a, Result<(), ModelError>> { Box::pin(async move { match event { Event::RouteBuiltinCapability { realm: _, capability, capability_provider } => { let mut capability_provider = capability_provider.lock().await; *capability_provider = self .on_route_builtin_capability_async(capability, capability_provider.take()) .await?; } _ => {} }; Ok(()) }) } } struct VmexCapabilityProvider; impl VmexCapabilityProvider { pub fn new() -> Self { Self {} } } impl ComponentManagerCapabilityProvider for VmexCapabilityProvider { fn open( &self, _flags: u32, _open_mode: u32, _relative_path: String, server_end: zx::Channel, ) -> BoxFuture<Result<(), ModelError>> { let server_end = ServerEnd::<fsec::VmexMarker>::new(server_end); let stream: fsec::VmexRequestStream = server_end.into_stream().unwrap(); fasync::spawn(async move { let result = VmexService::serve(stream).await; if let Err(e) = result { warn!("VmexService.open failed: {}", e); } }); Box::pin(async { Ok(()) }) } } #[cfg(test)] mod tests { use { 
super::*, crate::model::{Realm, ResolverRegistry}, fidl::endpoints::ClientEnd, fuchsia_async as fasync, fuchsia_zircon::AsHandleRef, fuchsia_zircon_sys as sys, futures::lock::Mutex, }; fn root_resource_available() -> bool { let bin = std::env::args().next(); match bin.as_ref().map(String::as_ref) { Some("/pkg/test/component_manager_tests") => false, Some("/pkg/test/component_manager_boot_env_tests") => true, _ => panic!("Unexpected test binary name {:?}", bin), } } fn serve_vmex() -> Result<fsec::VmexProxy, Error> { let (proxy, stream) = fidl::endpoints::create_proxy_and_stream::<fsec::VmexMarker>()?; fasync::spawn_local( VmexService::serve(stream) .unwrap_or_else(|e| panic!("Error while serving vmex service: {}", e)), ); Ok(proxy) } #[fasync::run_singlethreaded(test)] async fn fail_with_no_root_resource() -> Result<(), Error> { if root_resource_available() { return Ok(()); } let (_, stream) = fidl::endpoints::create_proxy_and_stream::<fsec::VmexMarker>()?; assert!(!VmexService::serve(stream).await.is_ok()); Ok(()) } #[fasync::run_singlethreaded(test)] async fn kind_type_is_vmex() -> Result<(), Error> { if !root_resource_available() { return Ok(()); } let vmex_provider = serve_vmex()?; let vmex_resource = vmex_provider.get().await?; let resource_info = vmex_resource.info()?; assert_eq!(resource_info.kind, zx::sys::ZX_RSRC_KIND_VMEX); assert_eq!(resource_info.base, 0); assert_eq!(resource_info.size, 0); Ok(()) } #[fasync::run_singlethreaded(test)] async fn minimal_rights_assigned() -> Result<(), Error> { if !root_resource_available() { return Ok(()); } let vmex_provider = serve_vmex()?; let vmex_resource = vmex_provider.get().await?; let resource_info = zx::Handle::from(vmex_resource).basic_info()?; assert_eq!( resource_info.rights, zx::Rights::DUPLICATE | zx::Rights::TRANSFER | zx::Rights::INSPECT ); Ok(()) } #[fasync::run_singlethreaded(test)] async fn connect_to_vmex_service() -> Result<(), Error> { if !root_resource_available() { return Ok(()); } let 
vmex_service = Arc::new(VmexService::new()); let hooks = Hooks::new(None); hooks.install(vmex_service.hooks()).await; let capability_provider = Arc::new(Mutex::new(None)); let capability = ComponentManagerCapability::LegacyService(VMEX_CAPABILITY_PATH.clone()); let (client, server) = zx::Channel::create()?; let realm = { let resolver = ResolverRegistry::new(); let root_component_url = "test:///root".to_string(); Arc::new(Realm::new_root_realm(resolver, root_component_url)) }; let event = Event::RouteBuiltinCapability { realm: realm.clone(), capability: capability.clone(), capability_provider: capability_provider.clone(), }; hooks.dispatch(&event).await?; let capability_provider = capability_provider.lock().await.take(); if let Some(capability_provider) = capability_provider { capability_provider.open(0, 0, String::new(), server).await?; } let vmex_client = ClientEnd::<fsec::VmexMarker>::new(client) .into_proxy() .expect("failed to create launcher proxy"); let vmex_resource = vmex_client.get().await?; assert_ne!(vmex_resource.raw_handle(), sys::ZX_HANDLE_INVALID); Ok(()) } }
use { crate::{ capability::*, model::{error::*, hooks::*}, }, cm_rust::CapabilityPath, failure::Error, fidl::endpoints::ServerEnd, fidl_fuchsia_boot as fboot, fidl_fuchsia_security_resource as fsec, fuchsia_async as fasync, fuchsia_component::client::connect_to_service, fuchsia_zircon::{self as zx, HandleBased}, futures::{future::BoxFuture, prelude::*}, lazy_static::lazy_static, log::warn, std::{convert::TryInto, sync::Arc}, }; lazy_static! { pub static ref VMEX_CAPABILITY_PATH: CapabilityPath = "/svc/fuchsia.process.Vmex".try_into().unwrap(); } pub struct VmexService { inner: Arc<VmexServiceInner>, } impl VmexService { pub fn new() -> Self { Self { inner: Arc::new(VmexServiceInner::new()) } } pub fn hooks(&self) -> Vec<HookRegistration> { vec![HookRegistration { event_type: EventType::RouteBuiltinCapability, callback: self.inner.clone(), }] } pub async fn serve(mut stream: fsec::VmexRequestStream) -> Result<(), Error> { let root_resource_provider
| panic!("Error while serving vmex service: {}", e)), ); Ok(proxy) } #[fasync::run_singlethreaded(test)] async fn fail_with_no_root_resource() -> Result<(), Error> { if root_resource_available() { return Ok(()); } let (_, stream) = fidl::endpoints::create_proxy_and_stream::<fsec::VmexMarker>()?; assert!(!VmexService::serve(stream).await.is_ok()); Ok(()) } #[fasync::run_singlethreaded(test)] async fn kind_type_is_vmex() -> Result<(), Error> { if !root_resource_available() { return Ok(()); } let vmex_provider = serve_vmex()?; let vmex_resource = vmex_provider.get().await?; let resource_info = vmex_resource.info()?; assert_eq!(resource_info.kind, zx::sys::ZX_RSRC_KIND_VMEX); assert_eq!(resource_info.base, 0); assert_eq!(resource_info.size, 0); Ok(()) } #[fasync::run_singlethreaded(test)] async fn minimal_rights_assigned() -> Result<(), Error> { if !root_resource_available() { return Ok(()); } let vmex_provider = serve_vmex()?; let vmex_resource = vmex_provider.get().await?; let resource_info = zx::Handle::from(vmex_resource).basic_info()?; assert_eq!( resource_info.rights, zx::Rights::DUPLICATE | zx::Rights::TRANSFER | zx::Rights::INSPECT ); Ok(()) } #[fasync::run_singlethreaded(test)] async fn connect_to_vmex_service() -> Result<(), Error> { if !root_resource_available() { return Ok(()); } let vmex_service = Arc::new(VmexService::new()); let hooks = Hooks::new(None); hooks.install(vmex_service.hooks()).await; let capability_provider = Arc::new(Mutex::new(None)); let capability = ComponentManagerCapability::LegacyService(VMEX_CAPABILITY_PATH.clone()); let (client, server) = zx::Channel::create()?; let realm = { let resolver = ResolverRegistry::new(); let root_component_url = "test:///root".to_string(); Arc::new(Realm::new_root_realm(resolver, root_component_url)) }; let event = Event::RouteBuiltinCapability { realm: realm.clone(), capability: capability.clone(), capability_provider: capability_provider.clone(), }; hooks.dispatch(&event).await?; let capability_provider 
= capability_provider.lock().await.take(); if let Some(capability_provider) = capability_provider { capability_provider.open(0, 0, String::new(), server).await?; } let vmex_client = ClientEnd::<fsec::VmexMarker>::new(client) .into_proxy() .expect("failed to create launcher proxy"); let vmex_resource = vmex_client.get().await?; assert_ne!(vmex_resource.raw_handle(), sys::ZX_HANDLE_INVALID); Ok(()) } }
= connect_to_service::<fboot::RootResourceMarker>()?; let root_resource = root_resource_provider.get().await?; while let Some(fsec::VmexRequest::Get { responder }) = stream.try_next().await? { let vmex_handle = root_resource.create_child(zx::ResourceKind::VMEX, None, 0, 0, b"vmex")?; let restricted_vmex_handle = vmex_handle.replace_handle( zx::Rights::TRANSFER | zx::Rights::DUPLICATE | zx::Rights::INSPECT, )?; responder.send(zx::Resource::from(restricted_vmex_handle))?; } Ok(()) } } struct VmexServiceInner; impl VmexServiceInner { pub fn new() -> Self { Self {} } async fn on_route_builtin_capability_async<'a>( self: Arc<Self>, capability: &'a ComponentManagerCapability, capability_provider: Option<Box<dyn ComponentManagerCapabilityProvider>>, ) -> Result<Option<Box<dyn ComponentManagerCapabilityProvider>>, ModelError> { match capability { ComponentManagerCapability::LegacyService(capability_path) if *capability_path == *VMEX_CAPABILITY_PATH => { Ok(Some(Box::new(VmexCapabilityProvider::new()) as Box<dyn ComponentManagerCapabilityProvider>)) } _ => Ok(capability_provider), } } } impl Hook for VmexServiceInner { fn on<'a>(self: Arc<Self>, event: &'a Event) -> BoxFuture<'a, Result<(), ModelError>> { Box::pin(async move { match event { Event::RouteBuiltinCapability { realm: _, capability, capability_provider } => { let mut capability_provider = capability_provider.lock().await; *capability_provider = self .on_route_builtin_capability_async(capability, capability_provider.take()) .await?; } _ => {} }; Ok(()) }) } } struct VmexCapabilityProvider; impl VmexCapabilityProvider { pub fn new() -> Self { Self {} } } impl ComponentManagerCapabilityProvider for VmexCapabilityProvider { fn open( &self, _flags: u32, _open_mode: u32, _relative_path: String, server_end: zx::Channel, ) -> BoxFuture<Result<(), ModelError>> { let server_end = ServerEnd::<fsec::VmexMarker>::new(server_end); let stream: fsec::VmexRequestStream = server_end.into_stream().unwrap(); fasync::spawn(async move 
{ let result = VmexService::serve(stream).await; if let Err(e) = result { warn!("VmexService.open failed: {}", e); } }); Box::pin(async { Ok(()) }) } } #[cfg(test)] mod tests { use { super::*, crate::model::{Realm, ResolverRegistry}, fidl::endpoints::ClientEnd, fuchsia_async as fasync, fuchsia_zircon::AsHandleRef, fuchsia_zircon_sys as sys, futures::lock::Mutex, }; fn root_resource_available() -> bool { let bin = std::env::args().next(); match bin.as_ref().map(String::as_ref) { Some("/pkg/test/component_manager_tests") => false, Some("/pkg/test/component_manager_boot_env_tests") => true, _ => panic!("Unexpected test binary name {:?}", bin), } } fn serve_vmex() -> Result<fsec::VmexProxy, Error> { let (proxy, stream) = fidl::endpoints::create_proxy_and_stream::<fsec::VmexMarker>()?; fasync::spawn_local( VmexService::serve(stream) .unwrap_or_else(|e
random
[]
Rust
src/bin/upgrade/main.rs
thiagoarrais/cargo-edit
265ddb082c0f490a12d9dfb254195717b2ba8d51
#![warn( missing_docs, missing_debug_implementations, missing_copy_implementations, trivial_casts, trivial_numeric_casts, unsafe_code, unstable_features, unused_import_braces, unused_qualifications )] #[macro_use] extern crate error_chain; use crate::errors::*; use cargo_edit::{find, get_latest_dependency, CrateName, Dependency, LocalManifest}; use failure::Fail; use std::collections::HashMap; use std::io::Write; use std::path::{Path, PathBuf}; use std::process; use structopt::StructOpt; use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor}; mod errors { error_chain! { links { CargoEditLib(::cargo_edit::Error, ::cargo_edit::ErrorKind); } foreign_links { CargoMetadata(::failure::Compat<::cargo_metadata::Error>); } } } #[derive(Debug, StructOpt)] #[structopt(bin_name = "cargo")] enum Command { #[structopt(name = "upgrade", author = "")] #[structopt( after_help = "This command differs from `cargo update`, which updates the dependency versions recorded in the local lock file (Cargo.lock). If `<dependency>`(s) are provided, only the specified dependencies will be upgraded. The version to upgrade to for each can be specified with e.g. `[email protected]` or `serde@>=0.9,<2.0`. Dev, build, and all target dependencies will also be upgraded. Only dependencies from crates.io are supported. Git/path dependencies will be ignored. All packages in the workspace will be upgraded if the `--all` flag is supplied. The `--all` flag may be supplied in the presence of a virtual manifest." 
)] Upgrade(Args), } #[derive(Debug, StructOpt)] struct Args { dependency: Vec<String>, #[structopt(long = "manifest-path", value_name = "path")] manifest_path: Option<PathBuf>, #[structopt(long = "all")] all: bool, #[structopt(long = "allow-prerelease")] allow_prerelease: bool, #[structopt(long = "dry-run")] dry_run: bool, } struct Manifests(Vec<(LocalManifest, cargo_metadata::Package)>); impl Manifests { fn get_all(manifest_path: &Option<PathBuf>) -> Result<Self> { let mut cmd = cargo_metadata::MetadataCommand::new(); cmd.no_deps(); if let Some(path) = manifest_path { cmd.manifest_path(path); } let result = cmd .exec() .map_err(|e| Error::from(e.compat()).chain_err(|| "Failed to get workspace metadata"))?; result .packages .into_iter() .map(|package| { Ok(( LocalManifest::try_new(Path::new(&package.manifest_path))?, package, )) }) .collect::<Result<Vec<_>>>() .map(Manifests) } fn get_local_one(manifest_path: &Option<PathBuf>) -> Result<Self> { let resolved_manifest_path: String = find(&manifest_path)?.to_string_lossy().into(); let manifest = LocalManifest::find(&manifest_path)?; let mut cmd = cargo_metadata::MetadataCommand::new(); cmd.no_deps(); if let Some(path) = manifest_path { cmd.manifest_path(path); } let result = cmd .exec() .map_err(|e| Error::from(e.compat()).chain_err(|| "Invalid manifest"))?; let packages = result.packages; let package = packages .iter() .find(|p| p.manifest_path.to_string_lossy() == resolved_manifest_path) .chain_err(|| { "Found virtual manifest, but this command requires running against an \ actual package in this workspace. Try adding `--all`." 
})?; Ok(Manifests(vec![(manifest, package.to_owned())])) } fn get_dependencies(&self, only_update: Vec<String>) -> Result<DesiredUpgrades> { fn is_version_dep(dependency: &cargo_metadata::Dependency) -> bool { match dependency.source { Some(ref s) => s.splitn(2, '+').next() == Some("registry"), _ => false, } } Ok(DesiredUpgrades(if only_update.is_empty() { self.0 .iter() .flat_map(|&(_, ref package)| package.dependencies.clone()) .filter(is_version_dep) .map(|dependency| (dependency.name, None)) .collect() } else { only_update .into_iter() .map(|name| { if let Some(dependency) = CrateName::new(&name.clone()).parse_as_version()? { Ok(( dependency.name.clone(), dependency.version().map(String::from), )) } else { Ok((name, None)) } }) .collect::<Result<_>>()? })) } fn upgrade(self, upgraded_deps: &ActualUpgrades, dry_run: bool) -> Result<()> { if dry_run { let bufwtr = BufferWriter::stdout(ColorChoice::Always); let mut buffer = bufwtr.buffer(); buffer .set_color(ColorSpec::new().set_fg(Some(Color::Cyan)).set_bold(true)) .chain_err(|| "Failed to set output colour")?; write!(&mut buffer, "Starting dry run. 
") .chain_err(|| "Failed to write dry run message")?; buffer .set_color(&ColorSpec::new()) .chain_err(|| "Failed to clear output colour")?; writeln!(&mut buffer, "Changes will not be saved.") .chain_err(|| "Failed to write dry run message")?; bufwtr .print(&buffer) .chain_err(|| "Failed to print dry run message")?; } for (mut manifest, package) in self.0 { println!("{}:", package.name); for (name, version) in &upgraded_deps.0 { manifest.upgrade(&Dependency::new(name).set_version(version), dry_run)?; } } Ok(()) } } struct DesiredUpgrades(HashMap<String, Option<String>>); struct ActualUpgrades(HashMap<String, String>); impl DesiredUpgrades { fn get_upgraded(self, allow_prerelease: bool) -> Result<ActualUpgrades> { self.0 .into_iter() .map(|(name, version)| { if let Some(v) = version { Ok((name, v)) } else { get_latest_dependency(&name, allow_prerelease) .map(|new_dep| { ( name, new_dep .version() .expect("Invalid dependency type") .to_string(), ) }) .chain_err(|| "Failed to get new version") } }) .collect::<Result<_>>() .map(ActualUpgrades) } } fn process(args: Args) -> Result<()> { let Args { dependency, manifest_path, all, allow_prerelease, dry_run, .. } = args; let manifests = if all { Manifests::get_all(&manifest_path) } else { Manifests::get_local_one(&manifest_path) }?; let existing_dependencies = manifests.get_dependencies(dependency)?; let upgraded_dependencies = existing_dependencies.get_upgraded(allow_prerelease)?; manifests.upgrade(&upgraded_dependencies, dry_run) } fn main() { let args: Command = Command::from_args(); let Command::Upgrade(args) = args; if let Err(err) = process(args) { eprintln!("Command failed due to unhandled error: {}\n", err); for e in err.iter().skip(1) { eprintln!("Caused by: {}", e); } if let Some(backtrace) = err.backtrace() { eprintln!("Backtrace: {:?}", backtrace); } process::exit(1); } }
#![warn( missing_docs, missing_debug_implementations, missing_copy_implementations, trivial_casts, trivial_numeric_casts, unsafe_code, unstable_features, unused_import_braces, unused_qualifications )] #[macro_use] extern crate error_chain; use crate::errors::*; use cargo_edit::{find, get_latest_dependency, CrateName, Dependency, LocalManifest}; use failure::Fail; use std::collections::HashMap; use std::io::Write; use std::path::{Path, PathBuf}; use std::process; use structopt::StructOpt; use termcolor::{BufferWriter, Color, ColorChoice, ColorSpec, WriteColor}; mod errors { error_chain! { links { CargoEditLib(::cargo_edit::Error, ::cargo_edit::ErrorKind); } foreign_links { CargoMetadata(::failure::Compat<::cargo_metadata::Error>); } } } #[derive(Debug, StructOpt)] #[structopt(bin_name = "cargo")] enum Command { #[structopt(name = "upgrade", author = "")] #[structopt( after_help = "This command differs from `cargo update`, which updates the dependency versions recorded in the local lock file (Cargo.lock). If `<dependency>`(s) are provided, only the specified dependencies will be upgraded. The version to upgrade to for each can be specified with e.g. `[email protected]` or `serde@>=0.9,<2.0`. Dev, build, and all target dependencies will also be upgraded. Only dependencies from crates.io are supported. Git/path dependencies will be ignored. All packages in the workspace will be upgraded if the `--all` flag is supplied. The `--all` flag may be supplied in the presence of a virtual manifest." 
)] Upgrade(Args), } #[derive(Debug, StructOpt)] struct Args { dependency: Vec<String>, #[structopt(long = "manifest-path", value_name = "path")] manifest_path: Option<PathBuf>, #[structopt(long = "all")] all: bool, #[structopt(long = "allow-prerelease")] allow_prerelease: bool, #[structopt(long = "dry-run")] dry_run: bool, } struct Manifests(Vec<(LocalManifest, cargo_metadata::Package)>); impl Manifests { fn get_all(manifest_path: &Option<PathBuf>) -> Result<Self> { let mut cmd = cargo_metadata::MetadataCommand::new(); cmd.no_deps(); if let Some(path) = manifest_path { cmd.manifest_path(path); } let result = cmd .exec() .map_err(|e| Error::from(e.compat()).chain_err(|| "Failed to get workspace metadata"))?; result .packages .into_iter() .map(|package| { Ok(( LocalManifest::try_new(Path::new(&package.manifest_path))?, package, )) }) .collect::<Result<Vec<_>>>() .map(Manifests) } fn get_local_one(manifest_path: &Option<PathBuf>) -> Result<Self> { let resolved_manifest_path: String = find(&manifest_path)?.to_string_lossy().into(); let manifest = LocalManifest::find(&manifest_path)?; let mut cmd = cargo_metadata::MetadataCommand::new(); cmd.no_deps(); if let Some(path) = manifest_path { cmd.manifest_path(path); } let result = cmd .exec() .map_err(|e| Error::from(e.compat()).chain_err(|| "Invalid manifest"))?; let packages = result.packages; let package = packages .iter() .find(|p| p.manifest_path.to_string_lossy() == resolved_manifest_path) .chain_err(|| { "Found virtual manifest, but this command requires running against an \ actual package in this workspace. Try adding `--all`." })?; Ok(Manifests(vec![(manifest, package.to_owned())])) } fn get_dependencies(&self, only_update: Vec<String>) -> Result<DesiredUpgrades> {
Ok(DesiredUpgrades(if only_update.is_empty() { self.0 .iter() .flat_map(|&(_, ref package)| package.dependencies.clone()) .filter(is_version_dep) .map(|dependency| (dependency.name, None)) .collect() } else { only_update .into_iter() .map(|name| { if let Some(dependency) = CrateName::new(&name.clone()).parse_as_version()? { Ok(( dependency.name.clone(), dependency.version().map(String::from), )) } else { Ok((name, None)) } }) .collect::<Result<_>>()? })) } fn upgrade(self, upgraded_deps: &ActualUpgrades, dry_run: bool) -> Result<()> { if dry_run { let bufwtr = BufferWriter::stdout(ColorChoice::Always); let mut buffer = bufwtr.buffer(); buffer .set_color(ColorSpec::new().set_fg(Some(Color::Cyan)).set_bold(true)) .chain_err(|| "Failed to set output colour")?; write!(&mut buffer, "Starting dry run. ") .chain_err(|| "Failed to write dry run message")?; buffer .set_color(&ColorSpec::new()) .chain_err(|| "Failed to clear output colour")?; writeln!(&mut buffer, "Changes will not be saved.") .chain_err(|| "Failed to write dry run message")?; bufwtr .print(&buffer) .chain_err(|| "Failed to print dry run message")?; } for (mut manifest, package) in self.0 { println!("{}:", package.name); for (name, version) in &upgraded_deps.0 { manifest.upgrade(&Dependency::new(name).set_version(version), dry_run)?; } } Ok(()) } } struct DesiredUpgrades(HashMap<String, Option<String>>); struct ActualUpgrades(HashMap<String, String>); impl DesiredUpgrades { fn get_upgraded(self, allow_prerelease: bool) -> Result<ActualUpgrades> { self.0 .into_iter() .map(|(name, version)| { if let Some(v) = version { Ok((name, v)) } else { get_latest_dependency(&name, allow_prerelease) .map(|new_dep| { ( name, new_dep .version() .expect("Invalid dependency type") .to_string(), ) }) .chain_err(|| "Failed to get new version") } }) .collect::<Result<_>>() .map(ActualUpgrades) } } fn process(args: Args) -> Result<()> { let Args { dependency, manifest_path, all, allow_prerelease, dry_run, .. 
} = args; let manifests = if all { Manifests::get_all(&manifest_path) } else { Manifests::get_local_one(&manifest_path) }?; let existing_dependencies = manifests.get_dependencies(dependency)?; let upgraded_dependencies = existing_dependencies.get_upgraded(allow_prerelease)?; manifests.upgrade(&upgraded_dependencies, dry_run) } fn main() { let args: Command = Command::from_args(); let Command::Upgrade(args) = args; if let Err(err) = process(args) { eprintln!("Command failed due to unhandled error: {}\n", err); for e in err.iter().skip(1) { eprintln!("Caused by: {}", e); } if let Some(backtrace) = err.backtrace() { eprintln!("Backtrace: {:?}", backtrace); } process::exit(1); } }
fn is_version_dep(dependency: &cargo_metadata::Dependency) -> bool { match dependency.source { Some(ref s) => s.splitn(2, '+').next() == Some("registry"), _ => false, } }
function_block-full_function
[ { "content": "fn get_name_from_manifest(manifest: &Manifest) -> Result<String> {\n\n manifest\n\n .data\n\n .as_table()\n\n .get(\"package\")\n\n .and_then(|m| m[\"name\"].as_str().map(std::string::ToString::to_string))\n\n .ok_or_else(|| ErrorKind::ParseCargoToml.into())\n\n}\n\n\n\nconst fn get_default_timeout() -> Duration {\n\n Duration::from_secs(10)\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 0, "score": 177194.16692743974 }, { "content": "/// Query crate name by accessing Cargo.toml in a local path\n\n///\n\n/// The name will be returned as a string. This will fail, when\n\n/// Cargo.toml is not present in the root of the path.\n\npub fn get_crate_name_from_path(path: &str) -> Result<String> {\n\n let cargo_file = Path::new(path).join(\"Cargo.toml\");\n\n Manifest::open(&Some(cargo_file))\n\n .chain_err(|| \"Unable to open local Cargo.toml\")\n\n .and_then(|ref manifest| get_name_from_manifest(manifest))\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 1, "score": 172786.82253652287 }, { "content": "/// Query latest version from crates.io\n\n///\n\n/// The latest version will be returned as a `Dependency`. This will fail, when\n\n///\n\n/// - there is no Internet connection,\n\n/// - the response from crates.io is an error or in an incorrect format,\n\n/// - or when a crate with the given name does not exist on crates.io.\n\npub fn get_latest_dependency(crate_name: &str, flag_allow_prerelease: bool) -> Result<Dependency> {\n\n if env::var(\"CARGO_IS_TEST\").is_ok() {\n\n // We are in a simulated reality. 
Nothing is real here.\n\n // FIXME: Use actual test handling code.\n\n let new_version = if flag_allow_prerelease {\n\n format!(\"{}--PRERELEASE_VERSION_TEST\", crate_name)\n\n } else {\n\n format!(\"{}--CURRENT_VERSION_TEST\", crate_name)\n\n };\n\n\n\n return Ok(Dependency::new(crate_name).set_version(&new_version));\n\n }\n\n\n\n let crate_versions = fetch_cratesio(crate_name)?;\n\n\n\n let dep = read_latest_version(&crate_versions, flag_allow_prerelease)?;\n\n\n\n if dep.name != crate_name {\n\n println!(\"WARN: Added `{}` instead of `{}`\", dep.name, crate_name);\n\n }\n\n\n\n Ok(dep)\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 2, "score": 153943.4660515914 }, { "content": "/// Read latest version from Versions structure\n\n///\n\n/// Assumes the version are sorted so that the first non-yanked version is the\n\n/// latest, and thus the one we want.\n\nfn read_latest_version(versions: &Versions, flag_allow_prerelease: bool) -> Result<Dependency> {\n\n let latest = versions\n\n .versions\n\n .iter()\n\n .filter(|&v| flag_allow_prerelease || version_is_stable(v))\n\n .find(|&v| !v.yanked)\n\n .ok_or(ErrorKind::NoVersionsAvailable)?;\n\n\n\n let name = &latest.name;\n\n let version = latest.version.to_string();\n\n Ok(Dependency::new(name).set_version(&version))\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 3, "score": 151484.8006304735 }, { "content": "pub fn get_command_path(s: impl AsRef<OsStr>) -> String {\n\n let target_dir: PathBuf = env::var_os(\"CARGO_TARGET_DIR\")\n\n .unwrap_or_else(|| OsString::from(\"target\"))\n\n .into();\n\n\n\n let mut binary_name = OsString::from(\"cargo-\");\n\n binary_name.push(s.as_ref());\n\n\n\n target_dir\n\n .join(\"debug\")\n\n .join(binary_name)\n\n .to_str()\n\n .unwrap()\n\n .to_string()\n\n}\n", "file_path": "tests/utils.rs", "rank": 4, "score": 147876.1734005112 }, { "content": "/// Search for Cargo.toml in this directory and recursively up the tree until one is found.\n\nfn search(dir: &Path) -> 
Result<PathBuf> {\n\n let manifest = dir.join(MANIFEST_FILENAME);\n\n\n\n if fs::metadata(&manifest).is_ok() {\n\n Ok(manifest)\n\n } else {\n\n dir.parent()\n\n .ok_or_else(|| ErrorKind::MissingManifest.into())\n\n .and_then(|dir| search(dir))\n\n }\n\n}\n\n\n", "file_path": "src/manifest.rs", "rank": 5, "score": 147465.5971125967 }, { "content": "/// If a manifest is specified, return that one, otherise perform a manifest search starting from\n\n/// the current directory.\n\n/// If a manifest is specified, return that one. If a path is specified, perform a manifest search\n\n/// starting from there. If nothing is specified, start searching from the current directory\n\n/// (`cwd`).\n\npub fn find(specified: &Option<PathBuf>) -> Result<PathBuf> {\n\n match *specified {\n\n Some(ref path)\n\n if fs::metadata(&path)\n\n .chain_err(|| \"Failed to get cargo file metadata\")?\n\n .is_file() =>\n\n {\n\n Ok(path.to_owned())\n\n }\n\n Some(ref path) => search(path),\n\n None => search(&env::current_dir().chain_err(|| \"Failed to get current directory\")?),\n\n }\n\n}\n\n\n", "file_path": "src/manifest.rs", "rank": 6, "score": 143374.61735596327 }, { "content": "// Checks whether a version object is a stable release\n\nfn version_is_stable(version: &CrateVersion) -> bool {\n\n !version.version.is_prerelease()\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 7, "score": 141516.6781271702 }, { "content": "#[test]\n\nfn invalid_manifest() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/upgrade/Cargo.toml.invalid\");\n\n\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"upgrade\").as_str(),\n\n \"upgrade\",\n\n \"--manifest-path\",\n\n &manifest,\n\n ])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .is(\n\n \"Command failed due to unhandled error: Unable to parse Cargo.toml\n\n\n\nCaused by: Manifest not valid TOML\n\nCaused by: TOML parse error at line 1, column 6\n\n |\n\n1 | This is clearly not a valid Cargo.toml.\n\n | ^\n\nUnexpected 
`i`\n\nExpected `=`\",\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 8, "score": 132153.29209573785 }, { "content": "fn get_cargo_toml_from_git_url(url: &str) -> Result<String> {\n\n let mut res = get_with_timeout(url, get_default_timeout())\n\n .chain_err(|| \"Failed to fetch crate from git\")?;\n\n let mut body = String::new();\n\n res.read_to_string(&mut body)\n\n .chain_err(|| \"Git response not a valid `String`\")?;\n\n Ok(body)\n\n}\n", "file_path": "src/fetch.rs", "rank": 9, "score": 130638.47917119961 }, { "content": "/// Query crate name by accessing a gitlab repo Cargo.toml\n\n///\n\n/// The name will be returned as a string. This will fail, when\n\n///\n\n/// - there is no Internet connection,\n\n/// - Cargo.toml is not present in the root of the master branch,\n\n/// - the response from gitlab is an error or in an incorrect format.\n\npub fn get_crate_name_from_gitlab(repo: &str) -> Result<String> {\n\n let re =\n\n Regex::new(r\"^https://gitlab.com/([-_0-9a-zA-Z]+)/([-_0-9a-zA-Z]+)(/|.git)?$\").unwrap();\n\n get_crate_name_from_repository(repo, &re, |user, repo| {\n\n format!(\n\n \"https://gitlab.com/{user}/{repo}/raw/master/Cargo.toml\",\n\n user = user,\n\n repo = repo\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 10, "score": 130312.3660475107 }, { "content": "/// Query crate name by accessing a github repo Cargo.toml\n\n///\n\n/// The name will be returned as a string. 
This will fail, when\n\n///\n\n/// - there is no Internet connection,\n\n/// - Cargo.toml is not present in the root of the master branch,\n\n/// - the response from github is an error or in an incorrect format.\n\npub fn get_crate_name_from_github(repo: &str) -> Result<String> {\n\n let re =\n\n Regex::new(r\"^https://github.com/([-_0-9a-zA-Z]+)/([-_0-9a-zA-Z]+)(/|.git)?$\").unwrap();\n\n get_crate_name_from_repository(repo, &re, |user, repo| {\n\n format!(\n\n \"https://raw.githubusercontent.com/{user}/{repo}/master/Cargo.toml\",\n\n user = user,\n\n repo = repo\n\n )\n\n })\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 11, "score": 130312.3660475107 }, { "content": "#[test]\n\nfn adds_local_source_with_version_flag_and_semver_metadata() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\n\n \"add\",\n\n \"local\",\n\n \"--vers\",\n\n \"0.4.3+useless-metadata.1.0.0\",\n\n \"--path\",\n\n \"/path/to/pkg\",\n\n ],\n\n &manifest,\n\n );\n\n\n\n let toml = get_toml(&manifest);\n", "file_path": "tests/cargo-add.rs", "rank": 12, "score": 130095.33721923266 }, { "content": "#[test]\n\nfn fails_to_upgrade_missing_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // `failure` is not a dependency. 
Try to upgrade it anyway.\n\n execute_command(&[\"upgrade\", \"failure\"], &manifest);\n\n\n\n // Verify that `failure` has not been added\n\n assert!(get_toml(&manifest)[\"dependencies\"][\"failure\"].is_none());\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 13, "score": 129648.79169075613 }, { "content": "#[test]\n\nfn invalid_root_manifest() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/upgrade/Cargo.toml.invalid\");\n\n\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"upgrade\").as_str(),\n\n \"upgrade\",\n\n \"--all\",\n\n \"--manifest-path\",\n\n &manifest,\n\n ])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .contains(\"Command failed due to unhandled error: Failed to get workspace metadata\")\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 14, "score": 126584.53218682595 }, { "content": "fn fetch_cratesio(crate_name: &str) -> Result<Versions> {\n\n let url = format!(\n\n \"{host}/api/v1/crates/{crate_name}\",\n\n host = REGISTRY_HOST,\n\n crate_name = crate_name\n\n );\n\n\n\n match get_with_timeout(&url, get_default_timeout()) {\n\n Ok(response) => {\n\n Ok(json::from_reader(response).chain_err(|| ErrorKind::InvalidCratesIoJson)?)\n\n }\n\n Err(e) => {\n\n let not_found_error = e.status() == Some(reqwest::StatusCode::NOT_FOUND);\n\n\n\n Err(e).chain_err(|| {\n\n if not_found_error {\n\n ErrorKind::NoCrate(crate_name.to_string())\n\n } else {\n\n ErrorKind::FetchVersionFailure\n\n }\n\n })\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 15, "score": 126428.55240351408 }, { "content": "/// Check 'failure' deps are not present\n\nfn no_manifest_failures(manifest: &toml_edit::Item) -> bool {\n\n let no_failure_key_in = |section| manifest[section][BOGUS_CRATE_NAME].is_none();\n\n no_failure_key_in(\"dependencies\")\n\n && no_failure_key_in(\"dev-dependencies\")\n\n && no_failure_key_in(\"build-dependencies\")\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 16, "score": 
122125.98044321775 }, { "content": "#[test]\n\nfn adds_dev_build_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dev-dependencies\"].is_none());\n\n assert!(toml[\"build-dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", \"my-dev-package\", \"--dev\"], &manifest);\n\n execute_command(&[\"add\", \"my-build-package\", \"--build\"], &manifest);\n\n\n\n // dependency present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dev-dependencies\"][\"my-dev-package\"];\n\n assert_eq!(\n\n val.as_str().unwrap(),\n\n \"my-dev-package--CURRENT_VERSION_TEST\"\n\n );\n\n let val = &toml[\"build-dependencies\"][\"my-build-package\"];\n\n assert_eq!(\n", "file_path": "tests/cargo-add.rs", "rank": 17, "score": 121802.44020111456 }, { "content": "/// Helper function that copies the workspace test into a temporary directory.\n\npub fn copy_workspace_test() -> (tempdir::TempDir, String, Vec<String>) {\n\n // Create a temporary directory and copy in the root manifest, the dummy rust file, and\n\n // workspace member manifests.\n\n let tmpdir = tempdir::TempDir::new(\"upgrade_workspace\")\n\n .expect(\"failed to construct temporary directory\");\n\n\n\n let (root_manifest_path, workspace_manifest_paths) = {\n\n // Helper to copy in files to the temporary workspace. 
The standard library doesn't have a\n\n // good equivalent of `cp -r`, hence this oddity.\n\n let copy_in = |dir, file| {\n\n let file_path = tmpdir\n\n .path()\n\n .join(dir)\n\n .join(file)\n\n .to_str()\n\n .unwrap()\n\n .to_string();\n\n\n\n fs::create_dir_all(tmpdir.path().join(dir)).unwrap();\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 18, "score": 121061.70831774708 }, { "content": "#[test]\n\nfn adds_local_source_using_flag() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", \"local\", \"--path\", \"/path/to/pkg\"], &manifest);\n\n\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"local\"];\n\n assert_eq!(val[\"path\"].as_str(), Some(\"/path/to/pkg\"));\n\n\n\n // check this works with other flags (e.g. --dev) as well\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dev-dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"local-dev\", \"--path\", \"/path/to/pkg-dev\", \"--dev\"],\n\n &manifest,\n\n );\n\n\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dev-dependencies\"][\"local-dev\"];\n\n assert_eq!(val[\"path\"].as_str(), Some(\"/path/to/pkg-dev\"));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 19, "score": 118117.08292914763 }, { "content": "#[test]\n\nfn adds_local_source_with_version_flag() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"local\", \"--vers\", \"0.4.3\", \"--path\", \"/path/to/pkg\"],\n\n &manifest,\n\n );\n\n\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"local\"];\n\n assert_eq!(val[\"path\"].as_str(), 
Some(\"/path/to/pkg\"));\n\n assert_eq!(val[\"version\"].as_str(), Some(\"0.4.3\"));\n\n\n\n // check this works with other flags (e.g. --dev) as well\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dev-dependencies\"].is_none());\n", "file_path": "tests/cargo-add.rs", "rank": 20, "score": 117568.49951043144 }, { "content": "#[test]\n\nfn adds_multiple_dev_build_dependencies() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dev-dependencies\"].is_none());\n\n assert!(toml[\"dev-dependencies\"].is_none());\n\n assert!(toml[\"build-dependencies\"].is_none());\n\n assert!(toml[\"build-dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"my-dev-package1\", \"my-dev-package2\", \"--dev\"],\n\n &manifest,\n\n );\n\n execute_command(\n\n &[\"add\", \"my-build-package1\", \"--build\", \"my-build-package2\"],\n\n &manifest,\n\n );\n\n\n\n // dependencies present afterwards\n", "file_path": "tests/cargo-add.rs", "rank": 21, "score": 117160.51226241351 }, { "content": "#[test]\n\nfn fails_to_add_dependency_with_empty_target() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // Fails because target parameter must be a valid target\n\n execute_bad_command(&[\"add\", \"--target\", \"\", \"my-package1\"], &manifest);\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 22, "score": 117107.43631392832 }, { "content": "#[test]\n\nfn fails_to_add_optional_dev_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n // Fails because optional dependencies must be in `dependencies` table.\n\n execute_bad_command(\n\n &[\n\n \"add\",\n\n \"versioned-package\",\n\n \"--vers\",\n\n 
\">=0.1.1\",\n\n \"--dev\",\n\n \"--optional\",\n\n ],\n\n &manifest,\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 23, "score": 117080.96633922857 }, { "content": "#[test]\n\nfn upgrade_workspace() {\n\n let (_tmpdir, root_manifest, workspace_manifests) = copy_workspace_test();\n\n\n\n execute_command(&[\"upgrade\", \"--all\"], &root_manifest);\n\n\n\n // All of the workspace members have `libc` as a dependency.\n\n for workspace_member in workspace_manifests {\n\n assert_eq!(\n\n get_toml(&workspace_member)[\"dependencies\"][\"libc\"].as_str(),\n\n Some(\"libc--CURRENT_VERSION_TEST\")\n\n );\n\n }\n\n}\n\n\n\n/// Detect if attempting to run against a workspace root and give a helpful warning.\n", "file_path": "tests/cargo-upgrade.rs", "rank": 24, "score": 115770.93921049911 }, { "content": "#[test]\n\nfn upgrade_specified_only() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // Setup manifest with the dependencies `env_proxy` and `docopt`\n\n execute_command(&[\"add\", \"docopt\", \"--vers\", \"0.8\"], &manifest);\n\n execute_command(&[\"add\", \"env_proxy\", \"--vers\", \"0.1.1\"], &manifest);\n\n\n\n // Update `docopt` to the latest version\n\n execute_command(&[\"upgrade\", \"docopt\"], &manifest);\n\n\n\n // Verify that `docopt` was upgraded, but not `env_proxy`\n\n let dependencies = &get_toml(&manifest)[\"dependencies\"];\n\n assert_eq!(\n\n dependencies[\"docopt\"].as_str(),\n\n Some(\"docopt--CURRENT_VERSION_TEST\")\n\n );\n\n assert_eq!(dependencies[\"env_proxy\"].as_str(), Some(\"0.1.1\"));\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 25, "score": 115770.93921049911 }, { "content": "#[test]\n\nfn fails_to_add_multiple_optional_dev_dependencies() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n // 
Fails because optional dependencies must be in `dependencies` table.\n\n execute_bad_command(\n\n &[\"add\", \"--optional\", \"my-package1\", \"my-package2\", \"--dev\"],\n\n &manifest,\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 26, "score": 112826.19973359308 }, { "content": "#[test]\n\nfn upgrade_all_dry_run() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // Setup manifest with the dependency `[email protected]`\n\n execute_command(&[\"add\", \"docopt\", \"--vers\", \"0.8\"], &manifest);\n\n\n\n // Now, upgrade `docopt` to the latest version\n\n execute_command(&[\"upgrade\", \"--dry-run\"], &manifest);\n\n\n\n // Verify that `docopt` has not been updated.\n\n assert_eq!(\n\n get_toml(&manifest)[\"dependencies\"][\"docopt\"].as_str(),\n\n Some(\"0.8\")\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 27, "score": 111545.07021988844 }, { "content": "#[test]\n\nfn upgrade_optional_dependency() {\n\n // Set up a Cargo.toml with an optional dependency `test_optional_dependency` verifies that this\n\n // is correct.\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n execute_command(\n\n &[\"add\", \"docopt\", \"--vers\", \">=0.1.1\", \"--optional\"],\n\n &manifest,\n\n );\n\n\n\n // Now, update without including the `optional` flag.\n\n execute_command(&[\"upgrade\"], &manifest);\n\n\n\n // Dependency present afterwards - correct version, and still optional.\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"docopt\"];\n\n assert_eq!(\n\n val[\"version\"].as_str(),\n\n Some(\"docopt--CURRENT_VERSION_TEST\")\n\n );\n\n assert_eq!(val[\"optional\"].as_bool(), Some(true));\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 28, "score": 110115.7224093883 }, { "content": "#[test]\n\nfn fails_to_add_inexistent_local_source_without_flag() {\n\n let (_tmpdir, manifest) = 
clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_bad_command(&[\"add\", \"./tests/fixtures/local\"], &manifest);\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 29, "score": 109764.97988384773 }, { "content": "#[test]\n\nfn detect_workspace() {\n\n let (_tmpdir, root_manifest, _workspace_manifests) = copy_workspace_test();\n\n\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"upgrade\").as_str(),\n\n \"upgrade\",\n\n \"--manifest-path\",\n\n &root_manifest,\n\n ])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .is(\n\n \"Command failed due to unhandled error: Found virtual manifest, but this command \\\n\n requires running against an actual package in this workspace. Try adding `--all`.\",\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 30, "score": 107455.89420980313 }, { "content": "#[test]\n\nfn unknown_flags() {\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"upgrade\").as_str(),\n\n \"upgrade\",\n\n \"foo\",\n\n \"--flag\",\n\n ])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .is(\n\n \"error: Found argument '--flag' which wasn't expected, or isn't valid in this context\n\n\n\nUSAGE:\n\n cargo upgrade [FLAGS] [OPTIONS] [dependency]...\n\n\n\nFor more information try --help \",\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 31, "score": 107115.4405492603 }, { "content": "#[test]\n\nfn invalid_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/rm/Cargo.toml.sample\");\n\n\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"rm\").as_str(),\n\n \"rm\",\n\n \"invalid_dependency_name\",\n\n &format!(\"--manifest-path={}\", manifest),\n\n ])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .contains(\n\n \"Command failed due to unhandled error: The dependency `invalid_dependency_name` could 
\\\n\n not be found in `dependencies`.\",\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-rm.rs", "rank": 32, "score": 106625.95059471433 }, { "content": "fn get_crate_name_from_repository<T>(repo: &str, matcher: &Regex, url_template: T) -> Result<String>\n\nwhere\n\n T: Fn(&str, &str) -> String,\n\n{\n\n matcher\n\n .captures(repo)\n\n .ok_or_else(|| \"Unable to parse git repo URL\".into())\n\n .and_then(|cap| match (cap.get(1), cap.get(2)) {\n\n (Some(user), Some(repo)) => {\n\n let url = url_template(user.as_str(), repo.as_str());\n\n let data: Result<Manifest> = get_cargo_toml_from_git_url(&url)\n\n .and_then(|m| m.parse().chain_err(|| ErrorKind::ParseCargoToml));\n\n data.and_then(|ref manifest| get_name_from_manifest(manifest))\n\n }\n\n _ => Err(\"Git repo url seems incomplete\".into()),\n\n })\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 33, "score": 105805.22214213936 }, { "content": "#[derive(Deserialize)]\n\nstruct CrateVersion {\n\n #[serde(rename = \"crate\")]\n\n name: String,\n\n #[serde(rename = \"num\")]\n\n version: semver::Version,\n\n yanked: bool,\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 34, "score": 105738.7579941258 }, { "content": "#[test]\n\nfn upgrade_all_allow_prerelease_dry_run() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // Setup manifest with the dependency `[email protected]`\n\n execute_command(&[\"add\", \"docopt\", \"--vers\", \"0.8\"], &manifest);\n\n\n\n // Now, upgrade `docopt` to the latest version\n\n execute_command(&[\"upgrade\", \"--allow-prerelease\", \"--dry-run\"], &manifest);\n\n\n\n // Verify that `docopt` has been updated successfully.\n\n assert_eq!(\n\n get_toml(&manifest)[\"dependencies\"][\"docopt\"].as_str(),\n\n Some(\"0.8\")\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 35, "score": 103995.51111877497 }, { "content": "#[test]\n\nfn adds_specified_version() {\n\n let (_tmpdir, manifest) = 
clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"versioned-package\", \"--vers\", \">=0.1.1\"],\n\n &manifest,\n\n );\n\n\n\n // dependency present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"versioned-package\"];\n\n assert_eq!(val.as_str().expect(\"not string\"), \">=0.1.1\");\n\n\n\n // cannot run with both --dev and --build at the same time\n\n let call = process::Command::new(get_command_path(\"add\").as_str())\n\n .args(&[\"add\", BOGUS_CRATE_NAME, \"--vers\", \"invalid version string\"])\n\n .arg(format!(\"--manifest-path={}\", &manifest))\n\n .output()\n\n .unwrap();\n\n\n\n assert!(!call.status.success());\n\n assert!(no_manifest_failures(&get_toml(&manifest).root));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 36, "score": 103149.08721961419 }, { "content": "#[test]\n\nfn overwrite_path_with_version() {\n\n overwrite_dependency_test(\n\n &[\"add\", \"versioned-package\", \"--path\", \"../foo\"],\n\n &[\"add\", \"versioned-package\"],\n\n r#\"\n\n[dependencies]\n\nversioned-package = \"versioned-package--CURRENT_VERSION_TEST\"\n\n\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 37, "score": 103003.69843824964 }, { "content": "#[test]\n\nfn overwrite_version_with_path() {\n\n overwrite_dependency_test(\n\n &[\"add\", \"versioned-package\", \"--vers\", \"0.1.1\", \"--optional\"],\n\n &[\"add\", \"versioned-package\", \"--path\", \"../foo\"],\n\n r#\"\n\n[dependencies]\n\nversioned-package = { optional = true, path = \"../foo\" }\n\n\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 38, "score": 103003.69843824964 }, { "content": "#[test]\n\nfn invalid_dependency_in_section() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/rm/Cargo.toml.sample\");\n\n\n\n 
assert_cli::Assert::command(&[\n\n get_command_path(\"rm\").as_str(),\n\n \"rm\",\n\n \"semver\",\n\n \"regex\",\n\n \"--dev\",\n\n &format!(\"--manifest-path={}\", manifest),\n\n ])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .contains(\n\n \"Command failed due to unhandled error: The dependency `semver` could not be found in \\\n\n `dev-dependencies`.\",\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-rm.rs", "rank": 39, "score": 102262.1771053787 }, { "content": "#[test]\n\nfn adds_dependency_with_upgrade_all() {\n\n upgrade_test_helper(\"all\", \">=\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 40, "score": 101682.3697324206 }, { "content": "/// Merge a new dependency into an old entry. See `Dependency::to_toml` for what the format of the\n\n/// new dependency will be.\n\nfn merge_dependencies(old_dep: &mut toml_edit::Item, new: &Dependency) {\n\n assert!(!old_dep.is_none());\n\n\n\n let new_toml = new.to_toml().1;\n\n\n\n if str_or_1_len_table(old_dep) {\n\n // The old dependency is just a version/git/path. 
We are safe to overwrite.\n\n *old_dep = new_toml;\n\n } else if old_dep.is_table_like() {\n\n for key in &[\"version\", \"path\", \"git\"] {\n\n // remove this key/value pairs\n\n old_dep[key] = toml_edit::Item::None;\n\n }\n\n if let Some(name) = new_toml.as_str() {\n\n old_dep[\"version\"] = toml_edit::value(name);\n\n } else {\n\n merge_inline_table(old_dep, &new_toml);\n\n }\n\n } else {\n\n unreachable!(\"Invalid old dependency type\");\n\n }\n\n\n\n if let Some(t) = old_dep.as_inline_table_mut() {\n\n t.fmt()\n\n }\n\n}\n\n\n", "file_path": "src/manifest.rs", "rank": 41, "score": 99507.39586047071 }, { "content": "/// Execute local cargo command, includes `--manifest-path`\n\npub fn execute_command<S>(command: &[S], manifest: &str)\n\nwhere\n\n S: AsRef<OsStr>,\n\n{\n\n let subcommand_name = &command[0].as_ref();\n\n\n\n let call = process::Command::new(&get_command_path(subcommand_name))\n\n .args(command)\n\n .arg(format!(\"--manifest-path={}\", manifest))\n\n .env(\"CARGO_IS_TEST\", \"1\")\n\n .output()\n\n .expect(\"call to test build failed\");\n\n\n\n if !call.status.success() {\n\n println!(\"Status code: {:?}\", call.status);\n\n println!(\"STDOUT: {}\", String::from_utf8_lossy(&call.stdout));\n\n println!(\"STDERR: {}\", String::from_utf8_lossy(&call.stderr));\n\n panic!(\n\n \"cargo-{} failed to execute\",\n\n subcommand_name.to_string_lossy()\n\n )\n\n }\n\n}\n\n\n", "file_path": "tests/utils.rs", "rank": 42, "score": 98961.8150065133 }, { "content": "#[test]\n\nfn adds_dependency_with_custom_target() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n execute_command(\n\n &[\"add\", \"--target\", \"x86_64/windows.json\", \"my-package1\"],\n\n &manifest,\n\n );\n\n\n\n // dependencies present afterwards\n\n let toml = get_toml(&manifest);\n\n // Get package by hand because toml-rs does not currently handle escaping dots in get()\n\n let val = 
&toml[\"target\"][\"x86_64/windows.json\"][\"dependencies\"][\"my-package1\"];\n\n assert_eq!(val.as_str(), Some(\"my-package1--CURRENT_VERSION_TEST\"));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 43, "score": 98347.13111617128 }, { "content": "#[test]\n\nfn adds_dependency_with_target_cfg() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"target\"].is_none());\n\n\n\n execute_command(&[\"add\", \"--target\", \"cfg(unix)\", \"my-package1\"], &manifest);\n\n\n\n // dependencies present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"target\"][\"cfg(unix)\"][\"dependencies\"][\"my-package1\"];\n\n\n\n assert_eq!(val.as_str().unwrap(), \"my-package1--CURRENT_VERSION_TEST\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 44, "score": 98347.13111617128 }, { "content": "#[test]\n\nfn adds_dependency_with_target_triple() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"target\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"--target\", \"i686-unknown-linux-gnu\", \"my-package1\"],\n\n &manifest,\n\n );\n\n\n\n // dependencies present afterwards\n\n let toml = get_toml(&manifest);\n\n\n\n let val = &toml[\"target\"][\"i686-unknown-linux-gnu\"][\"dependencies\"][\"my-package1\"];\n\n assert_eq!(val.as_str().unwrap(), \"my-package1--CURRENT_VERSION_TEST\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 45, "score": 98347.13111617128 }, { "content": "#[test]\n\nfn adds_multiple_dependencies_with_versions() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n 
assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"my-package1@>=0.1.1\", \"[email protected]\"],\n\n &manifest,\n\n );\n\n\n\n // dependencies present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"my-package1\"];\n\n assert_eq!(val.as_str().expect(\"not string\"), \">=0.1.1\");\n\n let val = &toml[\"dependencies\"][\"my-package2\"];\n\n assert_eq!(val.as_str().expect(\"not string\"), \"0.2.3\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 46, "score": 97829.94376581213 }, { "content": "#[test]\n\nfn adds_multiple_dependencies_with_some_versions() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", \"my-package1\", \"[email protected]\"], &manifest);\n\n\n\n // dependencies present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"my-package1\"];\n\n assert_eq!(\n\n val.as_str().expect(\"not string\"),\n\n \"my-package1--CURRENT_VERSION_TEST\"\n\n );\n\n let val = &toml[\"dependencies\"][\"my-package2\"];\n\n assert_eq!(val.as_str().expect(\"not string\"), \"0.2.3\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 47, "score": 97829.94376581213 }, { "content": "#[test]\n\nfn adds_dependency_with_upgrade_minor() {\n\n upgrade_test_helper(\"minor\", \"^\");\n\n}\n", "file_path": "tests/cargo-add.rs", "rank": 48, "score": 97738.13455000718 }, { "content": "#[test]\n\nfn adds_dependency_with_upgrade_none() {\n\n upgrade_test_helper(\"none\", \"=\");\n\n}\n", "file_path": "tests/cargo-add.rs", "rank": 49, "score": 97738.13455000718 }, { "content": "#[test]\n\nfn adds_dependency_with_upgrade_patch() {\n\n upgrade_test_helper(\"patch\", \"~\");\n\n}\n", "file_path": "tests/cargo-add.rs", "rank": 
50, "score": 97738.13455000718 }, { "content": "#[test]\n\nfn adds_dependency_with_upgrade_bad() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n let upgrade_arg = format!(\"--upgrade=an_invalid_string\",);\n\n execute_bad_command(&[\"add\", \"my-package\", upgrade_arg.as_str()], &manifest);\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 51, "score": 97738.13455000718 }, { "content": "/// Execute localc cargo command, includes `--manifest-path`, expect command failed\n\npub fn execute_bad_command<S>(command: &[S], manifest: &str)\n\nwhere\n\n S: AsRef<OsStr>,\n\n{\n\n let subcommand_name = &command[0].as_ref();\n\n\n\n let call = process::Command::new(&get_command_path(subcommand_name))\n\n .args(command)\n\n .arg(format!(\"--manifest-path={}\", manifest))\n\n .env(\"CARGO_IS_TEST\", \"1\")\n\n .output()\n\n .unwrap();\n\n\n\n if call.status.success() {\n\n println!(\"Status code: {:?}\", call.status);\n\n println!(\"STDOUT: {}\", String::from_utf8_lossy(&call.stdout));\n\n println!(\"STDERR: {}\", String::from_utf8_lossy(&call.stderr));\n\n panic!(\n\n \"cargo-{} success to execute\",\n\n subcommand_name.to_string_lossy()\n\n )\n\n }\n\n}\n\n\n", "file_path": "tests/utils.rs", "rank": 52, "score": 96752.9187369078 }, { "content": "/// Parse a manifest file as TOML\n\npub fn get_toml(manifest_path: &str) -> toml_edit::Document {\n\n let mut f = fs::File::open(manifest_path).unwrap();\n\n let mut s = String::new();\n\n f.read_to_string(&mut s).unwrap();\n\n s.parse().expect(\"toml parse error\")\n\n}\n\n\n", "file_path": "tests/utils.rs", "rank": 53, "score": 96492.63106559087 }, { "content": "#[test]\n\nfn adds_git_source_using_flag() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = 
get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\n\n \"add\",\n\n \"git-package\",\n\n \"--git\",\n\n \"http://localhost/git-package.git\",\n\n ],\n\n &manifest,\n\n );\n\n\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"git-package\"];\n\n assert_eq!(\n", "file_path": "tests/cargo-add.rs", "rank": 54, "score": 95726.51929571245 }, { "content": "#[test]\n\nfn adds_local_source_without_flag() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n let (tmpdir, _) = clone_out_test(\"tests/fixtures/add/local/Cargo.toml.sample\");\n\n let tmppath = tmpdir.into_path();\n\n let tmpdirstr = tmppath.to_str().unwrap();\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", tmpdirstr], &manifest);\n\n\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"foo-crate\"];\n\n assert_eq!(val[\"path\"].as_str(), Some(tmpdirstr));\n\n\n\n // check this works with other flags (e.g. 
--dev) as well\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dev-dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", tmpdirstr, \"--dev\"], &manifest);\n\n\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dev-dependencies\"][\"foo-crate\"];\n\n assert_eq!(val[\"path\"].as_str(), Some(tmpdirstr));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 55, "score": 95619.87766857089 }, { "content": "#[test]\n\nfn adds_specified_version_with_inline_notation() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", \"versioned-package@>=0.1.1\"], &manifest);\n\n\n\n // dependency present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"versioned-package\"];\n\n assert_eq!(val.as_str().expect(\"not string\"), \">=0.1.1\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 56, "score": 95474.33640854602 }, { "content": "#[test]\n\nfn git_and_version_flags_are_mutually_exclusive() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n let call = process::Command::new(get_command_path(\"add\").as_str())\n\n .args(&[\"add\", BOGUS_CRATE_NAME])\n\n .args(&[\"--vers\", \"0.4.3\"])\n\n .args(&[\"--git\", \"git://git.git\"])\n\n .arg(format!(\"--manifest-path={}\", &manifest))\n\n .output()\n\n .unwrap();\n\n\n\n assert!(!call.status.success());\n\n assert!(no_manifest_failures(&get_toml(&manifest).root));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 57, "score": 95177.93587699626 }, { "content": "#[test]\n\nfn invalid_manifest() {\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"add\").as_str(),\n\n \"add\",\n\n \"foo\",\n\n 
\"--manifest-path=tests/fixtures/manifest-invalid/Cargo.toml.sample\",\n\n ])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .is(\n\n r#\"Command failed due to unhandled error: Unable to parse Cargo.toml\n\n\n\nCaused by: Manifest not valid TOML\n\nCaused by: TOML parse error at line 6, column 7\n\n |\n\n6 | key = invalid-value\n\n | ^\n\nUnexpected `i`\n\nExpected `digit`, `-` or `+`\n\nexpected 4 more elements\n\nexpected 2 more elements\n\nWhile parsing a Time\n\nWhile parsing a Date-Time\n\nWhile parsing a Float\n\nWhile parsing an Integer\"#,\n\n )\n\n .unwrap();\n\n}\n", "file_path": "tests/test_manifest.rs", "rank": 58, "score": 93752.9439085122 }, { "content": "#[test]\n\nfn upgrade_all() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // Setup manifest with the dependency `[email protected]`\n\n execute_command(&[\"add\", \"docopt\", \"--vers\", \"0.8.0\"], &manifest);\n\n\n\n // Now, upgrade `docopt` to the latest version\n\n execute_command(&[\"upgrade\"], &manifest);\n\n\n\n // Verify that `docopt` has been updated successfully.\n\n assert_eq!(\n\n get_toml(&manifest)[\"dependencies\"][\"docopt\"].as_str(),\n\n Some(\"docopt--CURRENT_VERSION_TEST\")\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 59, "score": 93340.83870957764 }, { "content": "#[test]\n\nfn upgrade_at() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // Setup manifest\n\n execute_command(&[\"add\", \"docopt\", \"--vers\", \"0.8\"], &manifest);\n\n\n\n // Now, upgrade `docopt` to a version that seems unlikely to ever get published.\n\n execute_command(&[\"upgrade\", \"[email protected]\"], &manifest);\n\n\n\n // Verify that `docopt` has been updated to the specified version.\n\n assert_eq!(\n\n get_toml(&manifest)[\"dependencies\"][\"docopt\"].as_str(),\n\n Some(\"1000000.0.0\")\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 60, "score": 
93340.83870957764 }, { "content": "#[test]\n\nfn adds_local_source_with_inline_version_notation() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", \"[email protected]\", \"--path\", \"/path/to/pkg\"], &manifest);\n\n\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"local\"];\n\n assert_eq!(val[\"path\"].as_str(), Some(\"/path/to/pkg\"));\n\n assert_eq!(val[\"version\"].as_str(), Some(\"0.4.3\"));\n\n\n\n // check this works with other flags (e.g. --dev) as well\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dev-dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\n", "file_path": "tests/cargo-add.rs", "rank": 61, "score": 92062.21914438915 }, { "content": "#[test]\n\nfn git_flag_and_inline_version_are_mutually_exclusive() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n let call = process::Command::new(get_command_path(\"add\").as_str())\n\n .args(&[\"add\", &format!(\"{}@0.4.3\", BOGUS_CRATE_NAME)])\n\n .args(&[\"--git\", \"git://git.git\"])\n\n .arg(format!(\"--manifest-path={}\", &manifest))\n\n .output()\n\n .unwrap();\n\n\n\n assert!(!call.status.success());\n\n assert!(no_manifest_failures(&get_toml(&manifest).root));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 62, "score": 91828.82371752604 }, { "content": "#[test]\n\nfn upgrade_as_expected() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/upgrade/Cargo.toml.source\");\n\n\n\n execute_command(&[\"upgrade\"], &manifest);\n\n\n\n let upgraded = get_toml(&manifest);\n\n let target = get_toml(\"tests/fixtures/upgrade/Cargo.toml.target\");\n\n\n\n assert_eq!(target.to_string(), upgraded.to_string());\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 63, "score": 89991.72632275637 }, { 
"content": "#[test]\n\n#[cfg(feature = \"test-external-apis\")]\n\nfn fails_to_add_inexistent_git_source_without_flag() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_bad_command(\n\n &[\"add\", \"https://github.com/killercup/fake-git-repo.git\"],\n\n &manifest,\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 64, "score": 89155.57302359218 }, { "content": "#[test]\n\nfn overwrite_version_with_version() {\n\n overwrite_dependency_test(\n\n &[\"add\", \"versioned-package\", \"--vers\", \"0.1.1\", \"--optional\"],\n\n &[\"add\", \"versioned-package\"],\n\n r#\"\n\n[dependencies]\n\nversioned-package = { version = \"versioned-package--CURRENT_VERSION_TEST\", optional = true }\n\n\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 65, "score": 87053.99282833132 }, { "content": "#[test]\n\n#[cfg(feature = \"test-external-apis\")]\n\nfn upgrade_prints_messages() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/upgrade/Cargo.toml.source\");\n\n\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"upgrade\").as_str(),\n\n \"upgrade\",\n\n \"docopt\",\n\n &format!(\"--manifest-path={}\", manifest),\n\n ])\n\n .succeeds()\n\n .and()\n\n .stdout()\n\n .contains(\"docopt v0.8 -> v\")\n\n .unwrap();\n\n}\n", "file_path": "tests/cargo-upgrade.rs", "rank": 66, "score": 86927.01194534091 }, { "content": "#[test]\n\nfn upgrade_all_allow_prerelease() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // Setup manifest with `docopt`\n\n execute_command(&[\"add\", \"docopt\", \"--vers\", \"0.8\"], &manifest);\n\n\n\n // Now, upgrade `docopt` to the latest version\n\n execute_command(&[\"upgrade\", \"--allow-prerelease\"], &manifest);\n\n\n\n // Verify that `docopt` has been updated successfully.\n\n 
assert_eq!(\n\n get_toml(&manifest)[\"dependencies\"][\"docopt\"].as_str(),\n\n Some(\"docopt--PRERELEASE_VERSION_TEST\")\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-upgrade.rs", "rank": 67, "score": 86923.95974839672 }, { "content": "#[derive(Debug, Hash, PartialEq, Eq, Clone)]\n\nenum DependencySource {\n\n Version {\n\n version: Option<String>,\n\n path: Option<String>,\n\n },\n\n Git(String),\n\n}\n\n\n\n/// A dependency handled by Cargo\n\n#[derive(Debug, Hash, PartialEq, Eq, Clone)]\n\npub struct Dependency {\n\n /// The name of the dependency (as it is set in its `Cargo.toml` and known to crates.io)\n\n pub name: String,\n\n optional: bool,\n\n default_features: bool,\n\n source: DependencySource,\n\n}\n\n\n\nimpl Default for Dependency {\n\n fn default() -> Dependency {\n", "file_path": "src/dependency.rs", "rank": 68, "score": 85938.58221102509 }, { "content": "/// Print a message if the new dependency version is different from the old one.\n\nfn print_upgrade_if_necessary(\n\n crate_name: &str,\n\n old_dep: &toml_edit::Item,\n\n new_version: &toml_edit::Item,\n\n) -> Result<()> {\n\n let old_version = if str_or_1_len_table(old_dep) {\n\n old_dep.clone()\n\n } else if old_dep.is_table_like() {\n\n let version = old_dep[\"version\"].clone();\n\n if version.is_none() {\n\n return Err(\"Missing version field\".into());\n\n }\n\n version\n\n } else {\n\n unreachable!(\"Invalid old dependency type\")\n\n };\n\n\n\n if let (Some(old_version), Some(new_version)) = (old_version.as_str(), new_version.as_str()) {\n\n if old_version == new_version {\n\n return Ok(());\n", "file_path": "src/manifest.rs", "rank": 69, "score": 84954.17764997076 }, { "content": "#[test]\n\nfn invalid_section() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/rm/Cargo.toml.sample\");\n\n\n\n execute_command(&[\"rm\", \"semver\", \"--build\"], &manifest);\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"rm\").as_str(),\n\n \"rm\",\n\n \"semver\",\n\n 
\"--build\",\n\n &format!(\"--manifest-path={}\", manifest),\n\n ])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .contains(\n\n \"Command failed due to unhandled error: The table `build-dependencies` could not be \\\n\n found.\",\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-rm.rs", "rank": 70, "score": 82285.21370984723 }, { "content": "#[test]\n\nfn unknown_flags() {\n\n assert_cli::Assert::command(&[get_command_path(\"add\").as_str(), \"add\", \"foo\", \"--flag\"])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .is(\n\n r\"error: Found argument '--flag' which wasn't expected, or isn't valid in this context\n\n\n\nUSAGE:\n\n cargo add [FLAGS] [OPTIONS] <crate>...\n\n\n\nFor more information try --help\",\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 71, "score": 82005.56645929032 }, { "content": "#[test]\n\nfn unknown_flags() {\n\n assert_cli::Assert::command(&[get_command_path(\"rm\").as_str(), \"rm\", \"foo\", \"--flag\"])\n\n .fails_with(1)\n\n .and()\n\n .stderr()\n\n .is(\n\n r\"error: Found argument '--flag' which wasn't expected, or isn't valid in this context\n\n\n\nUSAGE:\n\n cargo rm [FLAGS] [OPTIONS] <crates>...\n\n\n\nFor more information try --help\",\n\n )\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-rm.rs", "rank": 72, "score": 82005.56645929032 }, { "content": "#[derive(Deserialize)]\n\nstruct Versions {\n\n versions: Vec<CrateVersion>,\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 73, "score": 81828.0632377725 }, { "content": "fn str_or_1_len_table(item: &toml_edit::Item) -> bool {\n\n item.is_str() || item.as_table_like().map(|t| t.len() == 1).unwrap_or(false)\n\n}\n", "file_path": "src/manifest.rs", "rank": 74, "score": 81154.17399290128 }, { "content": "#[test]\n\nfn adds_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n 
assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", \"my-package\"], &manifest);\n\n\n\n // dependency present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"my-package\"];\n\n assert_eq!(val.as_str().unwrap(), \"my-package--CURRENT_VERSION_TEST\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 75, "score": 80907.54411695752 }, { "content": "fn overwrite_dependency_test(first_command: &[&str], second_command: &[&str], expected: &str) {\n\n // First, add a dependency.\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n execute_command(first_command, &manifest);\n\n\n\n // Then, overwite with the latest version\n\n execute_command(second_command, &manifest);\n\n\n\n // Verify that the dependency is as expected.\n\n let toml = get_toml(&manifest);\n\n let expected = r#\"[package]\n\nname = \"cargo-list-test-fixture\"\n\nversion = \"0.0.0\"\n\n\n\n[lib]\n\npath = \"dummy.rs\"\n\n\"#\n\n .to_string()\n\n + expected;\n\n let expected_dep: toml_edit::Document = expected.parse().expect(\"toml parse error\");\n\n assert_eq!(expected_dep.to_string(), toml.to_string());\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 76, "score": 79854.66175691025 }, { "content": "#[test]\n\nfn get_latest_version_from_json_test() {\n\n let versions: Versions = json::from_str(\n\n r#\"{\n\n \"versions\": [\n\n {\n\n \"crate\": \"treexml\",\n\n \"num\": \"0.3.1\",\n\n \"yanked\": true\n\n },\n\n {\n\n \"crate\": \"treexml\",\n\n \"num\": \"0.3.0\",\n\n \"yanked\": false\n\n }\n\n ]\n\n }\"#,\n\n )\n\n .expect(\"crate version is correctly parsed\");\n\n\n\n assert_eq!(\n\n read_latest_version(&versions, false)\n\n .unwrap()\n\n .version()\n\n .unwrap(),\n\n \"0.3.0\"\n\n );\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 77, "score": 79306.09595248073 }, { "content": "#[test]\n\nfn get_latest_stable_version_from_json() {\n\n let versions: Versions = json::from_str(\n\n 
r#\"{\n\n \"versions\": [\n\n {\n\n \"crate\": \"foo\",\n\n \"num\": \"0.6.0-alpha\",\n\n \"yanked\": false\n\n },\n\n {\n\n \"crate\": \"foo\",\n\n \"num\": \"0.5.0\",\n\n \"yanked\": false\n\n }\n\n ]\n\n }\"#,\n\n )\n\n .expect(\"crate version is correctly parsed\");\n\n\n\n assert_eq!(\n\n read_latest_version(&versions, false)\n\n .unwrap()\n\n .version()\n\n .unwrap(),\n\n \"0.5.0\"\n\n );\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 78, "score": 79306.09595248073 }, { "content": "#[test]\n\nfn get_no_latest_version_from_json_when_all_are_yanked() {\n\n let versions: Versions = json::from_str(\n\n r#\"{\n\n \"versions\": [\n\n {\n\n \"crate\": \"treexml\",\n\n \"num\": \"0.3.1\",\n\n \"yanked\": true\n\n },\n\n {\n\n \"crate\": \"treexml\",\n\n \"num\": \"0.3.0\",\n\n \"yanked\": true\n\n }\n\n ]\n\n }\"#,\n\n )\n\n .expect(\"crate version is correctly parsed\");\n\n\n\n assert!(read_latest_version(&versions, false).is_err());\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 79, "score": 79306.09595248073 }, { "content": "#[test]\n\nfn overwrite_git_with_path() {\n\n overwrite_dependency_test(\n\n &[\n\n \"add\",\n\n \"versioned-package\",\n\n \"--git\",\n\n \"git://git.git\",\n\n \"--optional\",\n\n ],\n\n &[\"add\", \"versioned-package\", \"--path\", \"../foo\"],\n\n r#\"\n\n[dependencies]\n\nversioned-package = { optional = true, path = \"../foo\" }\n\n\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 80, "score": 78982.96178202057 }, { "content": "#[test]\n\nfn overwrite_version_with_git() {\n\n overwrite_dependency_test(\n\n &[\"add\", \"versioned-package\", \"--vers\", \"0.1.1\", \"--optional\"],\n\n &[\"add\", \"versioned-package\", \"--git\", \"git://git.git\"],\n\n r#\"\n\n[dependencies]\n\nversioned-package = { optional = true, git = \"git://git.git\" }\n\n\"#,\n\n )\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 81, "score": 78586.75002712746 }, { "content": "#[test]\n\nfn adds_multiple_dependencies() {\n\n let 
(_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", \"my-package1\", \"my-package2\"], &manifest);\n\n\n\n // dependencies present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"my-package1\"];\n\n assert_eq!(val.as_str().unwrap(), \"my-package1--CURRENT_VERSION_TEST\");\n\n let val = &toml[\"dependencies\"][\"my-package2\"];\n\n assert_eq!(val.as_str().unwrap(), \"my-package2--CURRENT_VERSION_TEST\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 82, "score": 77757.77603188997 }, { "content": "#[test]\n\nfn remove_existing_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/rm/Cargo.toml.sample\");\n\n\n\n let toml = get_toml(&manifest);\n\n assert!(!toml[\"dependencies\"][\"docopt\"].is_none());\n\n execute_command(&[\"rm\", \"docopt\"], &manifest);\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"][\"docopt\"].is_none());\n\n}\n\n\n", "file_path": "tests/cargo-rm.rs", "rank": 83, "score": 77757.77603188997 }, { "content": "#[test]\n\nfn adds_optional_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\n\n \"add\",\n\n \"versioned-package\",\n\n \"--vers\",\n\n \">=0.1.1\",\n\n \"--optional\",\n\n ],\n\n &manifest,\n\n );\n\n\n\n // dependency present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"versioned-package\"][\"optional\"];\n\n assert_eq!(val.as_bool().expect(\"optional not a bool\"), true);\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 84, "score": 77757.77603188997 }, { "content": "#[test]\n\nfn 
adds_prerelease_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(&[\"add\", \"my-package\", \"--allow-prerelease\"], &manifest);\n\n\n\n // dependency present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"my-package\"];\n\n assert_eq!(val.as_str().unwrap(), \"my-package--PRERELEASE_VERSION_TEST\");\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 85, "score": 77757.77603188997 }, { "content": "#[test]\n\nfn get_latest_unstable_or_stable_version_from_json() {\n\n let versions: Versions = json::from_str(\n\n r#\"{\n\n \"versions\": [\n\n {\n\n \"crate\": \"foo\",\n\n \"num\": \"0.6.0-alpha\",\n\n \"yanked\": false\n\n },\n\n {\n\n \"crate\": \"foo\",\n\n \"num\": \"0.5.0\",\n\n \"yanked\": false\n\n }\n\n ]\n\n }\"#,\n\n )\n\n .expect(\"crate version is correctly parsed\");\n\n\n\n assert_eq!(\n\n read_latest_version(&versions, true)\n\n .unwrap()\n\n .version()\n\n .unwrap(),\n\n \"0.6.0-alpha\"\n\n );\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 86, "score": 76497.11147705316 }, { "content": "#[test]\n\nfn git_and_path_are_mutually_exclusive() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n let call = process::Command::new(get_command_path(\"add\").as_str())\n\n .args(&[\"add\", BOGUS_CRATE_NAME])\n\n .args(&[\"--git\", \"git://git.git\"])\n\n .args(&[\"--path\", \"/path/here\"])\n\n .arg(format!(\"--manifest-path={}\", &manifest))\n\n .output()\n\n .unwrap();\n\n\n\n assert!(!call.status.success());\n\n assert!(no_manifest_failures(&get_toml(&manifest).root));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 87, "score": 76061.92511766723 }, { "content": "#[test]\n\n#[cfg(feature = \"test-external-apis\")]\n\nfn adds_dependency_normalized_name() {\n\n 
let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"add\").as_str(),\n\n \"add\",\n\n \"linked_hash_map\",\n\n &format!(\"--manifest-path={}\", manifest),\n\n ])\n\n .succeeds()\n\n .and()\n\n .stdout()\n\n .contains(\"WARN: Added `linked-hash-map` instead of `linked_hash_map`\")\n\n .unwrap();\n\n\n\n // dependency present afterwards\n\n let toml = get_toml(&manifest);\n\n assert!(!toml[\"dependencies\"][\"linked-hash-map\"].is_none());\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 88, "score": 74895.01803763909 }, { "content": "#[test]\n\nfn remove_multiple_existing_dependencies() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/rm/Cargo.toml.sample\");\n\n\n\n let toml = get_toml(&manifest);\n\n assert!(!toml[\"dependencies\"][\"docopt\"].is_none());\n\n assert!(!toml[\"dependencies\"][\"semver\"].is_none());\n\n execute_command(&[\"rm\", \"docopt\", \"semver\"], &manifest);\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"][\"docopt\"].is_none());\n\n assert!(toml[\"dependencies\"][\"semver\"].is_none());\n\n}\n\n\n", "file_path": "tests/cargo-rm.rs", "rank": 89, "score": 74891.9658406949 }, { "content": "#[test]\n\nfn adds_multiple_optional_dependencies() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"--optional\", \"my-package1\", \"my-package2\"],\n\n &manifest,\n\n );\n\n\n\n // dependencies present afterwards\n\n let toml = get_toml(&manifest);\n\n assert!(&toml[\"dependencies\"][\"my-package1\"][\"optional\"]\n\n .as_bool()\n\n .expect(\"optional not a bool\"));\n\n 
assert!(&toml[\"dependencies\"][\"my-package2\"][\"optional\"]\n\n .as_bool()\n\n .expect(\"optional not a bool\"));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 90, "score": 74891.9658406949 }, { "content": "#[test]\n\nfn adds_no_default_features_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\n\n \"add\",\n\n \"versioned-package\",\n\n \"--vers\",\n\n \">=0.1.1\",\n\n \"--no-default-features\",\n\n ],\n\n &manifest,\n\n );\n\n\n\n // dependency present afterwards\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"versioned-package\"][\"default-features\"];\n\n assert_eq!(val.as_bool().expect(\"default-features not a bool\"), false);\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 91, "score": 74891.9658406949 }, { "content": "#[test]\n\nfn add_prints_message_for_build_deps() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"add\").as_str(),\n\n \"add\",\n\n \"hello-world\",\n\n \"--build\",\n\n \"--vers\",\n\n \"0.1.0\",\n\n &format!(\"--manifest-path={}\", manifest),\n\n ])\n\n .succeeds()\n\n .and()\n\n .stdout()\n\n .is(\"Adding hello-world v0.1.0 to build-dependencies\")\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 92, "score": 73552.32214809221 }, { "content": "#[test]\n\nfn add_prints_message_for_dev_deps() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n assert_cli::Assert::command(&[\n\n get_command_path(\"add\").as_str(),\n\n \"add\",\n\n \"docopt\",\n\n \"--dev\",\n\n \"--vers\",\n\n \"0.8.0\",\n\n &format!(\"--manifest-path={}\", manifest),\n\n ])\n\n .succeeds()\n\n .and()\n\n .stdout()\n\n .is(\"Adding docopt v0.8.0 to 
dev-dependencies\")\n\n .unwrap();\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 93, "score": 73499.24459198568 }, { "content": "#[test]\n\n#[cfg(feature = \"test-external-apis\")]\n\nfn adds_git_source_without_flag() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependency not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"https://github.com/killercup/cargo-edit.git\"],\n\n &manifest,\n\n );\n\n\n\n let toml = get_toml(&manifest);\n\n let val = &toml[\"dependencies\"][\"cargo-edit\"];\n\n assert_eq!(\n\n val[\"git\"].as_str(),\n\n Some(\"https://github.com/killercup/cargo-edit.git\")\n\n );\n\n\n\n // check this works with other flags (e.g. --dev) as well\n", "file_path": "tests/cargo-add.rs", "rank": 94, "score": 73232.36623207988 }, { "content": "#[test]\n\nfn remove_existing_dependency_from_specific_section() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/rm/Cargo.toml.sample\");\n\n\n\n // Test removing dev dependency.\n\n let toml = get_toml(&manifest);\n\n assert!(!toml[\"dev-dependencies\"][\"regex\"].is_none());\n\n execute_command(&[\"rm\", \"--dev\", \"regex\"], &manifest);\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dev-dependencies\"][\"regex\"].is_none());\n\n\n\n // Test removing build dependency.\n\n let toml = get_toml(&manifest);\n\n assert!(!toml[\"build-dependencies\"][\"semver\"].is_none());\n\n execute_command(&[\"rm\", \"--build\", \"semver\"], &manifest);\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"build-dependencies\"].is_none());\n\n}\n\n\n", "file_path": "tests/cargo-rm.rs", "rank": 95, "score": 72273.37070527971 }, { "content": "#[test]\n\nfn remove_section_after_removed_last_dependency() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/rm/Cargo.toml.sample\");\n\n\n\n let toml = get_toml(&manifest);\n\n 
assert!(!toml[\"dev-dependencies\"][\"regex\"].is_none());\n\n assert_eq!(toml[\"dev-dependencies\"].as_table().unwrap().len(), 2);\n\n\n\n execute_command(&[\"rm\", \"--dev\", \"regex\", \"serde\"], &manifest);\n\n\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dev-dependencies\"].is_none());\n\n}\n\n\n\n// https://github.com/killercup/cargo-edit/issues/32\n", "file_path": "tests/cargo-rm.rs", "rank": 96, "score": 72273.37070527971 }, { "content": "#[test]\n\nfn adds_multiple_no_default_features_dependencies() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_command(\n\n &[\"add\", \"--no-default-features\", \"my-package1\", \"my-package2\"],\n\n &manifest,\n\n );\n\n\n\n // dependencies present afterwards\n\n let toml = get_toml(&manifest);\n\n assert!(!&toml[\"dependencies\"][\"my-package1\"][\"default-features\"]\n\n .as_bool()\n\n .expect(\"default-features not a bool\"));\n\n assert!(!&toml[\"dependencies\"][\"my-package2\"][\"default-features\"]\n\n .as_bool()\n\n .expect(\"default-features not a bool\"));\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 97, "score": 72273.37070527971 }, { "content": "#[test]\n\nfn adds_multiple_dependencies_with_conflicts_option() {\n\n let (_tmpdir, manifest) = clone_out_test(\"tests/fixtures/add/Cargo.toml.sample\");\n\n\n\n // dependencies not present beforehand\n\n let toml = get_toml(&manifest);\n\n assert!(toml[\"dependencies\"].is_none());\n\n\n\n execute_bad_command(\n\n &[\"add\", \"my-package1\", \"my-package2\", \"--vers\", \"0.1.0\"],\n\n &manifest,\n\n );\n\n execute_bad_command(\n\n &[\n\n \"add\",\n\n \"my-package1\",\n\n \"my-package2\",\n\n \"--git\",\n\n \"https://github.com/dcjanus/invalid\",\n\n ],\n\n &manifest,\n\n );\n\n execute_bad_command(\n\n &[\"add\", \"my-package1\", \"my-package2\", \"--path\", 
\"./foo\"],\n\n &manifest,\n\n );\n\n}\n\n\n", "file_path": "tests/cargo-add.rs", "rank": 98, "score": 72273.37070527971 }, { "content": "fn get_with_timeout(url: &str, timeout: Duration) -> reqwest::Result<reqwest::Response> {\n\n let client = reqwest::ClientBuilder::new()\n\n .timeout(timeout)\n\n .proxy(reqwest::Proxy::custom(|url| {\n\n env_proxy::for_url(url).to_url()\n\n }))\n\n .build()?;\n\n\n\n client\n\n .get(url)\n\n .send()\n\n .and_then(reqwest::Response::error_for_status)\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 99, "score": 70462.56476506531 } ]
Rust
rpg-cli/src/main.rs
Jomy10/rpg-lang
e7f283633ff5931fe4aef27bdabf4e794a840369
use std::{env, fs}; use std::path::Path; use std::process::Command; use std::time::Duration; use clap::{App, arg}; use directories_next::ProjectDirs; use rpg_compiler::{Config}; use rpg_compiler::user_output::CompileError; use simple_colors::{blue, green}; use spinner::{SpinnerHandle, SpinnerBuilder}; use spinners::utils::spinner_data::SpinnerData; pub struct ColoredSpinner { handle: SpinnerHandle, } impl ColoredSpinner { pub fn new(message: String) -> Self { let spinner_data = SpinnerData {frames: vec![ "\x1b[34m⠋\x1b[0m", "\x1b[34m⠙\x1b[0m", "\x1b[34m⠚\x1b[0m", "\x1b[34m⠞\x1b[0m", "\x1b[34m⠖\x1b[0m", "\x1b[34m⠦\x1b[0m", "\x1b[34m⠴\x1b[0m", "\x1b[34m⠲\x1b[0m", "\x1b[34m⠳\x1b[0m", "\x1b[34m⠓\x1b[0m" ], interval: 80}; let handle = SpinnerBuilder::new(message) .spinner(spinner_data.frames.clone()) .step(Duration::from_millis(spinner_data.interval.into())) .start(); ColoredSpinner { handle } } pub fn message(&self, message: String) -> Option<String> { self.handle.update(message) } pub fn stop(self) { self.handle.close(); } } fn main() { let dir = ProjectDirs::from("be", "jonaseveraert", "rpgc").expect("No valid home directory path could be retrieved from the operating system"); let data_dir = dir.data_dir(); let matches = App::new("RPG Compiler") .version("0.1.0") .author("Jonas Everaert <[email protected]>") .about("The official compiler for the RPG esoteric programming language") .arg(arg!([file] "The .rpg source file you wish to compile")) .arg( arg!([output_dir] "Sets the output directory of the compiled app") ) .arg( arg!(-r --release "Compiles the program with optimizations") ) .arg( arg!(-d --debug "Compiles the program without optimization (default)") ) .arg( arg!(-m --max_char <VALUE> "Optionally sets the maximum amount of characters allowed in the program, setting it to more than 10 is considered cheating, though.") .required(false) ) .arg( arg!(-v --verbose "Prints out more error messages") ) .subcommand( App::new("clean") .about("Cleans the build folder"), 
) .get_matches(); if let Some(file) = matches.value_of("file") { let debug = !matches.is_present("release"); let verbose = matches.is_present("verbose"); let max_char = matches.value_of("max_char"); let _output_dir = matches.value_of("output_dir"); let cd = env::current_dir().expect_compile_error("Could not find current working directory"); let output_dir: &Path; if let Some(dir) = _output_dir { output_dir = Path::new(dir); } else { output_dir = cd.as_path() }; let app_name = "rpg"; let compiled = if verbose == true || max_char.is_some() { unsafe { rpg_compiler::compile_with_config( file, Config { max_char: if max_char.is_some() { max_char.unwrap().parse::<usize>().expect_compile_error("Did not specify a valid number for max_char") } else { 10 }, verbose }, ) } } else { rpg_compiler::compile(file) }; let compiled_path = Path::new(data_dir).join("tmp_compiled"); if !compiled_path.exists() { fs::create_dir_all(&compiled_path).expect_compile_error("Couldn't create working directory."); fs::create_dir(&compiled_path.join("src")).expect_compile_error("Couldn't create working directory."); fs::write(&compiled_path.join("Cargo.toml"), CARGO_TOML).expect_compile_error("Couldn't create working directory."); } fs::write(&compiled_path.join("src").join("main.rs"), compiled).expect_compile_error("Couldn't write compiled source file."); let sp = ColoredSpinner::new("Compiling rust project...".to_string()); let o = if cfg!(target_os = "windows") { Command::new("cmd") .args(["/C", &format!("cd \"{}\" && cargo build{}", compiled_path.to_str().expect_compile_error( "unable to convert compiled path to string." ), if debug {""} else {" --release"} ).trim()]) .output() .expect_compile_error("Failed to execute rust compiler") } else { Command::new("sh") .arg("-c") .arg(&format!("cd \"{}\" && cargo build{}", compiled_path.to_str().expect_compile_error( "unable to convert compiled path to string." 
), if debug {""} else {" --release"} ).trim()) .output() .expect_compile_error("Failed to execute rust compiler") }; sp.stop(); if verbose { let out = String::from_utf8(o.stdout).expect("Couldn't convert utf8."); let out = out.trim(); let err = String::from_utf8(o.stderr).expect("Couldn't convert utf8."); let err = err.trim(); println!("{out}"); println!("{err}"); } println!(); let sp = ColoredSpinner::new("Copying...".to_string()); let output_dir = Path::new(output_dir); let compiled_file_dir = compiled_path.join("target").join("debug").join(app_name); fs::write(output_dir.join(app_name), fs::read(compiled_file_dir).expect_compile_error("Compiled file not found or no read access.")).expect_compile_error("Couldn't write output file."); sp.message("Setting file permissions...".to_string()); let o = if cfg!(target_os = "windows") { Command::new("cmd") .args(["/C", &format!("icacls {app_name} /grant user:(gw,ge,d,wd,ra,rea)")]) .output() .expect_compile_error("Failed to execute rust compiler") } else { Command::new("sh") .arg("-c") .arg(&format!("chmod +x {app_name}")) .output() .expect_compile_error("Failed to execute rust compiler") }; sp.stop(); if verbose { println!("{}", String::from_utf8(o.stdout).expect("Couldn't parse stdout").trim()); println!("{}", String::from_utf8(o.stderr).expect("Couldn't parse stderr").trim()); } println!("\n{}", blue!("Compilation successful.")) } else if let Some(("clean", _)) = matches.subcommand() { fs::remove_dir_all(data_dir).expect("Couldn't remove working directory."); println!("{}", green!("Cleaned build folder")) } else { println!("Please specify a source file") } } const CARGO_TOML: &str = r#"[package] name = "rpg" version = "0.1.0" edition = "2021""#;
use std::{env, fs}; use std::path::Path; use std::process::Command; use std::time::Duration; use clap::{App, arg}; use directories_next::ProjectDirs; use rpg_compiler::{Config}; use rpg_compiler::user_output::CompileError; use simple_colors::{blue, green}; use spinner::{SpinnerHandle, SpinnerBuilder}; use spinners::utils::spinner_data::SpinnerData; pub struct ColoredSpinner { handle: SpinnerHandle, } impl ColoredSpinner { pub fn new(message: String) -> Self { let spinner_data = SpinnerData {frames: vec![ "\x1b[34m⠋\x1b[0m", "\x1b[34m⠙\x1b[0m", "\x1b[34m⠚\x1b[0m", "\x1b[34m⠞\x1b[0m", "\x1b[34m⠖\x1b[0m", "\x1b[34m⠦\x1b[0m", "\x1b[34m⠴\x1b[0m", "\x1b[34m⠲\x1b[0m", "\x1b[34m⠳\x1b[0m", "\x1b[34m⠓\x1b[0m" ], interval: 80}; let handle = SpinnerBuilder::new(message) .spinner(spinner_data.frames.clone()) .step(Duration::from_millis(spinner_data.interval.into())) .start(); ColoredSpinner { handle } } pub fn message(&self, message: String) -> Option<String> { self.handle.update(message) } pub fn stop(self) { self.handle.close(); } } fn main() {
let data_dir = dir.data_dir(); let matches = App::new("RPG Compiler") .version("0.1.0") .author("Jonas Everaert <[email protected]>") .about("The official compiler for the RPG esoteric programming language") .arg(arg!([file] "The .rpg source file you wish to compile")) .arg( arg!([output_dir] "Sets the output directory of the compiled app") ) .arg( arg!(-r --release "Compiles the program with optimizations") ) .arg( arg!(-d --debug "Compiles the program without optimization (default)") ) .arg( arg!(-m --max_char <VALUE> "Optionally sets the maximum amount of characters allowed in the program, setting it to more than 10 is considered cheating, though.") .required(false) ) .arg( arg!(-v --verbose "Prints out more error messages") ) .subcommand( App::new("clean") .about("Cleans the build folder"), ) .get_matches(); if let Some(file) = matches.value_of("file") { let debug = !matches.is_present("release"); let verbose = matches.is_present("verbose"); let max_char = matches.value_of("max_char"); let _output_dir = matches.value_of("output_dir"); let cd = env::current_dir().expect_compile_error("Could not find current working directory"); let output_dir: &Path; if let Some(dir) = _output_dir { output_dir = Path::new(dir); } else { output_dir = cd.as_path() }; let app_name = "rpg"; let compiled = if verbose == true || max_char.is_some() { unsafe { rpg_compiler::compile_with_config( file, Config { max_char: if max_char.is_some() { max_char.unwrap().parse::<usize>().expect_compile_error("Did not specify a valid number for max_char") } else { 10 }, verbose }, ) } } else { rpg_compiler::compile(file) }; let compiled_path = Path::new(data_dir).join("tmp_compiled"); if !compiled_path.exists() { fs::create_dir_all(&compiled_path).expect_compile_error("Couldn't create working directory."); fs::create_dir(&compiled_path.join("src")).expect_compile_error("Couldn't create working directory."); fs::write(&compiled_path.join("Cargo.toml"), CARGO_TOML).expect_compile_error("Couldn't 
create working directory."); } fs::write(&compiled_path.join("src").join("main.rs"), compiled).expect_compile_error("Couldn't write compiled source file."); let sp = ColoredSpinner::new("Compiling rust project...".to_string()); let o = if cfg!(target_os = "windows") { Command::new("cmd") .args(["/C", &format!("cd \"{}\" && cargo build{}", compiled_path.to_str().expect_compile_error( "unable to convert compiled path to string." ), if debug {""} else {" --release"} ).trim()]) .output() .expect_compile_error("Failed to execute rust compiler") } else { Command::new("sh") .arg("-c") .arg(&format!("cd \"{}\" && cargo build{}", compiled_path.to_str().expect_compile_error( "unable to convert compiled path to string." ), if debug {""} else {" --release"} ).trim()) .output() .expect_compile_error("Failed to execute rust compiler") }; sp.stop(); if verbose { let out = String::from_utf8(o.stdout).expect("Couldn't convert utf8."); let out = out.trim(); let err = String::from_utf8(o.stderr).expect("Couldn't convert utf8."); let err = err.trim(); println!("{out}"); println!("{err}"); } println!(); let sp = ColoredSpinner::new("Copying...".to_string()); let output_dir = Path::new(output_dir); let compiled_file_dir = compiled_path.join("target").join("debug").join(app_name); fs::write(output_dir.join(app_name), fs::read(compiled_file_dir).expect_compile_error("Compiled file not found or no read access.")).expect_compile_error("Couldn't write output file."); sp.message("Setting file permissions...".to_string()); let o = if cfg!(target_os = "windows") { Command::new("cmd") .args(["/C", &format!("icacls {app_name} /grant user:(gw,ge,d,wd,ra,rea)")]) .output() .expect_compile_error("Failed to execute rust compiler") } else { Command::new("sh") .arg("-c") .arg(&format!("chmod +x {app_name}")) .output() .expect_compile_error("Failed to execute rust compiler") }; sp.stop(); if verbose { println!("{}", String::from_utf8(o.stdout).expect("Couldn't parse stdout").trim()); println!("{}", 
String::from_utf8(o.stderr).expect("Couldn't parse stderr").trim()); } println!("\n{}", blue!("Compilation successful.")) } else if let Some(("clean", _)) = matches.subcommand() { fs::remove_dir_all(data_dir).expect("Couldn't remove working directory."); println!("{}", green!("Cleaned build folder")) } else { println!("Please specify a source file") } } const CARGO_TOML: &str = r#"[package] name = "rpg" version = "0.1.0" edition = "2021""#;
let dir = ProjectDirs::from("be", "jonaseveraert", "rpgc").expect("No valid home directory path could be retrieved from the operating system");
assignment_statement
[ { "content": "pub fn compile(file: &str) -> String {\n\n let sp = ColoredSpinner::new(\"Reading input...\".to_string());\n\n let code = fs::read_to_string(file).expect_compile_error(&format!(\"{file} could not be found.\"));\n\n let code = rm_comments(&code);\n\n let code = code.trim();\n\n sp.stop(); println!();\n\n let sp = ColoredSpinner::new(\"Tokenizing...\".to_string());\n\n let mut tokens = Tokenizer::new(&code).tokenize();\n\n sp.stop(); println!();\n\n let sp = ColoredSpinner::new(\"Parsing...\".to_string());\n\n let parsed = Parser::new(&mut tokens).parse();\n\n let parsed = Arc::new(parsed);\n\n sp.stop(); println!();\n\n let thread_parsed = parsed.clone();\n\n let type_checker = thread::spawn(move || {\n\n TypeChecker::new(&thread_parsed).check_types();\n\n });\n\n let sp = ColoredSpinner::new(\"Generating...\".to_string());\n\n let generated = Generator::new(&parsed).generate();\n\n sp.stop(); println!();\n", "file_path": "rpg-compiler/src/compile/mod.rs", "rank": 0, "score": 90417.81673713197 }, { "content": "pub fn rm_comments(code: &str) -> String {\n\n let lines = code.lines();\n\n let comment_regex = Regex::new(\"#\").unwrap();\n\n let uncommented = lines.into_iter().map(|line| {\n\n let mut l = line;\n\n if let Some(index) = comment_regex.find(line) {\n\n l = &l[0..index.start()];\n\n }\n\n l.to_string()\n\n }).collect::<Vec<String>>().join(\"\\n\");\n\n uncommented\n\n}", "file_path": "rpg-compiler/src/tokenizer/rm_comments.rs", "rank": 2, "score": 86194.54995693051 }, { "content": "/// Generates a uid, which is us just a counter.\n\n///\n\n/// Will not return a uid of 0, because the counter is incremented before it is returned.\n\n/// Also means the max value cannot be returned as this function will panic due to trying to\n\n/// increment with overflow.\n\npub fn generate_uid() -> usize {\n\n unsafe {\n\n UUID_COUNTER += 1;\n\n UUID_COUNTER\n\n }\n\n}", "file_path": "rpg-compiler/src/uid/mod.rs", "rank": 3, "score": 65369.982100761226 }, { 
"content": "/// Runs a shell command and returns the output\n\nfunc shell(_ command: String) -> String {\n\n let task = Process()\n\n let pipe = Pipe()\n\n\n\n task.standardOutput = pipe\n\n task.standardError = pipe\n\n task.arguments = [\"-c\", command]\n\n task.launchPath = \"/bin/zsh\"\n\n task.launch()\n\n\n\n let data = pipe.fileHandleForReading.readDataToEndOfFile()\n\n let output = String(data: data, encoding: .utf8)!\n\n\n\n return output\n\n}\n\n\n", "file_path": "rpg-cli/scripts/release.swift", "rank": 4, "score": 44977.07038022415 }, { "content": "struct Game {\n\n alive: Vec<u32>,\n\n max_chars: usize\n\n}\n\nimpl Game {\n\n fn add_actor(&mut self, actor: u32) {\n\n self.alive.push(actor);\n\n if self.alive.len() > self.max_chars {\n\n // Runtime error\n\n let s = &format!(\\\"Your actors exceeded the maximum amount of actors allowed ({})\\\", self.max_chars);\n\n eprintln!(\\\"{}\n\n{}\\\", cyan!(\\\"Runtime error\\\"), red!(s));\n\n println!(\\\"{} Actors alive: {:?}\\\", blue!(\\\"HINT:\\\"), self.alive);\n\n std::process::exit(1)\n\n }\n\n }\n\n fn rm_actor(&mut self, id: u32) {\n\n let i = self.alive.iter().enumerate().find_map(|(i,a)| {if a == &id {Some(i)}else{None}});\n\n if let Some(i) = i {\n\n self.alive.remove(i);\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 5, "score": 39436.796269351806 }, { "content": "struct Merchant;\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 6, "score": 39436.796269351806 }, { "content": "struct ColoredSpinner {\n\n handle: SpinnerHandle,\n\n}\n\n\n\nimpl ColoredSpinner {\n\n /// Create a new spinner along with a message\n\n ///\n\n /// Returns a spinner\n\n fn new(message: String) -> Self {\n\n // Dots3\n\n let spinner_data = SpinnerData {frames: vec![\n\n \"\\x1b[34m⠋\\x1b[0m\",\n\n \"\\x1b[34m⠙\\x1b[0m\",\n\n \"\\x1b[34m⠚\\x1b[0m\",\n\n \"\\x1b[34m⠞\\x1b[0m\",\n\n \"\\x1b[34m⠖\\x1b[0m\",\n\n \"\\x1b[34m⠦\\x1b[0m\",\n\n \"\\x1b[34m⠴\\x1b[0m\",\n\n \"\\x1b[34m⠲\\x1b[0m\",\n\n 
\"\\x1b[34m⠳\\x1b[0m\",\n", "file_path": "rpg-compiler/src/compile/mod.rs", "rank": 7, "score": 38436.240594322495 }, { "content": "/// Either a char or a zombie\n\nstruct Actor<'a> {\n\n id: u32,\n\n /// Can derive actor type using `health`\n\n health: ActorHealth,\n\n attack: u32,\n\n items: Vec<&'a Item>,\n\n confused: bool\n\n}\n\nimpl<'a> Actor<'a> {\n\n fn new(id: u32, h: ActorHealth, a: u32) -> Actor<'a> { Actor { id, health: h, attack: a, items: Vec::new(), confused: false } }\n\n fn attacked(&mut self, val: u32, game: &mut Game) { self.health.attacked(val, self.id, game) }\n\n fn heal(&mut self, val: u32) { self.health.heal(val) }\n\n /// Deprecated\n\n fn validate_actor(&self) -> bool { if let ActorHealth::Char(val) = self.health { return val != (0 as u32); } else { return true; } }\n\n fn health(&self) -> ActorHealth {\n\n if self.confused {\n\n if let ActorHealth::Char(v) = self.health {\n\n return ActorHealth::Char(v-1);\n\n } else if let ActorHealth::Zombie(v) = self.health {\n\n return ActorHealth::Zombie(v-1);\n\n } else {\n\n runtime_error!(\\\"This well never happen.\\\");\n\n }\n\n } else {\n\n return self.health;\n\n }\n\n }\n\n}\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 8, "score": 37649.2753310113 }, { "content": "/// Runs a shell command and prints the output\n\nfunc pshell(_ command: String) {\n\n print(shell(command))\n\n}\n\n\n\nlet build: String = ProcessInfo.processInfo.environment[\"BUILD\"] ?? 
\"false\"\n\n\n\nif build == \"true\" {\n\n pshell(\"./scripts/build.sh\")\n\n}\n\n\n\npshell(\"./scripts/package.sh\")", "file_path": "rpg-cli/scripts/release.swift", "rank": 9, "score": 34083.83496073526 }, { "content": " pub trait ItemClone {\n\n fn clone_box(&self) -> Box<dyn Item>;\n\n }\n\n \n\n impl<T> ItemClone for T\n\n where T: 'static + Item + Clone\n\n {\n\n fn clone_box(&self) -> Box<dyn Item> {\n\n Box::new(self.clone())\n\n }\n\n }\n\n \n\n impl Clone for Box<dyn Item> {\n\n fn clone(&self) -> Box<dyn Item> {\n\n self.clone_box()\n\n }\n\n }\n\n \n\n pub fn parse_dyn_node<NodeType: 'static>(n: &dyn Node) -> &NodeType {\n\n let parse_node: &NodeType = match n.as_any().downcast_ref::<NodeType>() {\n\n Some(n) => n,\n\n None => crate::compile_error!(\"Expected wrong type\")\n\n };\n\n parse_node\n\n }\n\n}", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 10, "score": 34004.17936228428 }, { "content": " /// Implements Clone for Box<dyn Node + Send + Sync>\n\n pub trait NodeClone {\n\n fn clone_box(&self) -> Box<dyn Node + Send + Sync>;\n\n }\n\n \n\n impl<T> NodeClone for T\n\n where T: 'static + Node + Clone\n\n {\n\n fn clone_box(&self) -> Box<dyn Node + Send + Sync> {\n\n Box::new(self.clone())\n\n }\n\n }\n\n \n\n impl Clone for Box<dyn Node + Send + Sync> {\n\n fn clone(&self) -> Box<dyn Node + Send + Sync> {\n\n self.clone_box()\n\n }\n\n }\n\n \n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 11, "score": 34004.17936228428 }, { "content": "pub trait CompileError<T> {\n\n fn expect_compile_error(self, msg: &str) -> T;\n\n}\n\n\n\nimpl<T> CompileError<T> for Result<T, ParseIntError> {\n\n fn expect_compile_error(self, msg: &str) -> T {\n\n match self {\n\n Ok(t) => t,\n\n Err(e) => unsafe {\n\n if VERBOSE {\n\n crate::compile_error!(\"{}\\n== VERBOSE OUTPUT ==\\n{}\", msg, e)\n\n } else {\n\n crate::compile_error!(\"{}\", msg)\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T> CompileError<T> for Result<T, String> {\n", "file_path": 
"rpg-compiler/src/user_output/compile_error.rs", "rank": 12, "score": 30400.663484269855 }, { "content": " pub trait Item: fmt::Debug + ItemClone {}\n\n \n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 13, "score": 28541.784724961406 }, { "content": " pub trait Node: fmt::Debug + NodeClone + Send + Sync {\n\n fn get_type(&self) -> NodeType;\n\n fn as_any(&self) -> &dyn Any;\n\n fn get_id(&self) -> usize;\n\n }\n\n \n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 23, "score": 25922.93274832671 }, { "content": " \"\\x1b[34m⠓\\x1b[0m\"\n\n ], interval: 80};\n\n \n\n let handle = SpinnerBuilder::new(message)\n\n .spinner(spinner_data.frames.clone())\n\n .step(Duration::from_millis(spinner_data.interval.into()))\n\n .start();\n\n \n\n ColoredSpinner { handle }\n\n }\n\n \n\n /// Update spinner's message\n\n ///\n\n /// Returns the String that is put in in case the sender could not send.\n\n pub fn message(&self, message: String) -> Option<String> {\n\n self.handle.update(message)\n\n }\n\n \n\n /// Stop the spinner\n\n pub fn stop(self) {\n\n self.handle.close();\n\n }\n\n}", "file_path": "rpg-compiler/src/compile/mod.rs", "rank": 24, "score": 18.69566971332547 }, { "content": "use simple_colors::blue;\n\n\n\nuse crate::node::*;\n\nuse crate::{Token, TokenType, uid};\n\nuse crate::user_output::CompileError;\n\n\n\npub struct Parser<'a> {\n\n tokens: &'a mut Vec<Token>,\n\n /// Contains the names of all the named objects\n\n ids: Vec<(String, usize)>\n\n}\n\n\n\nimpl<'a> Parser<'a> {\n\n pub fn new(tokens: &'a mut Vec<Token>) -> Self {\n\n Self { tokens, ids: Vec::new() }\n\n }\n\n \n\n pub fn parse(&mut self) -> Vec<Box<dyn Node + Send + Sync>> {\n\n let mut nodes: Vec<Box<dyn Node + Send + Sync>> = Vec::new();\n\n while !self.tokens.is_empty() {\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 25, "score": 14.252188537721342 }, { "content": " Self::Potion => \"potion\".to_string(),\n\n Self::SpellBook => \"spellbook\".to_string(),\n\n 
Self::End => \"end\".to_string(),\n\n Self::FnAttacks => \"attacks\".to_string(),\n\n Self::FnShouts => \"shouts\".to_string(),\n\n Self::FnWhispers => \"whispers\".to_string(),\n\n Self::FnBuys => \"buys\".to_string(),\n\n Self::FnUses => \"uses\".to_string(),\n\n Self::FnCasting => \"casting\".to_string(),\n\n Self::SbFnSpeak => \"speak()\".to_string(),\n\n Self::SbFnUnZombify => \"un_zombify()\".to_string(),\n\n Self::SbFnConfuse => \"confuse()\".to_string(),\n\n Self::SbFnGodSpeech => \"god_speech()\".to_string(),\n\n Self::SbFnTimeWarp => \"time_warp()\".to_string(),\n\n Self::SbFnShift => \"shift()\".to_string(),\n\n Self::SbFnCreatePotion => \"create_potion()\".to_string(),\n\n Self::From => \"from\".to_string(),\n\n Self::Identifier => \"identifier\".to_string(),\n\n Self::Integer => \"integer\".to_string(),\n\n Self::Equals => \"'='\".to_string(),\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 26, "score": 13.038574308116559 }, { "content": "impl Token {\n\n pub fn new(t: TokenType, v: String) -> Self {\n\n Self { ttype: t, value: v }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq)]\n\n/// The token types available in the RPG language\n\npub enum TokenType {\n\n // types\n\n Char,\n\n Zombie,\n\n Merchant,\n\n Potion,\n\n SpellBook,\n\n End,\n\n // functions\n\n FnBuys,\n\n FnAttacks,\n\n FnShouts,\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 27, "score": 12.610330888790557 }, { "content": "/// Tokenizes an input string\n\npub struct Tokenizer<'a> {\n\n code: &'a str\n\n}\n\n\n\nimpl<'a> Tokenizer<'a> {\n\n pub fn new(code: &'a str) -> Tokenizer<'a> {\n\n Self { code }\n\n }\n\n \n\n pub fn tokenize(&mut self) -> Vec<Token> {\n\n let mut tokens: Vec<Token> = Vec::new();\n\n while !self.code.is_empty() {\n\n tokens.push(self.tokenize_next());\n\n self.code = self.code.trim();\n\n }\n\n tokens\n\n }\n\n \n\n fn tokenize_next(&mut self) -> Token {\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", 
"rank": 28, "score": 12.019261548621019 }, { "content": "use std::collections::HashMap;\n\nuse crate::node::{Node, NodeType, parse_dyn_node};\n\nuse crate::node;\n\nuse crate::compile_error;\n\n\n\n// TODO: type check shift() & create_potion()\n\npub struct TypeChecker<'a> {\n\n nodes: &'a Vec<Box<dyn Node + Send + Sync>>,\n\n var_map: HashMap<usize, NodeType>\n\n}\n\nimpl<'a> TypeChecker<'a> {\n\n pub fn new(nodes: &'a Vec<Box<dyn Node + Send + Sync>>) -> Self {\n\n Self {\n\n nodes,\n\n var_map: HashMap::new()\n\n }\n\n }\n\n pub fn check_types(&mut self) {\n\n self.check_node_types(self.nodes)\n\n }\n", "file_path": "rpg-compiler/src/type_checker/mod.rs", "rank": 29, "score": 11.367970848752744 }, { "content": " }\n\n }\n\n}\n\n\n\npub mod node {\n\n use std::any::Any;\n\n use std::fmt;\n\n use crate::{impl_node, new_node};\n\n \n\n new_node!(Char, name: String, health: u32, attack: u32);\n\n \n\n new_node!(Zombie, name: String, health: i32, attack: u32);\n\n \n\n new_node!(Merchant, name: String);\n\n \n\n new_node!(Potion, name: String, value: u32);\n\n impl Item for Potion {}\n\n \n\n new_node!(SpellBook, name: String);\n\n impl Item for SpellBook {}\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 30, "score": 10.38851749032299 }, { "content": " }\n\n }\n\n}\";\n\n\n\n/// RPG Code Generator\n\npub struct Generator<'a> {\n\n /// The maximum amount of characters allowed in the program\n\n max_chars: usize,\n\n /// The code\n\n nodes: &'a Vec<Box<dyn Node + Send + Sync>>,\n\n}\n\n\n\nimpl<'a> Generator<'a> {\n\n pub fn new(nodes: &'a Vec<Box<dyn Node + Send + Sync>>) -> Self {\n\n Self {\n\n max_chars: unsafe{MAX_CHAR},\n\n nodes,\n\n }\n\n }\n\n \n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 31, "score": 10.180262589814578 }, { "content": " Self::OParen => \"'('\".to_string(),\n\n Self::CParen => \"')'\".to_string(),\n\n Self::Comma => \"','\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n#[allow(unused)]\n\nimpl TokenType {\n\n fn 
formatted(&self) -> String {\n\n match self {\n\n Self::Identifier => { format!(\"an {}\", self.to_string()) }\n\n Self::Integer => { format!(\"an {}\", self.to_string()) }\n\n _ => self.to_string()\n\n }\n\n }\n\n}", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 32, "score": 9.98554392330329 }, { "content": " Equals,\n\n /// )\n\n OParen,\n\n /// (\n\n CParen,\n\n Comma\n\n}\n\n\n\nimpl Debug for TokenType {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"{}\", self.to_string())\n\n }\n\n}\n\n\n\nimpl ToString for TokenType {\n\n fn to_string(&self) -> String {\n\n match self {\n\n Self::Char => \"char\".to_string(),\n\n Self::Zombie => \"zombie\".to_string(),\n\n Self::Merchant => \"merchant\".to_string(),\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 33, "score": 9.840046419031102 }, { "content": " pub fn generate(&self) -> String {\n\n format!(\n\n \"{}\\nfn main() {{\n\n let mut game = Game {{ alive: Vec::new(), max_chars: {} }};\n\n {}\n\n}}\",\n\n STD_CODE,\n\n self.max_chars,\n\n self.generate_all().join(\"\\n\")\n\n )\n\n }\n\n \n\n fn generate_all(&self) -> Vec<String> {\n\n self.nodes.iter().map(|node| self.generate_next(node)).collect::<Vec<String>>()\n\n }\n\n \n\n fn generate_next(&self, node: &Box<dyn Node + Send + Sync>) -> String {\n\n let node = &**node;\n\n match node.get_type() {\n\n NodeType::Char => {\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 34, "score": 9.626947685886467 }, { "content": "use std::io;\n\nuse std::num::ParseIntError;\n\n#[macro_export]\n\n/// Prints out an compilation error message in red.\n\nmacro_rules! 
compile_error {\n\n ($( $arg: tt)*) => ({\n\n let s = format!($($arg)*);\n\n eprintln!(\"{}\", simple_colors::red!(s));\n\n std::process::exit(1)\n\n })\n\n}\n\n\n\npub static mut VERBOSE: bool = false;\n\n\n", "file_path": "rpg-compiler/src/user_output/compile_error.rs", "rank": 35, "score": 9.448222155140936 }, { "content": " #[derive(Debug, PartialEq)]\n\n pub enum NodeType {\n\n Char,\n\n Zombie,\n\n Merchant,\n\n Potion,\n\n SpellBook,\n\n FnBuys,\n\n FnAttacks,\n\n FnUses,\n\n FnShouts,\n\n /// `c1 shouts sb1 casting speak()`\n\n FnShoutsSpeak,\n\n FnWhispers,\n\n /// `c1 shouts sb1 casting speak()`\n\n FnWhispersSpeak,\n\n /// A spell book function cast\n\n FnUsesCasting,\n\n /// The body of a `FnUsesCasting` statement of type `TimeWarp`.\n\n /// \"SpellBody\"\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 36, "score": 7.917696353963979 }, { "content": "pub mod user_output;\n\nmod tokenizer;\n\npub use tokenizer::*;\n\nmod parser;\n\npub use parser::*;\n\npub mod uid;\n\npub mod generator;\n\npub mod type_checker;\n\nmod compile;\n\npub use compile::*;", "file_path": "rpg-compiler/src/lib.rs", "rank": 37, "score": 7.065973524034361 }, { "content": "use std::{fs, thread};\n\nuse std::cmp::max;\n\nuse std::sync::Arc;\n\nuse std::time::Duration;\n\nuse simple_colors::Color;\n\nuse spinner::{SpinnerBuilder, SpinnerHandle};\n\nuse spinners::utils::spinner_data::SpinnerData;\n\nuse crate::{Parser, rm_comments, Tokenizer};\n\nuse crate::generator::Generator;\n\nuse crate::type_checker::TypeChecker;\n\nuse crate::user_output::CompileError;\n\n\n", "file_path": "rpg-compiler/src/compile/mod.rs", "rank": 38, "score": 7.064946573361025 }, { "content": " type_checker.join().expect(\"Unable to join type-checker thread.\");\n\n generated\n\n}\n\n\n\npub struct Config {\n\n pub max_char: usize,\n\n pub verbose: bool\n\n}\n\n\n\npub unsafe fn compile_with_config(file: &str, conf: Config) -> String {\n\n let sp = ColoredSpinner::new(\"Reading 
input...\".to_string());\n\n let max_char = conf.max_char;\n\n let verbose = conf.verbose;\n\n if max_char > 10 { println!(\"Cheater :(\") }\n\n crate::generator::MAX_CHAR = max_char;\n\n crate::user_output::VERBOSE = verbose;\n\n let code = fs::read_to_string(file).expect_compile_error(&format!(\"{file} could not be found.\"));\n\n let code = rm_comments(&code);\n\n let code = code.trim();\n\n sp.stop(); println!();\n", "file_path": "rpg-compiler/src/compile/mod.rs", "rank": 39, "score": 6.762541727966509 }, { "content": "mod rm_comments;\n\nmod tokenizer;\n\npub use rm_comments::rm_comments;\n\npub use tokenizer::*;", "file_path": "rpg-compiler/src/tokenizer/mod.rs", "rank": 40, "score": 6.54223143035412 }, { "content": " pub id: usize,\n\n $(pub $field: $type),*\n\n }\n\n \n\n unsafe impl Send for $name {}\n\n unsafe impl Sync for $name {}\n\n \n\n impl Node for $name {\n\n impl_node!{NodeType::$name}\n\n }\n\n )\n\n }\n\n \n\n #[macro_export]\n\n /// Implements node methods\n\n macro_rules! 
impl_node {\n\n ( $type: expr ) => (\n\n fn get_type(&self) -> NodeType {\n\n $type\n\n }\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 41, "score": 6.521937638425766 }, { "content": " TokenType::SbFnTimeWarp => Box::new(self.parse_spell_time_warp(user_id, item_or_sb_id)),\n\n TokenType::SbFnShift => Box::new(self.parse_spell_shift(user_id, item_or_sb_id)),\n\n TokenType::SbFnCreatePotion => Box::new(self.parse_spell_pot(user_id, item_or_sb_id)),\n\n val => crate::compile_error!(\"{}\", &format!(\"Invalid spellbook spell: {}\", val.to_string()))\n\n }\n\n }\n\n }\n\n }\n\n // Item (potion) use\n\n Box::new(\n\n FnUses {\n\n id: uid::generate_uid(),\n\n user: user_id,\n\n item: item_or_sb_id\n\n }\n\n )\n\n }\n\n \n\n fn parse_spell_pot(&mut self, user: usize, sb: usize) -> node::FnUsesCasting {\n\n self.consume(TokenType::SbFnCreatePotion);\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 42, "score": 6.281131614800414 }, { "content": " TokenType::FnCasting => crate::compile_error!(\"Casting can not be used on its own. 
It has to be used alongside a `uses` action.\"),\n\n v => crate::compile_error!(\"Expected an action after identifier {}, but got {}\", ident.value, v.to_string())\n\n }\n\n }\n\n \n\n /// Parses a buys expression `c1 buys from m1`\n\n fn parse_fn_buys(&mut self, ident: &Token) -> node::FnBuys {\n\n let user = &ident.value;\n\n self.consume(TokenType::FnBuys);\n\n let item = self.consume(TokenType::Identifier).value;\n\n self.consume(TokenType::From);\n\n let merchant = self.consume(TokenType::Identifier).value;\n\n let user_id = &self.ids.iter().find_map(|obj| {\n\n if &obj.0 == user {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n\n }).expect_compile_error(&format!(\"No character {} exists.\\n{} Define the character before you use it.\", user, blue!(\"HINT:\")));\n\n // Note: Parser does not check if the right type is given, only if the ident exists!\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 43, "score": 6.229504474000873 }, { "content": " \n\n // new_node!(FnBuys, user: Char, item: Box<dyn BuyableNode>, merchant: Merchant);\n\n // Fields: id's of the nodes\n\n new_node!(FnBuys, user: usize, item: usize, merchant: usize);\n\n \n\n // The item here is a potion\n\n new_node!(FnUses, user: usize, item: usize);\n\n \n\n new_node!(FnUsesCasting, user: usize, spell_book: usize, function: SBFunction, parameter: Option<usize>, body: Option<FnBody>);\n\n \n\n new_node!(FnBody, body: Vec<Box<dyn Node + Send + Sync>>);\n\n \n\n new_node!(FnAttacks, attacked: usize, attacker: usize);\n\n \n\n new_node!(FnShouts, user: usize);\n\n new_node!(FnShoutsSpeak, user: usize, spell_book: usize);\n\n \n\n new_node!(FnWhispers, user: usize);\n\n new_node!(FnWhispersSpeak, user: usize, spell_book: usize);\n\n \n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 44, "score": 6.12755360442543 }, { "content": " self.consume(TokenType::CParen);\n\n let consumed_id = self.ids.iter().find_map(|obj| {\n\n if &obj.0 == &consumed {\n\n Some(obj.1)\n\n } else {\n\n None\n\n 
}\n\n }).expect_compile_error(&format!(\"No character {} exists.\\n{} Define the character before you use it.\", consumed, blue!(\"HINT:\")));\n\n let mut body: Vec<Box<dyn Node + Send + Sync>> = Vec::new();\n\n while !self.peek(TokenType::End, 0).expect_compile_error(\"Expected time warp loop to end with `end`, but got none.\") {\n\n body.push(self.parse_next_statement());\n\n }\n\n self.consume(TokenType::End);\n\n node::FnUsesCasting {\n\n id: uid::generate_uid(),\n\n user,\n\n spell_book: sb,\n\n function: SBFunction::TimeWarp,\n\n parameter: Some(consumed_id),\n\n body: Some(node::FnBody {\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 45, "score": 5.818813211987848 }, { "content": "}\n\n\n\n/// Contains the regex for a token and its type\n\n#[derive(Debug)]\n\npub struct TokenRegex {\n\n /// Token type\n\n pub ttype: TokenType,\n\n /// A regex in perl-compatible syntax\n\n pub regex: Regex\n\n}\n\n\n\n/// Represents a single token\n\n#[derive(Debug, Clone)]\n\npub struct Token {\n\n /// Token type\n\n pub ttype: TokenType,\n\n /// token value\n\n pub value: String\n\n}\n\n\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 46, "score": 5.652332044926257 }, { "content": " FnWhispers,\n\n FnUses,\n\n FnCasting,\n\n // SpellBookFunctions\n\n SbFnSpeak,\n\n SbFnUnZombify,\n\n SbFnConfuse,\n\n SbFnGodSpeech,\n\n SbFnTimeWarp,\n\n SbFnShift,\n\n SbFnCreatePotion,\n\n // Other\n\n From,\n\n // Names\n\n Identifier,\n\n // Implicit types\n\n /// A signed integer\n\n Integer,\n\n // Punctuation\n\n /// =\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 47, "score": 5.564947416215479 }, { "content": " }}\n\n }}\"\n\n ));\n\n }\n\n SBFunction::TimeWarp => {\n\n let body: Vec<String> = if f.body.is_some() {\n\n f.body.as_ref().expect_compile_error(\"Unkown error: expected body, but was empty.\")\n\n .body.iter()\n\n .map(|node| {\n\n self.generate_next(node)\n\n }\n\n ).collect::<Vec<String>>()\n\n } else {\n\n 
Vec::new()\n\n };\n\n let consumed = f.parameter.expect_compile_error(\"Expected a parameter for spell `time_warp`.\");\n\n return_s.push_str(&format!(\n\n /**/\n\n \"{{\n\n let mut loop_times = match &mut i{consumed}.health {{\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 48, "score": 5.4634717449619465 }, { "content": " None\n\n }\n\n }).expect_compile_error(&format!(\"No character {} exists.\\n{} Define the character before you use it.\", attacked, blue!(\"HINT:\")));\n\n \n\n node::FnAttacks {\n\n id: uid::generate_uid(),\n\n attacked: attacked_id,\n\n attacker: attacker_id\n\n }\n\n }\n\n \n\n fn parse_fn_uses(&mut self, ident: &Token) -> Box<dyn Node + Send + Sync> {\n\n let user = &ident.value;\n\n self.consume(TokenType::FnUses);\n\n let user_id = self.ids.iter().find_map(|obj| {\n\n if &obj.0 == user {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 49, "score": 5.3519372427644285 }, { "content": " self.ids.push((name.clone(), id));\n\n \n\n node::SpellBook {\n\n id,\n\n name\n\n }\n\n }\n\n \n\n /// Called when an identifier is at the beginning of a statement\n\n fn parse_identifier(&mut self) -> Box<dyn Node + Send + Sync> {\n\n let ident = self.consume(TokenType::Identifier);\n\n return match {\n\n if let Some(t) = self.peek_type(0) { t }\n\n else { crate::compile_error!(\"Expected an action after identifier {}, but got none.\", ident.value) }\n\n } {\n\n TokenType::FnBuys => Box::new(self.parse_fn_buys(&ident)),\n\n TokenType::FnAttacks => Box::new(self.parse_fn_attacks(&ident)),\n\n TokenType::FnShouts => self.parse_fn_shouts(&ident),\n\n TokenType::FnWhispers => self.parse_fn_whispers(&ident),\n\n TokenType::FnUses => self.parse_fn_uses(&ident),\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 50, "score": 5.3149267237514275 }, { "content": "//! 
Contains functions and macros for informing the user about events\n\n\n\nmod compile_error;\n\npub use compile_error::*;", "file_path": "rpg-compiler/src/user_output/mod.rs", "rank": 51, "score": 5.312782622506817 }, { "content": " compile_error!(\"The actor shouting was not defined.\")\n\n }\n\n }\n\n NodeType::FnUsesCasting => {\n\n // TODO\n\n }\n\n NodeType::FnBody => {\n\n let node: &node::FnBody = parse_dyn_node(node);\n\n self.check_node_types(&node.body);\n\n }\n\n }\n\n });\n\n }\n\n}", "file_path": "rpg-compiler/src/type_checker/mod.rs", "rank": 52, "score": 5.273003176617234 }, { "content": " \n\n fn parse_spell_shift(&mut self, user: usize, sb: usize) -> node::FnUsesCasting {\n\n self.consume(TokenType::SbFnShift);\n\n self.consume(TokenType::OParen);\n\n self.consume(TokenType::CParen);\n\n \n\n node::FnUsesCasting {\n\n id: uid::generate_uid(),\n\n user,\n\n spell_book: sb,\n\n function: SBFunction::Shift,\n\n parameter: None,\n\n body: None\n\n }\n\n }\n\n \n\n fn parse_spell_un_zombify(&mut self, user: usize, sb: usize) -> node::FnUsesCasting {\n\n self.consume(TokenType::SbFnUnZombify);\n\n self.consume(TokenType::OParen);\n\n let zombie = self.consume(TokenType::Identifier).value;\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 53, "score": 5.266916202175016 }, { "content": " }\n\n \n\n fn parse_spell_god_speech(&mut self, user: usize, sb: usize) -> node::FnUsesCasting {\n\n self.consume(TokenType::SbFnGodSpeech);\n\n self.consume(TokenType::OParen);\n\n self.consume(TokenType::CParen);\n\n node::FnUsesCasting {\n\n id: uid::generate_uid(),\n\n user,\n\n spell_book: sb,\n\n function: SBFunction::GodSpeech,\n\n parameter: None,\n\n body: None\n\n }\n\n }\n\n \n\n fn parse_spell_time_warp(&mut self, user: usize, sb: usize) -> node::FnUsesCasting {\n\n self.consume(TokenType::SbFnTimeWarp);\n\n self.consume(TokenType::OParen);\n\n let consumed = self.consume(TokenType::Identifier).value;\n", "file_path": "rpg-compiler/src/parser/mod.rs", 
"rank": 54, "score": 5.201775128582466 }, { "content": "# The official RPG-lang compiler\n\n[RPG](https://github.com/jomy10/rpg-lang) is an esoteric programming language\n\n\n\n## Usage\n\n\n\n```rust\n\nuse rpg_compiler::{compile, compile_with_config, Config};\n\n\n\n// Use one of the compile functions\n\nlet output = compile(\"main.rpg\");\n\nlet output = compile_with_config(\"main.rpg\", Config { max_char: 10, verbose: false });\n\n```\n\n\n\nThe variable output will contained the rust code of the rpg program. This can then be written to a file and compiled using cargo.\n\n\n\n## Language Specification\n\n\n\n### Actors\n\nActors are either a [character](#characters) or a [zombie](#zombies). They have 2 variables\n\ncalled **health** and **attack**. They also have an inventory to hold [items](#items). The maximum amount of actors\n\nallowed per game (e.g. per program) is **10**. Characters will disappear when they die (e.g. when their health is 0),\n\nbut zombies won't disappear when their health is 0 or below. They need to be [converted to a char](#un_zombify)\n\n\n\nCharacters can cast more [spells](#spells), but can't have negative health, while zombies can have negative health.\n\n\n\n#### Characters\n\nCharacters have 2 variables **health** and **attack**, both are unsigned 32-bit integers.\n\n\n\nA character called \"ash\" with health of 5 and attack of 3:\n\n\n\n```\n\nchar ash = (5, 3)\n\n```\n\n\n\n#### Zombies\n\nZombies are actors that can have negative **health** (signed 32-bit integer). They can be converted to\n\na regular character using the [un_zombify](#un_zombify) spell.\n\n\n\n```\n\nzombie walker = (-4, 6)\n\n```\n\n\n\n### Items\n\n#### Potions\n\nPotions have 1 variable called **healing** and can be used to heal an actor. 
Actors buy these potions\n\nfrom [merchants](#merchants) and need to buy them as many times as they will use the potion.\n\n\n\n```\n\npotion p = (5)\n\n```\n\n\n\n#### Spell books\n\nSpell books are used for casting different [spells](#spells).\n\n\n\n```\n\nspellbook eucharidion = ()\n\n```\n\n\n", "file_path": "rpg-compiler/README.md", "rank": 55, "score": 5.123529385087929 }, { "content": "use std::fmt::{Debug, Formatter};\n\nuse lazy_static::lazy_static;\n\nuse regex::Regex;\n\nuse TokenType::*;\n\nuse crate::compile_error;\n\n\n\nlazy_static! {\n\n /// All token types and their regexes\n\n static ref TOKEN_TYPES: [TokenRegex; 26] = [\n\n TokenRegex { ttype: Char, regex: Regex::new(r\"\\A\\bchar\\b\").unwrap() },\n\n TokenRegex { ttype: Zombie, regex: Regex::new(r\"\\A\\bzombie\\b\").unwrap() },\n\n TokenRegex { ttype: Merchant, regex: Regex::new(r\"\\A\\bmerchant\\b\").unwrap() },\n\n TokenRegex { ttype: Potion, regex: Regex::new(r\"\\A\\bpotion\\b\").unwrap() },\n\n TokenRegex { ttype: SpellBook, regex: Regex::new(r\"\\A\\bspellbook\\b\").unwrap() },\n\n TokenRegex { ttype: End, regex: Regex::new(r\"\\A\\bend\\b\").unwrap() },\n\n TokenRegex { ttype: FnAttacks, regex: Regex::new(r\"\\A\\battacks\\b\").unwrap() },\n\n TokenRegex { ttype: FnShouts, regex: Regex::new(r\"\\A\\bshouts\\b\").unwrap() },\n\n TokenRegex { ttype: FnWhispers, regex: Regex::new(r\"\\A\\bwhispers\\b\").unwrap() },\n\n TokenRegex { ttype: FnBuys, regex: Regex::new(r\"\\A\\bbuys\\b\").unwrap() },\n\n TokenRegex { ttype: FnUses, regex: Regex::new(r\"\\A\\buses\\b\").unwrap() },\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 56, "score": 4.99259439500852 }, { "content": " } else {\n\n compile_error!(\"The potion being used was not defined.\")\n\n }\n\n } else {\n\n compile_error!(\"The actor using the potion was not defined.\");\n\n }\n\n }\n\n NodeType::FnShouts => {\n\n let node: &node::FnShouts = parse_dyn_node(node);\n\n if let Some(shouter) = 
self.var_map.get(&node.user) {\n\n if !(shouter == &NodeType::Char || shouter == &NodeType::Zombie) {\n\n compile_error!(\"The one shouting is not an actor.\")\n\n }\n\n } else {\n\n compile_error!(\"The actor shouting was not defined.\")\n\n }\n\n }\n\n NodeType::FnShoutsSpeak => {\n\n let node: &node::FnShoutsSpeak = parse_dyn_node(node);\n\n if let Some(shouter) = self.var_map.get(&node.user) {\n", "file_path": "rpg-compiler/src/type_checker/mod.rs", "rank": 57, "score": 4.966370252368591 }, { "content": " f.attacker\n\n );\n\n }\n\n NodeType::FnUses => {\n\n // Only potions atm\n\n let f: &node::FnUses = parse_dyn_node(node);\n\n // TODO: expect to runtime error\n\n return format!(\n\n \"if let Item::Potion(_, heal) = i{} {{ \\\n\n if game.alive.contains(&{}) {{ \\\n\n i{}.heal(heal);\\\n\n let item_index = i{}.items.iter().enumerate().find_map(|(i, p)| {{\n\n let mut _val = None;\n\n if let Item::Potion(id,val) = p {{if &{} == id {{_val = Some(i);}} else {{_val = None;}} }}\n\n _val\n\n }});\n\n i{}.items.remove(item_index.expect(\\\"The actor does not own the potion it is trying to use.\\\"));\n\n }}\\\n\n }}\",\n\n f.item,\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 58, "score": 4.894656788403312 }, { "content": " compile_error!(\"The one being attacked is not an actor.\")\n\n }\n\n } else {\n\n compile_error!(\"Attacking actor could not be found.\")\n\n }\n\n } else {\n\n compile_error!(\"Actor being attacked could not be found.\")\n\n }\n\n }\n\n NodeType::FnUses => {\n\n let node: &node::FnUses = parse_dyn_node(node);\n\n if let Some(user) = self.var_map.get(&node.user) {\n\n if let Some(potion) = self.var_map.get(&node.item) {\n\n if user == &NodeType::Char || user == &NodeType::Zombie {\n\n if !(potion == &NodeType::Potion) {\n\n compile_error!(\"The item being used is not a potion.\")\n\n }\n\n } else {\n\n compile_error!(\"The user of the potion is not an actor.\")\n\n }\n", "file_path": "rpg-compiler/src/type_checker/mod.rs", 
"rank": 59, "score": 4.834889546276779 }, { "content": "use crate::node::{Node, NodeType, SBFunction};\n\nuse crate::node;\n\nuse crate::user_output::CompileError;\n\nuse crate::node::parse_dyn_node;\n\n\n\npub static mut MAX_CHAR: usize = 10;\n\n\n\n/// Code that allows the language to function\n\nconst STD_CODE: &str = \"\\\n\n#![allow(unused)]\n\nuse std::io::{stdin,stdout,Write};\n\nuse std::fmt::{Formatter, Display};\n\nmacro_rules! red {\n\n ( $str: tt ) => ({\n\n &format!(\\\"\u001b[31m{}\u001b[0m\\\", $str)\n\n });\n\n ( $other: expr) => ({\n\n &format!(\\\"\u001b[31m{}\u001b[0m\\\", $other)\n\n })\n\n}\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 60, "score": 4.816313348501435 }, { "content": " let zombie_id = self.ids.iter().find_map(|obj| {\n\n if &obj.0 == &zombie {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n\n }).expect_compile_error(&format!(\"No zombie {} exists.\\n{} Define the zombie before you use it.\", zombie, blue!(\"HINT:\")));\n\n self.consume(TokenType::CParen);\n\n \n\n node::FnUsesCasting {\n\n id: uid::generate_uid(),\n\n user,\n\n spell_book: sb,\n\n function: SBFunction::UnZombify,\n\n parameter: Some(zombie_id),\n\n body: None\n\n }\n\n }\n\n \n\n fn parse_spell_confuse(&mut self, user: usize, sb: usize) -> node::FnUsesCasting {\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 61, "score": 4.747960605804274 }, { "content": " })\n\n .expect_compile_error(&format!(\"No character {} exists.\\n{} Define the character before you use it.\", user, blue!(\"HINT:\")));\n\n let item_or_sb = self.consume(TokenType::Identifier).value;\n\n let item_or_sb_id = self.ids.iter().find_map(|obj| {\n\n if &obj.0 == &item_or_sb {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n\n })\n\n .expect_compile_error(&format!(\"No item or spellbook {} exists.\\n{} Define the item or spellbook before you use it.\", item_or_sb, blue!(\"HINT:\")));\n\n if let Ok(is_casting) = self.peek(TokenType::FnCasting, 0) {\n\n if is_casting {\n\n // 
Spellbook use\n\n self.consume(TokenType::FnCasting);\n\n if let Some(spell) = self.peek_type(0) {\n\n return match spell {\n\n TokenType::SbFnUnZombify => Box::new(self.parse_spell_un_zombify(user_id, item_or_sb_id)),\n\n TokenType::SbFnConfuse => Box::new(self.parse_spell_confuse(user_id, item_or_sb_id)),\n\n TokenType::SbFnGodSpeech => Box::new(self.parse_spell_god_speech(user_id, item_or_sb_id)),\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 62, "score": 4.564125175943672 }, { "content": " FnBody\n\n }\n\n \n\n #[derive(Clone, Copy, Debug)]\n\n /// Spell book functions\n\n pub enum SBFunction {\n\n UnZombify,\n\n Confuse,\n\n GodSpeech,\n\n TimeWarp,\n\n Shift,\n\n CreatePot\n\n // Speak is always in a shouts or whispers\n\n }\n\n \n\n #[macro_export]\n\n macro_rules! new_node {\n\n ( $name: ident, $($field: ident: $type: ty),* ) => (\n\n #[derive(Debug, Clone)]\n\n pub struct $name {\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 63, "score": 4.450280926138161 }, { "content": " id: uid::generate_uid(),\n\n body\n\n })\n\n }\n\n }\n\n \n\n /// Includes regular shout and spellbook speak version\n\n fn parse_fn_shouts(&mut self, ident: &Token) -> Box<dyn Node + Send + Sync> {\n\n self.consume(TokenType::FnShouts);\n\n let user = &ident.value;\n\n let user_id = self.ids.iter().find_map(|obj| if &obj.0 == user { Some(obj.1) } else { None })\n\n .expect_compile_error(&format!(\"No character {} exists.\\n{} Define the character before you use it.\", user, blue!(\"HINT:\")));\n\n if let Ok(is_casting) = self.peek(TokenType::FnCasting, 1) {\n\n if let Ok(is_speak) = self.peek(TokenType::SbFnSpeak, 2) {\n\n if is_casting && is_speak {\n\n let spellbook = self.consume(TokenType::Identifier).value;\n\n let sb_id = self.ids.iter().find_map(|obj| if &obj.0 == &spellbook { Some(obj.1) } else { None })\n\n .expect_compile_error(&format!(\"No spellbook {} exists.\\n{} Define the spellbook before you use it.\", spellbook, blue!(\"HINT:\")));\n\n 
self.consume(TokenType::FnCasting);\n\n self.consume(TokenType::SbFnSpeak);\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 64, "score": 4.431691852625431 }, { "content": " }\n\n } else {\n\n compile_error!(\"The actor shouting was not defined.\")\n\n }\n\n }\n\n NodeType::FnWhispersSpeak => {\n\n let node: &node::FnWhispersSpeak = parse_dyn_node(node);\n\n if let Some(whisperer) = self.var_map.get(&node.user) {\n\n if let Some(spellbook) = self.var_map.get(&node.spell_book) {\n\n if whisperer == &NodeType::Char || whisperer == &NodeType::Zombie {\n\n if !(spellbook == &NodeType::SpellBook) {\n\n compile_error!(\"The actor is not using a spellbook to shout.\")\n\n }\n\n } else {\n\n compile_error!(\"The one shouting is not an actor.\")\n\n }\n\n } else {\n\n compile_error!(\"The spellbook used for speaking was not defined.\")\n\n }\n\n } else {\n", "file_path": "rpg-compiler/src/type_checker/mod.rs", "rank": 65, "score": 4.387750864498388 }, { "content": " let user = &ident.value;\n\n let user_id = self.ids.iter().find_map(|obj| if &obj.0 == user { Some(obj.1) } else { None })\n\n .expect_compile_error(&format!(\"No character {} exists.\\n{} Define the character before you use it.\", user, blue!(\"HINT:\")));\n\n if let Ok(is_casting) = self.peek(TokenType::FnCasting, 1) {\n\n if let Ok(is_speak) = self.peek(TokenType::SbFnSpeak, 2) {\n\n if is_casting && is_speak {\n\n let spellbook = self.consume(TokenType::Identifier).value;\n\n let sb_id = self.ids.iter().find_map(|obj| if &obj.0 == &spellbook { Some(obj.1) } else { None })\n\n .expect_compile_error(&format!(\"No spellbook {} exists.\\n{} Define the spellbook before you use it.\", spellbook, blue!(\"HINT:\")));\n\n self.consume(TokenType::FnCasting);\n\n self.consume(TokenType::SbFnSpeak);\n\n self.consume(TokenType::OParen);\n\n self.consume(TokenType::CParen);\n\n return Box::new(node::FnWhispersSpeak {\n\n id: uid::generate_uid(),\n\n user: user_id,\n\n spell_book: sb_id\n\n });\n\n }\n\n }\n", 
"file_path": "rpg-compiler/src/parser/mod.rs", "rank": 66, "score": 4.247740533607299 }, { "content": " }\n\n Box::new(\n\n node::FnWhispers {\n\n id: uid::generate_uid(),\n\n user: user_id\n\n }\n\n )\n\n }\n\n \n\n /// Consumes the next token\n\n ///\n\n /// Panics if the next token does not match the expected_token_type\n\n fn consume(&mut self, expected_type: TokenType) -> Token {\n\n let token = self.tokens.remove(0);\n\n if token.ttype == expected_type {\n\n token\n\n } else {\n\n crate::compile_error!(\"Expected token type {} but got {}\", expected_type.to_string(), token.ttype.to_string())\n\n }\n\n }\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 67, "score": 4.106768527409571 }, { "content": " if let Some(spellbook) = self.var_map.get(&node.spell_book) {\n\n if shouter == &NodeType::Char || shouter == &NodeType::Zombie {\n\n if !(spellbook == &NodeType::SpellBook) {\n\n compile_error!(\"The actor is not using a spellbook to shout.\")\n\n }\n\n } else {\n\n compile_error!(\"The one shouting is not an actor.\")\n\n }\n\n } else {\n\n compile_error!(\"The spellbook used for speaking was not defined.\")\n\n }\n\n } else {\n\n compile_error!(\"The actor shouting was not defined.\")\n\n }\n\n }\n\n NodeType::FnWhispers => {\n\n let node: &node::FnWhispers = parse_dyn_node(node);\n\n if let Some(whisperer) = self.var_map.get(&node.user) {\n\n if !(whisperer == &NodeType::Char || whisperer == &NodeType::Zombie) {\n\n compile_error!(\"The one shouting is not an actor.\")\n", "file_path": "rpg-compiler/src/type_checker/mod.rs", "rank": 68, "score": 4.081796621838475 }, { "content": " self.consume(TokenType::SbFnConfuse);\n\n self.consume(TokenType::OParen);\n\n let confused_char = self.consume(TokenType::Identifier).value;\n\n self.consume(TokenType::CParen);\n\n let confused_char_id = self.ids.iter().find_map(|obj| {\n\n if &obj.0 == &confused_char {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n\n }).expect_compile_error(&format!(\"No character or 
zombie {} exists.\\n{} Define the character or zombie before you use it.\", confused_char, blue!(\"HINT:\")));\n\n \n\n node::FnUsesCasting {\n\n id: uid::generate_uid(),\n\n user,\n\n spell_book: sb,\n\n function: SBFunction::Confuse,\n\n parameter: Some(confused_char_id),\n\n body: None\n\n }\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 69, "score": 4.03069533895485 }, { "content": " merchant: *merchant_id\n\n }\n\n }\n\n \n\n /// Parses an attacks expression `c1 attacks c2`\n\n fn parse_fn_attacks(&mut self, ident: &Token) -> node::FnAttacks {\n\n let attacker = &ident.value;\n\n self.consume(TokenType::FnAttacks);\n\n let attacked = self.consume(TokenType::Identifier).value;\n\n let attacker_id = self.ids.iter().find_map(|obj| {\n\n if &obj.0 == attacker {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n\n }).expect_compile_error(&format!(\"No character {} exists.\\n{} Define the character before you use it.\", attacker, blue!(\"HINT:\")));\n\n let attacked_id = self.ids.iter().find_map(|obj| {\n\n if &obj.0 == &attacked {\n\n Some(obj.1)\n\n } else {\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 70, "score": 3.9773240896747044 }, { "content": " let item_id = &self.ids.iter().find_map(|obj| {\n\n if &obj.0 == &item {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n\n }).expect_compile_error(&format!(\"No item {} exists.\\n{} Define the item before you use it.\", item, blue!(\"HINT:\")));\n\n // Same note\n\n let merchant_id = &self.ids.iter().find_map(|obj| {\n\n if &obj.0 == &merchant {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n\n }).expect_compile_error(&format!(\"No merchant {} exists.\\n{} Define the merchant before you use it.\", merchant, blue!(\"HINT:\")));\n\n \n\n node::FnBuys {\n\n id: uid::generate_uid(),\n\n user: *user_id,\n\n item: *item_id,\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 71, "score": 3.659686445047024 }, { "content": " self.consume(TokenType::OParen);\n\n let potion = 
self.consume(TokenType::Identifier).value;\n\n let potion_id = self.ids.iter().find_map(|obj| {\n\n if &obj.0 == &potion {\n\n Some(obj.1)\n\n } else {\n\n None\n\n }\n\n }).expect_compile_error(&format!(\"No potion {} exists.\\n{} Define the potion before you use it.\", potion, blue!(\"HINT:\")));\n\n self.consume(TokenType::CParen);\n\n \n\n node::FnUsesCasting {\n\n id: uid::generate_uid(),\n\n user,\n\n spell_book: sb,\n\n function: SBFunction::CreatePot,\n\n parameter: Some(potion_id),\n\n body: None\n\n }\n\n }\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 72, "score": 3.6314416487632415 }, { "content": " } else {\n\n *val -= a;\n\n }\n\n }\n\n Self::Zombie(val) => {\n\n *val -= a as i32;\n\n }\n\n }\n\n }\n\n fn heal(&mut self, h: u32) {\n\n match self {\n\n Self::Char(val) => {\n\n if *val == 0 {\n\n runtime_error!(\\\"Cannot heal a dead actor.\\\");\n\n } else {\n\n *val += h;\n\n }\n\n }\n\n Self::Zombie(val) => {\n\n *val += h as i32;\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 73, "score": 3.516338634993753 }, { "content": " else {{ runtime_error!(\\\"Wrong type, only characters can whisper speak.\\\") }}\",\n\n );\n\n }\n\n NodeType::FnUsesCasting => {\n\n let f: &node::FnUsesCasting = parse_dyn_node(node);\n\n let usr = f.user;\n\n let item = f.spell_book;\n\n let mut return_s = format!(\"if !i{usr}.items.contains(&&i{item}) {{ runtime_error!(\\\"The spell cannot be called, because the caster doesn't own a spellbook.\\\") }};\");\n\n match f.function {\n\n SBFunction::UnZombify => {\n\n let id = f.parameter.expect_compile_error(\"Un_zombify called without zombie parameter.\");\n\n return_s.push_str(&format!(\n\n \"let i{id} = if let ActorHealth::Zombie(h) = i{id}.health {{\\\n\n if h <= 0 {{ game.rm_actor({id}); i{id} }} else {{ Actor::new({id}, ActorHealth::Char(h as u32), i{id}.attack) }}\\\n\n }} else {{runtime_error!(\\\"Tried to call `un_zombify` on a non-zombie.\\\")}};\"\n\n ));\n\n }\n\n 
SBFunction::Confuse => {\n\n let id = f.parameter.expect_compile_error(\"Confuse called without parameter.\");\n\n return_s.push_str(&format!(\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 74, "score": 3.4295898872694 }, { "content": " for token_type in TOKEN_TYPES.iter() {\n\n // m = match\n\n for m in token_type.regex.captures_iter(&self.code) {\n\n // We will only have 1 match because of \\A\n\n if let Some(_match) = m.get(0) {\n\n let m = _match.as_str().to_string();\n\n self.code = self.code.strip_prefix(m.as_str()).expect(\"Unexpected error: could not strip match from code\");\n\n return Token::new(token_type.ttype, m);\n\n }\n\n }\n\n }\n\n \n\n // Have no matches\n\n let first_token = self.code.split(|c| c == ' ').collect::<Vec<&str>>();\n\n if let Some(first_token) = first_token.get(0) {\n\n compile_error!(\"Unexpected token: found {}\", first_token)\n\n } else {\n\n compile_error!(\"Expected token but got None\")\n\n }\n\n }\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 75, "score": 3.1158659570328497 }, { "content": "use regex::Regex;\n\n\n", "file_path": "rpg-compiler/src/tokenizer/rm_comments.rs", "rank": 76, "score": 3.0480174439241834 }, { "content": " self.consume(TokenType::OParen);\n\n self.consume(TokenType::CParen);\n\n return Box::new(node::FnShoutsSpeak {\n\n id: uid::generate_uid(),\n\n user: user_id,\n\n spell_book: sb_id\n\n });\n\n }\n\n }\n\n }\n\n Box::new(\n\n node::FnShouts {\n\n id: uid::generate_uid(),\n\n user: user_id\n\n }\n\n )\n\n }\n\n \n\n fn parse_fn_whispers(&mut self, ident: &Token) -> Box<dyn Node + Send + Sync> {\n\n self.consume(TokenType::FnWhispers);\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 77, "score": 3.008533665285312 }, { "content": " f.user,\n\n f.user,\n\n f.user,\n\n f.item,\n\n f.user\n\n );\n\n }\n\n NodeType::FnShouts => {\n\n let expr: &node::FnShouts = parse_dyn_node(node);\n\n let user = expr.user;\n\n return format!(\n\n \"if 
!game.alive.contains(&{user}) {{ runtime_error!(\\\"Dead actors can't shout.\\\") }} \\\n\n else {{ println!(\\\"{{}}\\\", i{user}.clone().health()); }}\",\n\n );\n\n }\n\n NodeType::FnShoutsSpeak => {\n\n let expr: &node::FnShoutsSpeak = parse_dyn_node(node);\n\n let item = expr.spell_book;\n\n let usr = expr.user;\n\n return format!(\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 78, "score": 2.9810823830807687 }, { "content": " \"let i{} = Item::SpellBook;\",\n\n sb.id\n\n );\n\n }\n\n NodeType::FnBuys => {\n\n let f: &node::FnBuys = parse_dyn_node(node);\n\n return format!(\n\n // TODO: display name of dead actor\n\n \"if game.alive.contains(&{}) {{ i{}.items.push(&i{}); }} else {{ runtime_error!(\\\"Cannot add an item to the inventory of a dead actor.\\\") }}\",\n\n f.user,\n\n f.user,\n\n f.item\n\n );\n\n }\n\n NodeType::FnAttacks => {\n\n let f: &node::FnAttacks = parse_dyn_node(node);\n\n return format!(\n\n \"if game.alive.contains(&{}) {{ i{}.attacked(i{}.attack, &mut game); }} else {{ runtime_error!(\\\"A dead actor cannot attack.\\\") }}\",\n\n f.attacker,\n\n f.attacked,\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 79, "score": 2.9016541928364705 }, { "content": "#!/usr/bin/swift\n\nimport Foundation\n\n\n\nlet sha256_un_parsed: String = ProcessInfo.processInfo.environment[\"SHA\"] ?? 
\"\"\n\nlet chars = Array(sha256_un_parsed)\n\nvar sha256: String = \"\"\n\nfor char in chars {\n\n if char == \" \" {\n\n break\n\n } else {\n\n sha256.append(char)\n\n }\n\n}\n\nprint(sha256)", "file_path": "rpg-cli/scripts/homebrew/parse_sha.swift", "rank": 80, "score": 2.85376956057965 }, { "content": "### Un_zombify\n\nConverts a character to a zombie.\n\n\n\n```\n\njames_brown uses spell_book casting un_zombify(zombie1)\n\n```\n\n\n\n### Confuse\n\nWhen a character is confused, it will output its health - 1 when shouting or whispering.\n\n\n\n```\n\nsteven uses spell_book casting confuse(other_char)\n\nother_char shouts # Will output the other_char's health - 1 e.g. other_char = (1,2), so the output will be: 0\n\n```\n\n\n\n### Create_potion\n\nCharacters can change the value of a potion\n\n\n\n```\n\nchar sans = (6, 1)\n\npotion p = (5)\n\nsans buys p from merchant\n\n\n\nsans uses spellbook casting create_potion(p)\n\nsans uses p\n\nsans shouts\n\n# Output:\n\n# 12 (6 + 6)\n\n```\n\n\n\n### Shift\n\nShift swaps a character's health and attack.\n\n\n\n```\n\nchar Ness = (2,1)\n\nNess uses spellbook casting shift()\n\nNess shouts\n\n# Output:\n\n# 1\n", "file_path": "rpg-compiler/README.md", "rank": 81, "score": 2.85347897130253 }, { "content": "#### Characters\n\nCharacters have 2 variables **health** and **attack**, both are unsigned 32-bit integers. \n\n\n\nA character called \"ash\" with health of 5 and attack of 3:\n\n\n\n```\n\nchar ash = (5, 3)\n\n```\n\n\n\n#### Zombies\n\nZombies are actors that can have negative **health** (signed 32-bit integer). They can be converted to \n\na regular character using the [un_zombify](#un_zombify) spell.\n\n\n\n```\n\nzombie walker = (-4, 6)\n\n```\n\n\n\n### Items\n\n#### Potions\n\nPotions have 1 variable called **healing** and can be used to heal an actor. 
Actors buy these potions \n\nfrom [merchants](#merchants) and need to buy them as many times as they will use the potion.\n\n\n\n```\n\npotion p = (5)\n\n```\n\n\n\n#### Spell books\n\nSpell books are used for casting different [spells](#spells).\n\n\n\n```\n\nspellbook eucharidion = ()\n\n```\n\n\n\n### Merchants\n\nMerchants sell items. Actors can buy these items and will hold them in their inventory.\n\n\n\n```\n\nmerchant cabbage = ()\n\n```\n\n\n\nActors buy items from merchants using `buys`\n\n\n\n```\n\nactor buys item from merchant\n\n```\n\n\n\n### Attacking\n\nActors can attack each other, this subtracts the attack of the attacking actor from the health of the actor being attacked.\n\n\n\n``` \n\nchar a = (10, 3)\n\nchar b = (2, 5)\n\n\n\nb attacks a\n\n# a will now have 10 - 5 = 5 health\n\n```\n\n\n\n### Using items\n\nActors can use items in their inventory.\n\n\n\n```\n\npotion p = (5)\n\nchar a = (5,0)\n\nmerchant m = ()\n\na buys p from m\n\n\n\na uses p\n\n# a will now have 5 + 5 = 10 health\n\n```\n\n\n\n### Outputting to the screen\n\n\n\n#### Shouts\n\nCharacters can shout their health. This will output a new line.\n\n\n\n```\n\nchar a = (1,0)\n\na shouts\n\na shouts\n\n# output:\n\n# 1\n\n# 1\n\n```\n\n\n\n#### Whispers\n\nCharacters can whisper their health. This will output without a new line.\n\n\n\n```\n\nchar a = (1,0)\n\na whispers\n\na whispers\n\n# output: \n\n# 11\n\n```\n\n\n\n### Spells\n\nCharacters can use [spell books](#spell-books) to cast spells. 
The spell name is\n\nfollowed by `()` or `(param)`\n\n\n\n#### God_speech\n\nThis will read whatever number the user inputs\n\n\n\n```\n\ninput uses spellbook casting god_speech()\n\n```\n\n\n", "file_path": "README.md", "rank": 82, "score": 2.780032387255195 }, { "content": " TokenRegex { ttype: FnCasting, regex: Regex::new(r\"\\A\\bcasting\\b\").unwrap() },\n\n TokenRegex { ttype: SbFnSpeak, regex: Regex::new(r\"\\A\\bspeak\\b\").unwrap() },\n\n TokenRegex { ttype: SbFnUnZombify, regex: Regex::new(r\"\\A\\bun_zombify\\b\").unwrap() },\n\n TokenRegex { ttype: SbFnConfuse, regex: Regex::new(r\"\\A\\bconfuse\\b\").unwrap() },\n\n TokenRegex { ttype: SbFnGodSpeech, regex: Regex::new(r\"\\A\\bgod_speech\\b\").unwrap() },\n\n TokenRegex { ttype: SbFnTimeWarp, regex: Regex::new(r\"\\A\\btime_warp\\b\").unwrap() },\n\n TokenRegex { ttype: SbFnShift, regex: Regex::new(r\"\\A\\bshift\\b\").unwrap() },\n\n TokenRegex { ttype: SbFnCreatePotion, regex: Regex::new(r\"\\A\\bcreate_potion\\b\").unwrap() },\n\n TokenRegex { ttype: From, regex: Regex::new(r\"\\A\\bfrom\\b\").unwrap() },\n\n // Identifier also matches all of the above, which is why it should be below all of them\n\n // This means that all of the above are reserved words\n\n TokenRegex { ttype: Identifier, regex: Regex::new(r\"\\A\\b[a-zA-Z_]\\w*\\b\").unwrap() },\n\n TokenRegex { ttype: Integer, regex: Regex::new(r\"\\A-?[0-9]+\").unwrap() },\n\n TokenRegex { ttype: Equals, regex: Regex::new(r\"\\A=\").unwrap() },\n\n TokenRegex { ttype: OParen, regex: Regex::new(r\"\\A\\(\").unwrap() },\n\n TokenRegex { ttype: CParen, regex: Regex::new(r\"\\A\\)\").unwrap() },\n\n TokenRegex { ttype: Comma, regex: Regex::new(r\"\\A,\").unwrap() },\n\n ];\n\n}\n\n\n", "file_path": "rpg-compiler/src/tokenizer/tokenizer.rs", "rank": 83, "score": 2.7625146187102096 }, { "content": "macro_rules! 
blue {\n\n ( $str: tt ) => ({\n\n &format!(\\\"\\x1b[34m{}\\x1b[0m\\\", $str)\n\n });\n\n ( $other: expr) => ({\n\n &format!(\\\"\\x1b[34m{}\\x1b[0m\\\", $other)\n\n })\n\n}\n\nmacro_rules! cyan {\n\n ( $str: tt ) => ({\n\n &format!(\\\"\u001b[36m{}\u001b[0m\\\", $str)\n\n });\n\n ( $other: expr) => ({\n\n &format!(\\\"\u001b[36m{}\u001b[0m\\\", $other)\n\n })\n\n}\n\nmacro_rules! runtime_error {\n\n ($( $arg: tt)*) => ({\n\n let s = format!($($arg)*);\n\n eprintln!(\\\"{}\n\n{}\\\", cyan!(\\\"Runtime error\\\"), red!(s));\n\n std::process::exit(1)\n\n })\n\n}\n\n#[derive(Clone)]\n\n/// Either a char or a zombie\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 84, "score": 2.7171107231639597 }, { "content": "### Merchants\n\nMerchants sell items. Actors can buy these items and will hold them in their inventory.\n\n\n\n```\n\nmerchant cabbage = ()\n\n```\n\n\n\nActors buy items from merchants using `buys`\n\n\n\n```\n\nactor buys item from merchant\n\n```\n\n\n\n### Attacking\n\nActors can attack each other, this subtracts the attack of the attacking actor from the health of tha actor being attacked.\n\n\n\n``` \n\nchar a = (10, 3)\n\nchar b = (2, 5)\n\n\n\nb attacks a\n\n# a will now have 10 - 5 = 5 health\n\n```\n\n\n\n### Using items\n\nActors can use items in their inventory.\n\n\n\n```\n\npotion p = (5)\n\nchar a = (5,0)\n\nmerchant m = ()\n\na buys p from m\n\n\n\na uses p\n\n# a will now have 5 + 5 = 10 health\n\n```\n\n\n\n### Outputting to the screen\n\n\n\n#### Shouts\n\nCharacters can shout their health. This will output a new line.\n\n\n\n```\n\nchar a = (1,0)\n\na shouts\n\na shouts\n\n# output:\n\n# 1\n\n# 1\n\n```\n\n\n\n#### Whispers\n\nCharacters can whisper their health. This will output without a new line.\n\n\n\n```\n\nchar a = (1,0)\n\na whispers\n\na whispers\n\n# output: \n\n# 11\n\n```\n\n\n\n### Spells\n\nCharacters can use [spell books](#spell-books) to cast spells. 
They are casting using a spellbook and the spell name is\n\nfollowed by `()` or `(param)`\n\n\n\n### God_speech\n\nThis will read whatever number the user inputs\n\n\n\n```\n\ninput uses spellbook casting god_speech()\n\n```\n\n\n\n#### Speak\n\nThis will print the ASCII value of the health of the actor\n\n\n\n```\n\nchar jeremy = (33,1)\n\nspellbook eucharidium = ()\n\nmerchant cabbage_man = ()\n\n\n\njeremy buys eucharidion from cabbage_man\n\n\n\njeremy shouts eucharidion casting speak()\n\n# output:\n\n# !\n\n```\n\n\n\n#### Time_warp\n\nThe time warp performs the lines beneath it until it reaches the `end` keyword, at which point it will go back to the\n\nbeginning and performs the lines again. To do this, it will require an offer. It will perform this loop until the offered\n\ncharacter has no health left.\n\n\n\nThe health of the character being consumed is subtracted at the end of the lines.\n\n\n\n```\n\n# We have 2 characters: david (5 health) and ella (5 health). David has a spellbook in its inventories\n\ndavid uses spellbook casting time_warp(ella)\n\n\tella shouts\n\nend\n\n\n\n# Output:\n\n# 5\n\n# 4\n\n# 3\n\n# 2\n\n# 1\n\n```\n\n\n", "file_path": "rpg-compiler/README.md", "rank": 85, "score": 2.5072119768631698 }, { "content": "#!/usr/bin/swift\n\nimport Foundation\n\n\n\ndo {\n\n let token = try String(contentsOfFile: \"scripts/token.txt\")\n\n print(\"\\(token.trimmingCharacters(in: .whitespacesAndNewlines))\")\n\n} catch {\n\n print(\"\\(error)\")\n\n}\n", "file_path": "rpg-cli/scripts/get_token.swift", "rank": 86, "score": 2.4338620508148354 }, { "content": " let sp = ColoredSpinner::new(\"Tokenizing...\".to_string());\n\n let mut tokens = Tokenizer::new(&code).tokenize();\n\n sp.stop(); println!();\n\n let sp = ColoredSpinner::new(\"Parsing...\".to_string());\n\n let parsed = Parser::new(&mut tokens).parse();\n\n let parsed = Arc::new(parsed);\n\n sp.stop(); println!();\n\n // if let Some(handler) = &handler { &handler.stop_parsing; }\n\n // 
TODO: pb for type checker\n\n let thread_parsed = parsed.clone();\n\n let type_checker = thread::spawn(move || {\n\n TypeChecker::new(&thread_parsed).check_types();\n\n });\n\n let sp = ColoredSpinner::new(\"Generating...\".to_string());\n\n let generated = Generator::new(&parsed).generate();\n\n sp.stop(); println!();\n\n // if let Some(handler) = &handler { &handler.stop_generating; }\n\n type_checker.join().expect(\"Unable to join type-checker thread.\");\n\n generated\n\n}\n\n\n", "file_path": "rpg-compiler/src/compile/mod.rs", "rank": 87, "score": 2.4178824267546974 }, { "content": "## Contributing\n\nI welcome all contributions. Feel free to open an issue about anything and optionally a pull request.\n\n\n\nAlso, if you want to, please take a look at the issues, some of them are improvements I want to make, others are bugs \n\nthat need ot be fixed. I could use soe help with them.\n\n\n\n## Using the CLI\n\nTo compile your rpg program, you can use the cli:\n\n\n\n```bash\n\nrpgc path/to/source.rpg\n\n```\n\n\n\nThis command will create an executable file called `rpg` at your current location (`rpg.exe` on Windows). Make sure you don't have a file called rpg there, as it will be overwritten.\n\n\n\nTo run your program, run `./rpg`. You can also combine these 2:\n\n\n\n```bash\n\nrpgc path/to/source.rpg && ./rpg\n\n```\n\n\n\nIf something doesn't seem to work, you can always use the `-v` or `--verbose` flags to see if any errors occured. 
\n\nIf they did, please open an issue as these kinds of errors are usually bugs in the compiler.\n\n\n\n```bash\n\nrpgc rpgc path/to/source.rpg -v\n\n```\n\n\n\nFor more commands, use `rpgc help`.\n\n\n\nIf you have installed the cli using the manual downloads, you can run it using `./rpgc` or by moving it to your bin directory.\n\n\n\n## Examples\n\nYou can find the examples in the [examples](examples) folder.\n\n\n\n## Bugs\n\nI have tested the compiler, but since this is still the first version, there might be some bugs, so feel free to open an issue.\n\n\n\n## Language Specification\n\n\n\n### Actors\n\nActors are either a [character](#characters) or a [zombie](#zombies). They have 2 variables \n\ncalled **health** and **attack**. They also have an inventory to hold [items](#items). The maximum amount of actors\n\nallowed per game (e.g. per program) is **10**. Characters will disappear when they die (e.g. when their health is 0),\n\nbut zombies won't disappear when their health is 0 or below. 
They need to be [converted to a char](#un_zombify)\n\n\n\nCharacters can cast more [spells](#spells), but can't have negative health, while zombies can have negative health.\n\n\n", "file_path": "README.md", "rank": 88, "score": 2.3960711801715684 }, { "content": "#!/usr/bin/swift\n\n// Environment variables\n\n// BUILD=BOOL (default=false)\n\n// This script is not currenlty used\n\n\n\nimport Foundation\n\n\n\n/// Runs a shell command and returns the output\n", "file_path": "rpg-cli/scripts/release.swift", "rank": 89, "score": 2.365975813580307 }, { "content": " \"if !i{usr}.items.contains(&&i{item}) {{ runtime_error!(\\\"The spell cannot be called, because the caster doesn't own a spellbook.\\\") }};\\\n\n if !game.alive.contains(&{usr}) {{ runtime_error!(\\\"Dead actors can't shout.\\\") }} else if let ActorHealth::Char(val) = i{usr}.health() {{ println!(\\\"{{}}\\\", (val as u8) as char); }} else {{ runtime_error!(\\\"Wrong type, only characters can shout speak.\\\") }}\",\n\n );\n\n }\n\n NodeType::FnWhispers => {\n\n let expr: &node::FnWhispers = parse_dyn_node(node);\n\n return format!(\n\n \"if !i{}.validate_actor() {{ runtime_error!(\\\"Dead actors can't shout.\\\") }} print!(\\\"{{}}\\\", i{}.health());\",\n\n expr.user,\n\n expr.user\n\n );\n\n }\n\n NodeType::FnWhispersSpeak => {\n\n let expr: &node::FnWhispersSpeak = parse_dyn_node(node);\n\n let item = expr.spell_book;\n\n let usr = expr.user;\n\n return format!(\n\n \"if !i{usr}.items.contains(&&i{item}) {{ runtime_error!(\\\"The spell cannot be called, because the caster doesn't own a spellbook.\\\") }}; \\\n\n if !i{usr}.validate_actor() {{ runtime_error!(\\\"Dead actors can't shout.\\\") }} \\\n\n else if let ActorHealth::Char(val) = i{usr}.health() {{ print!(\\\"{{}}\\\", (val as u8) as char); }} \\\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 90, "score": 2.340427424355458 }, { "content": "#### Speak\n\nThis will print the ASCII value of the health of the 
actor\n\n\n\n```\n\nchar jeremy = (33,1)\n\nspellbook eucharidium = ()\n\nmerchant cabbage_man = ()\n\n\n\njeremy buys eucharidion from cabbage_man\n\n\n\njeremy shouts eucharidion casting speak()\n\n# output:\n\n# !\n\n```\n\n\n\n#### Time_warp\n\nThe time warp performs the lines beneath it until it reaches the `end` keyword, at which point it will go back to the \n\nbeginning and performs the lines again. To do this, it will require an offer. It will perform this loop until the offered \n\ncharacter has no health left.\n\n\n\nThe health of the character being consumed is subtracted at the end of the lines.\n\n\n\n```\n\n# We have 2 characters: david (5 health) and ella (5 health). David has a spellbook in its inventories\n\ndavid uses spellbook casting time_warp(ella)\n\n\tella shouts\n\nend\n\n\n\n# Output:\n\n# 5\n\n# 4\n\n# 3\n\n# 2\n\n# 1\n\n```\n\n\n\n#### Un_zombify\n\nConverts a zombie to a character.\n\n\n\n```\n\njames_brown uses spell_book casting un_zombify(zombie1)\n\n```\n\n\n\n#### Confuse \n\nWhen a character is confused, it will output its health - 1 when shouting or whispering.\n\n\n\n```\n\nsteven uses spell_book casting confuse(other_char)\n\nother_char shouts # Will output the other_char's health - 1 e.g. 
other_char = (1,2), so the output will be: 0\n\n```\n\n\n\n#### Create_potion\n\nCharacters can change the value of a potion\n\n\n\n```\n\nchar sans = (6, 1)\n\npotion p = (5)\n\nsans buys p from merchant\n\n\n\nsans uses spellbook casting create_potion(p)\n\nsans uses p\n\nsans shouts\n\n# Output:\n\n# 12 (6 + 6)\n\n```\n\n\n\n#### Shift\n\nShift swaps a character's health and attack.\n\n\n\n```\n\nchar Ness = (2,1)\n\nNess uses spellbook casting shift()\n\nNess shouts\n\n# Output:\n\n# 1\n\n```\n\n\n\n## Questions\n\nIf you have any questions, feel free to ask them by opening an issue!\n\n\n\n## Other links\n\n- [Wiki page](https://esolangs.org/wiki/Rpg)\n\n\n\n## License\n\nThe compiler and programming language are licensed under the [MIT License](LICENSE).\n", "file_path": "README.md", "rank": 91, "score": 2.2895716775968973 }, { "content": "# RPGC\n\nThe official rpg compiler cli tool.\n\n\n\nNOTE: The compiler has not been tested on Windows, but it should work fine. If you use Windows and this worked for\n\nyou, pleade open a pull request and remove this line :)\n", "file_path": "rpg-cli/README.md", "rank": 92, "score": 2.264626677641612 }, { "content": " \n\n /// Returns the token type of the next token. 
Returns None if there is no token at the given\n\n /// `offset` position.\n\n fn peek_type(&self, offset: usize) -> Option<TokenType> {\n\n if let Some(token) = self.tokens.get(offset) {\n\n return Some(token.ttype);\n\n } else {\n\n None\n\n }\n\n }\n\n \n\n /// Peeks at the token at the index of `offset` and returns true if the expected\n\n /// token was found, false otherwise.\n\n ///\n\n /// Returns err if there was no token at the offset index\n\n fn peek(&self, expected_type: TokenType, offset: usize) -> Result<bool, String> {\n\n if let Some(token) = self.tokens.get(offset) {\n\n Ok(token.ttype == expected_type)\n\n } else {\n\n Err(\"Incomplete syntax\".to_string())\n", "file_path": "rpg-compiler/src/parser/mod.rs", "rank": 93, "score": 2.2590471683823323 }, { "content": " }\n\n}\n\n\n\nimpl<T> CompileError<T> for io::Result<T> {\n\n fn expect_compile_error(self, msg: &str) -> T {\n\n match self {\n\n Ok(t) => t,\n\n Err(e) => unsafe {\n\n if VERBOSE {\n\n crate::compile_error!(\"{}\\n== VERBOSE OUTPUT ==\\n{}\", msg, e)\n\n } else {\n\n crate::compile_error!(\"{}\", msg)\n\n }\n\n }\n\n }\n\n }\n\n}", "file_path": "rpg-compiler/src/user_output/compile_error.rs", "rank": 94, "score": 2.2357202820777933 }, { "content": " } else {\n\n compile_error!(\"No merchant found while buying.\")\n\n }\n\n } else {\n\n compile_error!(\"Item you are trying to buy was not found.\")\n\n }\n\n } else {\n\n compile_error!(\"Actor that is trying to buy not found.\")\n\n }\n\n \n\n }\n\n NodeType::FnAttacks => {\n\n let node: &node::FnAttacks = parse_dyn_node(node);\n\n if let Some(attacked) = self.var_map.get(&node.attacked) {\n\n if let Some(attacker) = self.var_map.get(&node.attacker) {\n\n if attacked == &NodeType::Char || attacked == &NodeType::Zombie {\n\n if !(attacker == &NodeType::Char || attacker == &NodeType::Zombie) {\n\n compile_error!(\"The one attacking is not an actor.\")\n\n }\n\n } else {\n", "file_path": "rpg-compiler/src/type_checker/mod.rs", "rank": 95, 
"score": 2.2262741932528933 }, { "content": " fn expect_compile_error(self, msg: &str) -> T {\n\n match self {\n\n Ok(t) => t,\n\n Err(e) => unsafe {\n\n if VERBOSE {\n\n crate::compile_error!(\"{}\\n== VERBOSE OUTPUT ==\\n{}\", msg, e)\n\n } else {\n\n crate::compile_error!(\"{}\", msg)\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T> CompileError<T> for Option<T> {\n\n fn expect_compile_error(self, msg: &str) -> T {\n\n match self {\n\n Some(t) => t,\n\n None => crate::compile_error!(\"{}\", msg)\n\n }\n", "file_path": "rpg-compiler/src/user_output/compile_error.rs", "rank": 96, "score": 1.959502112757606 }, { "content": " NodeType::SpellBook => {\n\n let node: &node::SpellBook = parse_dyn_node(node);\n\n self.var_map.insert(node.id, NodeType::SpellBook);\n\n }\n\n NodeType::FnBuys => {\n\n let node: &node::FnBuys = parse_dyn_node(node);\n\n if let Some(user) = self.var_map.get(&node.user) {\n\n if let Some(item) = self.var_map.get(&node.item) {\n\n if let Some(merchant) = self.var_map.get(&node.merchant) {\n\n if user == &NodeType::Char || user == &NodeType::Zombie {\n\n if item == &NodeType::Potion || item == &NodeType::SpellBook {\n\n if !(merchant == &NodeType::Merchant) {\n\n compile_error!(\"Only merchants can sell items.\")\n\n }\n\n } else {\n\n compile_error!(\"Only potions and spellbooks can be bought from a merchant.\")\n\n }\n\n } else {\n\n compile_error!(\"The one buying must be an actor.\")\n\n }\n", "file_path": "rpg-compiler/src/type_checker/mod.rs", "rank": 97, "score": 1.8803271562224917 }, { "content": " i{user}.items.remove(potion_index.unwrap());\\\n\n let health: u32 = if let ActorHealth::Char(h) = i{user}.health {{\n\n h\n\n }} else {{ runtime_error!(\\\"Only actors can make potions.\\\") }};\n\n let i{potion} = Item::Potion({potion}, health);\n\n i{user}.items.push(&i{potion});\"\n\n //if let ActorHealth::Char(h) = i{user}.health {{\\\n\n // i{potion}.set_val(h);\\\n\n // }}\"\n\n ))\n\n }\n\n }\n\n return return_s;\n\n }\n\n NodeType::FnBody 
=> {}\n\n }\n\n unimplemented!(\"That function has not been implemented.\")\n\n }\n\n}", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 98, "score": 1.7944469270193335 }, { "content": " \"i{id}.confused = true;\"\n\n ));\n\n }\n\n SBFunction::GodSpeech => {\n\n // TODO: expect to runtime error\n\n let user = f.user;\n\n return_s.push_str(&format!(\n\n \"{{\\\n\n let mut s = String::new();\n\n let _ = stdout().flush();\n\n stdin().read_line(&mut s).expect(\\\"Input invalid.\\\");\n\n if let Some('\\\\n')=s.chars().next_back() {{\n\n s.pop();\n\n }}\n\n if let Some('\\\\r')=s.chars().next_back() {{\n\n s.pop();\n\n }}\n\n match i{user}.health {{\n\n ActorHealth::Char(_) => {{i{user}.health = ActorHealth::Char(s.parse::<u32>().expect(\\\"Invalid input\\\"))}}\n\n ActorHealth::Zombie(_) => {{i{user}.health = ActorHealth::Zombie(s.parse::<i32>().expect(\\\"Invalid input\\\"))}}\n", "file_path": "rpg-compiler/src/generator/mod.rs", "rank": 99, "score": 1.4734169751891217 } ]
Rust
ignition-host/src/process/pipe.rs
mvanbem/ignition
d22dffd786ff10f69ffa5f8d36cd46fd06288e00
use std::ptr::copy_nonoverlapping; use std::sync::{Arc, Mutex}; use std::task::Poll; use replace_with::{replace_with_or_abort, replace_with_or_abort_and_return}; use tokio::sync::mpsc::UnboundedSender; use crate::{TaskId, WakeParams}; struct SendPointer<T>(*const T); unsafe impl<T> Send for SendPointer<T> {} struct SendPointerMut<T>(*mut T); unsafe impl<T> Send for SendPointerMut<T> {} pub struct PipeReader { inner: Arc<Mutex<InnerPipe>>, } pub struct PipeWriter { inner: Arc<Mutex<InnerPipe>>, } struct InnerPipe { state: PipeState, } enum PipeState { Idle, PendingRead { read_wake_queue_sender: UnboundedSender<WakeParams>, read_task_id: TaskId, dst: SendPointerMut<u8>, dst_len: u32, }, PendingWrite { write_wake_queue_sender: UnboundedSender<WakeParams>, write_task_id: TaskId, src: SendPointer<u8>, src_len: u32, }, Closed, } pub fn pipe() -> (PipeReader, PipeWriter) { let inner = Arc::new(Mutex::new(InnerPipe { state: PipeState::Idle, })); ( PipeReader { inner: Arc::clone(&inner), }, PipeWriter { inner }, ) } impl InnerPipe { fn close(&mut self) { replace_with_or_abort(&mut self.state, |state| match state { PipeState::Idle => PipeState::Closed, PipeState::PendingRead { read_wake_queue_sender, read_task_id, .. } => { read_wake_queue_sender .send(WakeParams { task_id: read_task_id, param: 0, }) .unwrap(); PipeState::Closed } PipeState::PendingWrite { .. } => { todo!("write to a closed pipe") } PipeState::Closed => PipeState::Closed, }); } } impl PipeReader { pub unsafe fn read( &self, read_wake_queue_sender: &UnboundedSender<WakeParams>, read_task_id: TaskId, dst: *mut u8, dst_len: u32, ) -> Poll<u32> { if dst_len == 0 { return Poll::Ready(0); } let mut inner = self.inner.lock().unwrap(); replace_with_or_abort_and_return(&mut inner.state, |state| match state { PipeState::Idle => ( Poll::Pending, PipeState::PendingRead { read_wake_queue_sender: read_wake_queue_sender.clone(), read_task_id, dst: SendPointerMut(dst), dst_len, }, ), PipeState::PendingRead { .. 
} => { todo!("read with a read already pending") } PipeState::PendingWrite { write_wake_queue_sender, write_task_id, src, src_len, } => { let len = dst_len.min(src_len); unsafe { copy_nonoverlapping(src.0, dst, len as _) } write_wake_queue_sender .send(WakeParams { task_id: write_task_id, param: len, }) .unwrap(); (Poll::Ready(len), PipeState::Idle) } PipeState::Closed => (Poll::Ready(0), PipeState::Closed), }) } pub fn close(&self) { self.inner.lock().unwrap().close(); } } impl PipeWriter { pub unsafe fn write( &self, write_wake_queue_sender: &UnboundedSender<WakeParams>, write_task_id: TaskId, src: *const u8, src_len: u32, ) -> Poll<u32> { if src_len == 0 { return Poll::Ready(0); } let mut inner = self.inner.lock().unwrap(); replace_with_or_abort_and_return(&mut inner.state, |state| match state { PipeState::Idle => ( Poll::Pending, PipeState::PendingWrite { write_wake_queue_sender: write_wake_queue_sender.clone(), write_task_id, src: SendPointer(src), src_len, }, ), PipeState::PendingRead { read_wake_queue_sender, read_task_id, dst, dst_len, } => { let len = src_len.min(dst_len); unsafe { copy_nonoverlapping(src, dst.0, len as _) } read_wake_queue_sender .send(WakeParams { task_id: read_task_id, param: len, }) .unwrap(); (Poll::Ready(len), PipeState::Idle) } PipeState::PendingWrite { .. } => todo!("write with a write already pending"), PipeState::Closed => todo!("write to a closed pipe"), }) } pub fn close(&self) { self.inner.lock().unwrap().close(); } }
use std::ptr::copy_nonoverlapping; use std::sync::{Arc, Mutex}; use std::task::Poll; use replace_with::{replace_with_or_abort, replace_with_or_abort_and_return}; use tokio::sync::mpsc::UnboundedSender; use crate::{TaskId, WakeParams}; struct SendPointer<T>(*const T); unsafe impl<T> Send for SendPointer<T> {} struct SendPointerMut<T>(*mut T); unsafe impl<T> Send for SendPointerMut<T> {} pub struct PipeReader { inner: Arc<Mutex<InnerPipe>>, } pub struct PipeWriter { inner: Arc<Mutex<InnerPipe>>, } struct InnerPipe { state: PipeState, } enum PipeState { Idle, PendingRead { read_wake_queue_sender: UnboundedSender<WakeParams>, read_task_id: TaskId, dst: SendPointerMut<u8>, dst_len: u32, }, PendingWrite { write_wake_queue_sender: UnboundedSender<WakeParams>, write_task_id: TaskId, src: SendPointer<u8>, src_len: u32, }, Closed, } pub fn pipe() -> (PipeReader, PipeWriter) { let inner = Arc::new(Mutex::new(InnerPipe { state: PipeState::Idle, })); ( PipeReader { inner: Arc::clone(&inner), }, PipeWriter { inner }, ) } impl InnerPipe { fn close(&mut self) { replace_with_or_abort(&mut self.state, |state| match state { PipeState::Idle => PipeState::Closed, PipeState::PendingRead { read_wake_queue_sender, read_task_id, .. } => { read_wake_queue_sender .send(WakeParams { task_id: read_task_id, param: 0, }) .unwrap(); PipeState::Closed } PipeState::PendingWrite { .. } => { todo!("write to a closed pipe") } PipeState::Closed => PipeState::Closed, }); } } impl PipeReader { pub unsafe fn read( &self, read_wake_queue_sender: &UnboundedSender<WakeParams>, read_task_id: TaskId, dst: *mut u8, dst_len: u32, ) -> Poll<u32> { if dst_len == 0 { return Poll::Ready(0); } let mut inner = self.inner.lock().unwrap(); replace_with_or_abort_and_return(&mut inner.state, |state| match state { PipeState::Idle => ( Poll::Pending, PipeState::PendingRead { read_wake_queue_sender: read_wake_queue_sender.clone(), read_task_id, dst: SendPointerMut(dst), dst_len, }, ),
pub fn close(&self) { self.inner.lock().unwrap().close(); } } impl PipeWriter { pub unsafe fn write( &self, write_wake_queue_sender: &UnboundedSender<WakeParams>, write_task_id: TaskId, src: *const u8, src_len: u32, ) -> Poll<u32> { if src_len == 0 { return Poll::Ready(0); } let mut inner = self.inner.lock().unwrap(); replace_with_or_abort_and_return(&mut inner.state, |state| match state { PipeState::Idle => ( Poll::Pending, PipeState::PendingWrite { write_wake_queue_sender: write_wake_queue_sender.clone(), write_task_id, src: SendPointer(src), src_len, }, ), PipeState::PendingRead { read_wake_queue_sender, read_task_id, dst, dst_len, } => { let len = src_len.min(dst_len); unsafe { copy_nonoverlapping(src, dst.0, len as _) } read_wake_queue_sender .send(WakeParams { task_id: read_task_id, param: len, }) .unwrap(); (Poll::Ready(len), PipeState::Idle) } PipeState::PendingWrite { .. } => todo!("write with a write already pending"), PipeState::Closed => todo!("write to a closed pipe"), }) } pub fn close(&self) { self.inner.lock().unwrap().close(); } }
PipeState::PendingRead { .. } => { todo!("read with a read already pending") } PipeState::PendingWrite { write_wake_queue_sender, write_task_id, src, src_len, } => { let len = dst_len.min(src_len); unsafe { copy_nonoverlapping(src.0, dst, len as _) } write_wake_queue_sender .send(WakeParams { task_id: write_task_id, param: len, }) .unwrap(); (Poll::Ready(len), PipeState::Idle) } PipeState::Closed => (Poll::Ready(0), PipeState::Closed), }) }
function_block-function_prefix_line
[ { "content": "#[doc(hidden)]\n\npub fn wake_internal(task_id: u32, param: usize, init: fn()) {\n\n if task_id == u32::MAX {\n\n init();\n\n } else {\n\n dispatch_wake(TaskId(task_id), param);\n\n }\n\n run();\n\n}\n\n\n\n// TODO: Expand this into a fancy proc macro, something like #[ignition::init] to wrap init().\n\n#[macro_export]\n\nmacro_rules! emit_wake {\n\n ($init:ident) => {\n\n #[no_mangle]\n\n pub extern \"C\" fn wake(task_id: u32, param: usize) {\n\n ::ignition_guest::wake_internal(task_id, param, $init);\n\n }\n\n };\n\n}\n", "file_path": "wasm/ignition-guest/src/lib.rs", "rank": 0, "score": 215636.60796011362 }, { "content": "pub fn spawn(future: impl Future<Output = ()> + Send + 'static) {\n\n EXECUTOR.lock().unwrap().spawn(Task::new(future));\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/executor.rs", "rank": 1, "score": 184894.6308520201 }, { "content": "pub fn dispatch_wake(task_id: TaskId, param: usize) {\n\n REACTOR.lock().unwrap().dispatch_wake(task_id, param);\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/reactor.rs", "rank": 3, "score": 181595.45818173676 }, { "content": "pub fn log(mut caller: Caller<'_, Arc<Process>>, ptr: u32, len: u32) -> Result<(), Trap> {\n\n let memory = get_memory(&mut caller)?;\n\n let message = get_str(caller.as_context(), memory, ptr, len)?;\n\n\n\n println!(\n\n \"[{}] {} {}\",\n\n Utc::now().to_rfc3339_opts(SecondsFormat::Micros, true),\n\n caller.data().pid(),\n\n message,\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "ignition-host/src/api/core.rs", "rank": 5, "score": 175684.52138807104 }, { "content": "pub fn get_state_and_map_mut_ptr<T>(\n\n context: StoreContextMut<T>,\n\n memory: Memory,\n\n ptr: u32,\n\n) -> Result<(&mut T, *mut u8), Trap> {\n\n // TODO: Pass in a range and check that range.\n\n if (ptr as usize) < memory.data_size(context.as_context()) {\n\n let (data, process_state) = memory.data_and_store_mut(context);\n\n let mapped_ptr = unsafe { 
data.as_mut_ptr().offset(ptr as _) };\n\n Ok((process_state, mapped_ptr))\n\n } else {\n\n Err(Trap::new(\"pointer out of range\"))\n\n }\n\n}\n", "file_path": "ignition-host/src/util/mod.rs", "rank": 6, "score": 166396.1146215187 }, { "content": "pub fn sleep(caller: Caller<'_, Arc<Process>>, task_id: u32, usec: u32) {\n\n let task_id = TaskId(task_id);\n\n let duration = Duration::from_micros(usec.into());\n\n\n\n let wake_queue_sender = caller.data().wake_queue_sender().clone();\n\n tokio::spawn(async move {\n\n tokio::time::sleep(duration).await;\n\n wake_queue_sender\n\n .send(WakeParams { task_id, param: 0 })\n\n .unwrap();\n\n });\n\n}\n\n\n", "file_path": "ignition-host/src/api/time.rs", "rank": 7, "score": 160252.28838277617 }, { "content": "pub fn get_wake_param(task_id: TaskId) -> Option<usize> {\n\n REACTOR.lock().unwrap().get_wake_param(task_id)\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/reactor.rs", "rank": 8, "score": 159785.56202332326 }, { "content": "pub fn io_close(caller: Caller<'_, Arc<Process>>, io: u32) -> Result<(), Trap> {\n\n caller.data().io_close(io)\n\n}\n", "file_path": "ignition-host/src/api/io.rs", "rank": 9, "score": 155110.29996138497 }, { "content": "pub fn impulse(caller: Caller<'_, Arc<Process>>, task_id: u32) {\n\n let task_id = TaskId(task_id);\n\n\n\n caller\n\n .data()\n\n .wake_queue_sender()\n\n .send(WakeParams { task_id, param: 0 })\n\n .unwrap();\n\n}\n", "file_path": "ignition-host/src/api/core.rs", "rank": 10, "score": 152519.87715453823 }, { "content": "pub fn io_read(\n\n mut caller: Caller<'_, Arc<Process>>,\n\n task_id: u32,\n\n io: u32,\n\n ptr: u32,\n\n len: u32,\n\n n_ptr: u32,\n\n) -> Result<u32, Trap> {\n\n let memory = get_memory(&mut caller)?;\n\n let (process_state, dst) = get_state_and_map_mut_ptr(caller.as_context_mut(), memory, ptr)?;\n\n let result = unsafe { process_state.io_read(TaskId(task_id), io as _, dst, len) }?;\n\n match result {\n\n Poll::Ready(n) => {\n\n let mut n_data = 
get_slice_mut(caller.as_context_mut(), memory, n_ptr, 4)?;\n\n n_data.write_u32::<LittleEndian>(n).unwrap();\n\n Ok(0)\n\n }\n\n Poll::Pending => Ok(1),\n\n }\n\n}\n\n\n", "file_path": "ignition-host/src/api/io.rs", "rank": 11, "score": 150415.92988854193 }, { "content": "pub fn future_dropped(task_id: TaskId) {\n\n REACTOR.lock().unwrap().future_dropped(task_id);\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/reactor.rs", "rank": 13, "score": 146949.0048280811 }, { "content": "pub fn drop_unused_task(task_id: TaskId) {\n\n REACTOR.lock().unwrap().drop_unused_task(task_id);\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/reactor.rs", "rank": 15, "score": 145457.62931404385 }, { "content": "pub fn get_slice_mut<T>(\n\n context: StoreContextMut<T>,\n\n memory: Memory,\n\n ptr: u32,\n\n len: u32,\n\n) -> Result<&mut [u8], Trap> {\n\n memory\n\n .data_mut(context)\n\n .get_mut(ptr as usize..)\n\n .and_then(|arr| arr.get_mut(..len as usize))\n\n .ok_or_else(|| Trap::new(\"data out of bounds\"))\n\n}\n\n\n", "file_path": "ignition-host/src/util/mod.rs", "rank": 16, "score": 141589.0303835148 }, { "content": "struct InnerConnectionState {\n\n fs: &'static FileSystem,\n\n fids: HashMap<u32, FidState>,\n\n}\n\n\n\nimpl ConnectionState {\n\n pub fn new(fs: &'static FileSystem) -> ConnectionState {\n\n ConnectionState {\n\n inner: Arc::new(Mutex::new(InnerConnectionState {\n\n fs,\n\n fids: HashMap::new(),\n\n })),\n\n }\n\n }\n\n\n\n pub async fn handle_request(&self, req: &Message) -> Result<MessageBody, HandleRequestError> {\n\n match req.body {\n\n MessageBody::TVersion(TVersion { msize, ref version }) => {\n\n if req.tag != Tag::NOTAG {\n\n return rerror(\"expected NOTAG in Tversion request\");\n", "file_path": "ignition-demo-9p-server/src/connection_state.rs", "rank": 17, "score": 140269.3986098894 }, { "content": "pub fn get_state_and_map_ptr<T>(\n\n context: StoreContextMut<T>,\n\n memory: Memory,\n\n ptr: u32,\n\n) -> Result<(&mut T, *const 
u8), Trap> {\n\n // TODO: Pass in a range and check that range.\n\n if (ptr as usize) < memory.data_size(context.as_context()) {\n\n let (data, process_state) = memory.data_and_store_mut(context);\n\n let mapped_ptr = unsafe { data.as_ptr().offset(ptr as _) };\n\n Ok((process_state, mapped_ptr))\n\n } else {\n\n Err(Trap::new(\"pointer out of range\"))\n\n }\n\n}\n\n\n", "file_path": "ignition-host/src/util/mod.rs", "rank": 18, "score": 138829.96761577035 }, { "content": "pub fn store_waker(task_id: TaskId, waker: Waker) {\n\n REACTOR.lock().unwrap().store_waker(task_id, waker);\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/reactor.rs", "rank": 19, "score": 133819.34854069032 }, { "content": "pub fn new_task() -> TaskId {\n\n REACTOR.lock().unwrap().new_task()\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/reactor.rs", "rank": 20, "score": 132459.55732015736 }, { "content": "pub fn get_memory<T>(caller: &mut Caller<T>) -> Result<Memory, Trap> {\n\n match caller.get_export(\"memory\") {\n\n Some(Extern::Memory(memory)) => Ok(memory),\n\n _ => Err(Trap::new(\"failed to find memory\")),\n\n }\n\n}\n\n\n", "file_path": "ignition-host/src/util/mod.rs", "rank": 21, "score": 129792.71749236852 }, { "content": "pub fn abort() -> ! 
{\n\n // SAFETY: No special considerations.\n\n unsafe { sys::abort() }\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/api/mod.rs", "rank": 23, "score": 117194.32686423756 }, { "content": "pub fn io_write(\n\n mut caller: Caller<'_, Arc<Process>>,\n\n task_id: u32,\n\n io: u32,\n\n ptr: u32,\n\n len: u32,\n\n n_ptr: u32,\n\n) -> Result<u32, Trap> {\n\n let memory = get_memory(&mut caller)?;\n\n let (process_state, src) = get_state_and_map_ptr(caller.as_context_mut(), memory, ptr)?;\n\n let result = unsafe { process_state.io_write(TaskId(task_id), io, src, len) }?;\n\n match result {\n\n Poll::Ready(n) => {\n\n let mut n_data = get_slice_mut(caller.as_context_mut(), memory, n_ptr, 4)?;\n\n n_data.write_u32::<LittleEndian>(n).unwrap();\n\n Ok(0)\n\n }\n\n Poll::Pending => Ok(1),\n\n }\n\n}\n\n\n", "file_path": "ignition-host/src/api/io.rs", "rank": 24, "score": 117194.32686423756 }, { "content": "pub fn shutdown() {\n\n // SAFETY: No special considerations.\n\n unsafe { sys::shutdown() }\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/api/mod.rs", "rank": 25, "score": 117194.32686423756 }, { "content": "#[proc_macro_derive(ReadFrom, attributes(ignition_9p_wire))]\n\npub fn derive_read(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n let name = input.ident;\n\n let generics = add_trait_bounds(input.generics);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let struct_attrs = parse_struct_attrs(&input.attrs);\n\n let struct_attr_errors = &struct_attrs.errors;\n\n let read_body = make_read_body(&struct_attrs, &name, &input.data);\n\n\n\n let expanded = quote! 
{\n\n #(#struct_attr_errors)*\n\n impl #impl_generics ::ignition_9p_wire::ReadFrom for #name #ty_generics #where_clause {\n\n fn read_from<R: ::std::io::Read>(r: &mut R) -> ::std::io::Result<Self> {\n\n #read_body\n\n }\n\n }\n\n };\n\n\n\n expanded.into()\n\n}\n\n\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 26, "score": 116777.19102735037 }, { "content": "pub fn rpc_server_create(\n\n mut caller: Caller<'_, Arc<Process>>,\n\n params_ptr: u32,\n\n) -> Result<u32, Trap> {\n\n let memory = get_memory(&mut caller)?;\n\n let mut params_data = get_slice(\n\n caller.as_context(),\n\n memory,\n\n params_ptr,\n\n RpcServerParams::SIZE,\n\n )?;\n\n\n\n let params = RpcServerParams::from_wasm(caller.as_context(), memory, &mut params_data)?;\n\n Ok(Process::rpc_server_create(caller.data(), &params))\n\n}\n\n\n", "file_path": "ignition-host/src/api/rpc_server.rs", "rank": 27, "score": 113460.94584029459 }, { "content": "pub fn rpc_client_create(\n\n mut caller: Caller<'_, Arc<Process>>,\n\n service_name_ptr: u32,\n\n service_name_len: u32,\n\n) -> Result<u32, Trap> {\n\n let memory = get_memory(&mut caller)?;\n\n let service_name = get_str(\n\n caller.as_context(),\n\n memory,\n\n service_name_ptr,\n\n service_name_len,\n\n )?\n\n .to_owned();\n\n\n\n Ok(caller.data().rpc_client_create(service_name))\n\n}\n\n\n", "file_path": "ignition-host/src/api/rpc_client.rs", "rank": 28, "score": 113460.94584029459 }, { "content": "pub fn rpc_client_request(\n\n mut caller: Caller<'_, Arc<Process>>,\n\n rpc_client: u32,\n\n method_name_ptr: u32,\n\n method_name_len: u32,\n\n request_io_ptr: u32,\n\n response_io_ptr: u32,\n\n) -> Result<u32, Trap> {\n\n let memory = get_memory(&mut caller)?;\n\n let method_name = get_str(\n\n caller.as_context(),\n\n memory,\n\n method_name_ptr,\n\n method_name_len,\n\n )?\n\n .to_owned();\n\n\n\n let (request_io, response_io) =\n\n Process::rpc_client_request(caller.data(), rpc_client, &method_name)?;\n\n\n", "file_path": 
"ignition-host/src/api/rpc_client.rs", "rank": 29, "score": 113460.94584029459 }, { "content": "// BUG: Fields named `r` or `w` would shadow the reader or writer and either fail to compile or\n\n// maybe rarely do something unexpected.\n\nfn make_read_body(struct_attrs: &StructAttrs, struct_name: &Ident, data: &Data) -> TokenStream {\n\n let embedded_length_decl = struct_attrs.embedded_size_prefix.as_ref().map(|ty| {\n\n quote! {\n\n let len = <u64 as ::std::convert::TryFrom<_>>::try_from(\n\n <#ty as ::ignition_9p_wire::ReadFrom>::read_from(r)?,\n\n ).map_err(|_| ::std::io::Error::new(\n\n ::std::io::ErrorKind::InvalidInput,\n\n \"value too large to represent in memory\",\n\n ))?;\n\n let r = &mut ::std::io::Read::take(::std::io::Read::by_ref(r), len);\n\n }\n\n });\n\n let embedded_length_check = struct_attrs.embedded_size_prefix.as_ref().map(|_| {\n\n quote! {\n\n if r.limit() != 0 {\n\n return Err(::std::io::Error::new(\n\n ::std::io::ErrorKind::InvalidData,\n\n // TODO: Break this out into a proper Error type and surface the specific numbers.\n\n \"unread bytes after length-prefixed value\",\n\n ));\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 30, "score": 111992.97768585786 }, { "content": "pub fn get_slice<T>(\n\n context: StoreContext<T>,\n\n memory: Memory,\n\n ptr: u32,\n\n len: u32,\n\n) -> Result<&[u8], Trap> {\n\n memory\n\n .data(context)\n\n .get(ptr as usize..)\n\n .and_then(|arr| arr.get(..len as usize))\n\n .ok_or_else(|| Trap::new(\"data out of bounds\"))\n\n}\n\n\n", "file_path": "ignition-host/src/util/mod.rs", "rank": 31, "score": 111918.44431165981 }, { "content": "pub fn get_str<T>(\n\n context: StoreContext<T>,\n\n memory: Memory,\n\n ptr: u32,\n\n len: u32,\n\n) -> Result<&str, Trap> {\n\n from_utf8(get_slice(context, memory, ptr, len)?).map_err(|_| Trap::new(\"invalid utf-8\"))\n\n}\n\n\n", "file_path": "ignition-host/src/util/mod.rs", "rank": 32, "score": 111918.44431165981 }, { "content": "pub fn 
rpc_server_get_request(\n\n mut caller: Caller<'_, Arc<Process>>,\n\n task_id: u32,\n\n rpc_server: u32,\n\n metadata_ptr: u32,\n\n) -> Result<u32, Trap> {\n\n let memory = get_memory(&mut caller)?;\n\n\n\n let result = caller\n\n .data()\n\n .rpc_server_get_request(TaskId(task_id), rpc_server)?;\n\n match result {\n\n Poll::Ready(metadata) => {\n\n metadata.to_wasm(&mut get_slice_mut(\n\n caller.as_context_mut(),\n\n memory,\n\n metadata_ptr,\n\n RpcMetadata::SIZE,\n\n )?)?;\n\n Ok(0)\n\n }\n\n Poll::Pending => Ok(1),\n\n }\n\n}\n", "file_path": "ignition-host/src/api/rpc_server.rs", "rank": 33, "score": 111734.30587505945 }, { "content": "pub fn rpc_client_wait_healthy(\n\n caller: Caller<'_, Arc<Process>>,\n\n task_id: u32,\n\n rpc_client: u32,\n\n) -> Result<u32, Trap> {\n\n match Process::rpc_client_wait_healthy(caller.data(), TaskId(task_id), rpc_client)? {\n\n Poll::Ready(()) => Ok(0),\n\n Poll::Pending => Ok(1),\n\n }\n\n}\n\n\n", "file_path": "ignition-host/src/api/rpc_client.rs", "rank": 34, "score": 111734.30587505945 }, { "content": "struct FidState {\n\n node: Node<'static>,\n\n is_open: bool,\n\n}\n\n\n\nimpl FidState {\n\n fn new(node: Node<'static>) -> FidState {\n\n FidState {\n\n node,\n\n is_open: false,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum AllocateFidError {\n\n #[error(\"fid {} already in use\", .fid.0)]\n\n FidAlreadyInUse { fid: Fid },\n\n}\n", "file_path": "ignition-demo-9p-server/src/connection_state.rs", "rank": 35, "score": 109770.19737860511 }, { "content": "pub fn abort() -> Result<(), Trap> {\n\n Err(Trap::new(\"aborted\"))\n\n}\n\n\n", "file_path": "ignition-host/src/api/core.rs", "rank": 36, "score": 109030.97938454954 }, { "content": "#[async_trait]\n\npub trait FileSystem: Clone + Send + Sync {\n\n type File: AsyncRead + AsyncSeek + AsyncWrite + Send + Sync + Unpin;\n\n\n\n async fn open(&self, path: &Path) -> io::Result<Self::File>;\n\n async fn create(&self, path: &Path) -> io::Result<Self::File>;\n\n async 
fn make_temporary_file(&self) -> io::Result<PathBuf>;\n\n async fn rename(&self, path_from: &Path, path_to: &Path) -> io::Result<()>;\n\n async fn create_dir_all(&self, path: &Path) -> io::Result<()>;\n\n}\n", "file_path": "testable-file-system/src/lib.rs", "rank": 37, "score": 107300.92598056277 }, { "content": "pub fn log(message: &str) {\n\n // SAFETY: `message` and `len` refer to a UTF-8 string.\n\n unsafe { sys::log(message.as_bytes().as_ptr() as *const c_void, message.len()) }\n\n}\n\n\n\npub async fn impulse() {\n\n let task_id = reactor::new_task();\n\n\n\n // SAFETY: No special considerations.\n\n unsafe { sys::impulse(task_id) };\n\n\n\n wait(task_id).await;\n\n}\n\n\n\npub async fn sleep(duration: Duration) {\n\n let task_id = reactor::new_task();\n\n let usec = duration.as_micros().try_into().unwrap();\n\n\n\n // SAFETY: No special considerations.\n\n unsafe { sys::sleep(task_id, usec) };\n\n\n\n wait(task_id).await;\n\n}\n", "file_path": "wasm/ignition-guest/src/api/mod.rs", "rank": 38, "score": 107213.43031605365 }, { "content": "pub fn real_file_system() -> RealFileSystem {\n\n RealFileSystem { _private: () }\n\n}\n", "file_path": "testable-file-system/src/real.rs", "rank": 39, "score": 106731.87027022458 }, { "content": "fn executor_run(arc: &Arc<Mutex<Executor>>) {\n\n loop {\n\n let mut inner = arc.lock().unwrap();\n\n let task = match inner.awake.pop_front() {\n\n Some(task) => task,\n\n None => break,\n\n };\n\n drop(inner);\n\n\n\n let arc_waker = Arc::new(TaskWaker::new(task));\n\n let waker = arc_waker::new(Arc::clone(&arc_waker));\n\n let mut context = Context::from_waker(&waker);\n\n // The result from poll() may be discarded without action. All clones of the waker will\n\n // eventually be dropped, dropping the task only after it is no longer needed. 
If\n\n // incomplete, it will eventually be respawned when wakened.\n\n arc_waker.start_deferred_wake();\n\n let _ = arc_waker.task_mut().as_mut().unwrap().poll(&mut context);\n\n arc_waker.end_deferred_wake();\n\n }\n\n}\n", "file_path": "wasm/ignition-guest/src/runtime/executor.rs", "rank": 40, "score": 105039.3907278901 }, { "content": "fn add_trait_bounds(mut generics: Generics) -> Generics {\n\n for param in &mut generics.params {\n\n if let GenericParam::Type(ref mut type_param) = *param {\n\n type_param\n\n .bounds\n\n .push(parse_quote!(::ignition_9p_wire::ReadFrom))\n\n }\n\n }\n\n generics\n\n}\n\n\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 41, "score": 104806.00725357355 }, { "content": "struct InnerProcess {\n\n rpc_clients: Slab<RpcClient>,\n\n rpc_servers: Slab<RpcServer>,\n\n io_objects: Slab<IoObject>,\n\n}\n\n\n\nimpl Process {\n\n pub fn new(pid: usize) -> (Self, UnboundedReceiver<WakeParams>) {\n\n let (wake_queue_sender, wake_queue_receiver) = unbounded_channel();\n\n let state = Process {\n\n pid,\n\n start_time: Instant::now(),\n\n is_shutdown: AtomicBool::new(false),\n\n wake_queue_sender,\n\n inner: Mutex::new(InnerProcess {\n\n rpc_clients: Slab::new(),\n\n rpc_servers: Slab::new(),\n\n io_objects: Slab::new(),\n\n }),\n\n };\n", "file_path": "ignition-host/src/process/process.rs", "rank": 42, "score": 102802.27501827254 }, { "content": "#[derive(Clone, Copy)]\n\nenum InnerNode {\n\n Directory(DirectoryIndex),\n\n File(FileIndex),\n\n}\n\n\n\nimpl InnerNode {\n\n fn to_node(self, file_system: &FileSystem) -> Node<'_> {\n\n match self {\n\n InnerNode::File(index) => Node::File(File { file_system, index }),\n\n InnerNode::Directory(index) => Node::Directory(Directory { file_system, index }),\n\n }\n\n }\n\n}\n\n\n", "file_path": "ignition-demo-9p-server/src/file_system.rs", "rank": 43, "score": 102512.11318136362 }, { "content": "struct InnerFile {\n\n name: String,\n\n content: Vec<u8>,\n\n qid_path: u64,\n\n}\n\n\n\nimpl 
InnerFile {\n\n pub fn qid(&self) -> Qid {\n\n Qid {\n\n file_type: FileType::default().with_dir(false),\n\n version: 0,\n\n path: self.qid_path,\n\n }\n\n }\n\n\n\n pub fn stat(&self) -> Stat {\n\n let qid = self.qid();\n\n Stat {\n\n kernel_type: 0,\n\n kernel_dev: 0,\n", "file_path": "ignition-demo-9p-server/src/file_system.rs", "rank": 44, "score": 100751.35230957804 }, { "content": "struct InnerDirectory {\n\n parent: DirectoryIndex,\n\n name: String,\n\n content: Vec<u8>,\n\n cut_points: Vec<usize>,\n\n entries: HashMap<String, InnerNode>,\n\n qid_path: u64,\n\n}\n\n\n\nimpl InnerDirectory {\n\n fn qid(&self) -> Qid {\n\n Qid {\n\n file_type: FileType::default().with_dir(true),\n\n version: 0,\n\n path: self.qid_path,\n\n }\n\n }\n\n\n\n fn stat(&self) -> Stat {\n\n let qid = self.qid();\n", "file_path": "ignition-demo-9p-server/src/file_system.rs", "rank": 45, "score": 100751.35230957804 }, { "content": "struct TaskState {\n\n waker: Option<Waker>,\n\n // None before wakened and Some after.\n\n wake_param: Option<usize>,\n\n future_is_dropped: bool,\n\n}\n\n\n\nimpl TaskState {\n\n fn new() -> Self {\n\n Self {\n\n waker: None,\n\n wake_param: None,\n\n future_is_dropped: false,\n\n }\n\n }\n\n\n\n fn ready_to_drop(&self) -> bool {\n\n self.wake_param.is_some() && self.future_is_dropped\n\n }\n\n}\n", "file_path": "wasm/ignition-guest/src/runtime/reactor.rs", "rank": 46, "score": 100517.22904481424 }, { "content": "struct SharedState {\n\n counter: AtomicUsize,\n\n client: RpcClient,\n\n}\n\n\n\nemit_wake!(init);\n\n\n", "file_path": "wasm/ignition-echo-client/src/lib.rs", "rank": 47, "score": 100517.22904481424 }, { "content": "fn unexpected_io_error(e: impl std::error::Error) -> Status {\n\n log::error!(\"unexpected I/O error: {}\", e);\n\n Status::internal(&format!(\"unexpected I/O error: {}\", e))\n\n}\n\n\n", "file_path": "ignition-blob/src/main.rs", "rank": 48, "score": 100463.40243538984 }, { "content": "pub fn shutdown(caller: Caller<'_, 
Arc<Process>>) {\n\n caller.data().shutdown();\n\n}\n\n\n", "file_path": "ignition-host/src/api/core.rs", "rank": 49, "score": 98836.58282511806 }, { "content": "#[derive(Default)]\n\nstruct InnerServiceRegistry {\n\n servers_by_service_name: HashMap<String, HashSet<RpcServerRef>>,\n\n tasks_waiting_by_service_name: HashMap<String, HashSet<ProcessTask>>,\n\n}\n\n\n\nimpl ServiceRegistry {\n\n pub fn register(&self, service_name: String, rpc_server_ref: RpcServerRef) {\n\n let mut inner = self.inner.lock().unwrap();\n\n\n\n // Wake any processes that were waiting for this service to become available.\n\n if let Some(mut process_tasks) = inner.tasks_waiting_by_service_name.remove(&service_name) {\n\n for entry in process_tasks.drain() {\n\n // NOTE: This will be a recursive acquire if the process that's registering this\n\n // server had a client waiting on that same service name. That could be fixed by\n\n // putting an async queue in here.\n\n entry\n\n .process\n\n .wake_queue_sender()\n\n .send(WakeParams {\n\n task_id: entry.task_id,\n", "file_path": "ignition-host/src/process/service_registry.rs", "rank": 50, "score": 98808.41247401203 }, { "content": "struct InnerQidSpace {\n\n next_path: u64,\n\n}\n\n\n\nimpl QidSpace {\n\n pub fn new() -> QidSpace {\n\n QidSpace {\n\n inner: Arc::new(Mutex::new(InnerQidSpace { next_path: 0 })),\n\n }\n\n }\n\n\n\n fn allocate_path(&self) -> u64 {\n\n let mut inner = self.inner.lock().unwrap();\n\n let result = inner.next_path;\n\n inner.next_path += 1;\n\n result\n\n }\n\n}\n\n\n", "file_path": "ignition-demo-9p-server/src/concurrent_file_system.rs", "rank": 51, "score": 96965.14616965997 }, { "content": "/// Types that can be deserialized from the 9p2000 wire protocol.\n\n///\n\n/// # Derivable\n\n///\n\n/// This trait can be used with `#[derive]` on structs or tuple structs if all fields are `ReadFrom`\n\n/// or are [`Vec`]s or slices of `ReadFrom` elements.\n\n///\n\n/// ```\n\n/// # use 
ignition_9p_wire_derive::ReadFrom;\n\n/// #[derive(ReadFrom)]\n\n/// struct Example {\n\n/// field_a: u32,\n\n/// field_b: String,\n\n/// }\n\n/// ```\n\n///\n\n/// The `derive`d implementation reads each field in declaration order.\n\n///\n\n/// ## Attributes\n\n///\n\n/// A number of options are available on the `#[ignition_9p_wire()]` attribute to customize the\n\n/// behavior of a `derive`d implementation, adding collection counts and byte sizes on the wire. All\n\n/// sizes are enforced during deserialization. Attempting to read past the end of a delimited region\n\n/// or leaving any bytes unread at the end of a delimited region raises an error.\n\n///\n\n/// ### Struct prefixes\n\n///\n\n/// Structs may have an embedded size prefix on the wire.\n\n///\n\n/// ```\n\n/// # use ignition_9p_wire::ReadFrom;\n\n/// # use ignition_9p_wire_derive::ReadFrom;\n\n/// #[derive(Debug, ReadFrom, PartialEq)]\n\n/// #[ignition_9p_wire(embedded_size_prefix = \"u32\")]\n\n/// struct Example {\n\n/// field_a: u16,\n\n/// field_b: u16,\n\n/// }\n\n///\n\n/// let mut data: &'static [u8] = &[4, 0, 0, 0, 0x55, 0xaa, 0x34, 0x12];\n\n/// assert_eq!(\n\n/// Example::read_from(&mut data)?,\n\n/// Example {\n\n/// field_a: 0xaa55,\n\n/// field_b: 0x1234,\n\n/// },\n\n/// );\n\n/// # Result::<(), std::io::Error>::Ok(())\n\n/// ```\n\n///\n\n/// ### Field prefixes\n\n///\n\n/// Scalar fields may be unprefixed or size-prefixed. [`Vec`] and slice fields are count-prefixed\n\n/// or, for `u8` elements only, may be handled as length-prefixed bytes. 
Specifying multiple field\n\n/// prefix options is an error.\n\n///\n\n/// ```\n\n/// # use ignition_9p_wire::ReadFrom;\n\n/// # use ignition_9p_wire_derive::ReadFrom;\n\n/// #[derive(Debug, ReadFrom, PartialEq)]\n\n/// struct Example {\n\n/// unprefixed: u32,\n\n///\n\n/// #[ignition_9p_wire(size_prefixed = \"u8\")]\n\n/// size_prefixed: u32,\n\n///\n\n/// #[ignition_9p_wire(count_prefixed = \"u16\")]\n\n/// count_prefixed: Vec<u16>,\n\n///\n\n/// #[ignition_9p_wire(length_prefixed_bytes = \"u32\")]\n\n/// length_prefixed_bytes: Vec<u8>,\n\n/// }\n\n///\n\n/// let mut data: &'static [u8] = &[\n\n/// // unprefixed field\n\n/// 0x78, 0x56, 0x34, 0x12,\n\n///\n\n/// // u8-size-prefixed field\n\n/// 4, 0x78, 0x56, 0x34, 0x12,\n\n///\n\n/// // u16-count-prefixed field\n\n/// 3, 0, 0x34, 0x12, 0x78, 0x56, 0xbc, 0x9a,\n\n///\n\n/// // u32-length-prefixed bytes field\n\n/// 8, 0, 0, 0, 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef,\n\n/// ];\n\n/// assert_eq!(\n\n/// Example::read_from(&mut data)?,\n\n/// Example {\n\n/// unprefixed: 0x12345678,\n\n/// size_prefixed: 0x12345678,\n\n/// count_prefixed: vec![0x1234, 0x5678, 0x9abc],\n\n/// length_prefixed_bytes: vec![0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef],\n\n/// },\n\n/// );\n\n/// # Result::<(), std::io::Error>::Ok(())\n\n/// ```\n\npub trait ReadFrom: Sized {\n\n fn read_from<R: Read>(r: &mut R) -> io::Result<Self>;\n\n}\n\n\n", "file_path": "ignition-9p-wire/src/traits.rs", "rank": 52, "score": 96495.57244791699 }, { "content": "fn parse_struct_attrs(attrs: &[Attribute]) -> StructAttrs {\n\n let mut embedded_size_prefix = vec![];\n\n let mut errors = vec![];\n\n for attr in attrs {\n\n if !path_is_ident(&attr.path, \"ignition_9p_wire\") {\n\n continue;\n\n }\n\n let key_values: KeyValues = syn::parse2(attr.tokens.clone()).unwrap();\n\n for key_value in key_values.0 {\n\n match key_value.key {\n\n x if x == \"embedded_size_prefix\" => {\n\n let ty: Path = match key_value.value.parse() {\n\n Ok(ty) => ty,\n\n 
Err(e) => {\n\n errors.push(AttrError::Other(e.into()));\n\n continue;\n\n }\n\n };\n\n embedded_size_prefix.push(ty);\n\n }\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 53, "score": 94431.30790287812 }, { "content": "pub fn monotonic_time(caller: Caller<'_, Arc<Process>>) -> u64 {\n\n (Instant::now() - caller.data().start_time())\n\n .as_micros()\n\n .try_into()\n\n .unwrap()\n\n}\n", "file_path": "ignition-host/src/api/time.rs", "rank": 54, "score": 93899.97540611005 }, { "content": "struct BlobServiceImpl<F: FileSystem> {\n\n file_system: F,\n\n}\n\n\n\n#[tonic::async_trait]\n\nimpl<F: FileSystem + 'static> blob_pb::blob_service_server::BlobService for BlobServiceImpl<F> {\n\n type GetStream = ReceiverStream<Result<blob_pb::GetResponse, Status>>;\n\n\n\n async fn get(\n\n &self,\n\n request: Request<blob_pb::GetRequest>,\n\n ) -> Result<Response<Self::GetStream>, Status> {\n\n let id = request\n\n .get_ref()\n\n .id\n\n .as_ref()\n\n .ok_or_else(|| Status::invalid_argument(\"id field was unset\"))?;\n\n\n\n let path = path_for_id(id).map_err(|PathForIdError::UnknownHashAlgorithm| {\n\n Status::invalid_argument(\"unknown hash algorithm\")\n", "file_path": "ignition-blob/src/main.rs", "rank": 55, "score": 92961.49235585066 }, { "content": "#[proc_macro_derive(WriteTo, attributes(ignition_9p_wire))]\n\npub fn derive_write(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n let name = input.ident;\n\n let generics = add_trait_bounds(input.generics);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let struct_attrs = parse_struct_attrs(&input.attrs);\n\n let struct_attr_errors = &struct_attrs.errors;\n\n let write_body = make_write_body(&struct_attrs, &input.data);\n\n\n\n let expanded = quote! 
{\n\n #(#struct_attr_errors)*\n\n impl #impl_generics ::ignition_9p_wire::WriteTo for #name #ty_generics #where_clause {\n\n fn write_to<W: ::std::io::Write>(&self, w: &mut W) -> ::std::io::Result<()> {\n\n #write_body\n\n }\n\n }\n\n };\n\n\n\n expanded.into()\n\n}\n\n\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 56, "score": 90053.71780270063 }, { "content": "fn make_write_body(struct_attrs: &StructAttrs, data: &Data) -> TokenStream {\n\n let write_embedded_length = struct_attrs.embedded_size_prefix.as_ref().map(|ty| {\n\n quote! {\n\n let size = ::ignition_9p_wire::EmbeddedSize::embedded_size(self);\n\n ::ignition_9p_wire::WriteTo::write_to(\n\n &<#ty as ::std::convert::TryFrom<_>>::try_from(size)\n\n .map_err(|_| ::std::io::Error::new(\n\n ::std::io::ErrorKind::InvalidInput,\n\n \"value too large to serailize\",\n\n ))?,\n\n w,\n\n )?;\n\n let w = &mut ::ignition_9p_wire::LimitedWriter::new(w, size as u64);\n\n }\n\n });\n\n let check_embedded_length = struct_attrs.embedded_size_prefix.as_ref().map(|_| {\n\n quote! 
{\n\n if w.limit() != 0 {\n\n return Err(::std::io::Error::new(\n\n ::std::io::ErrorKind::InvalidData,\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 57, "score": 86494.97024681221 }, { "content": "struct StructAttrs {\n\n embedded_size_prefix: Option<Path>,\n\n errors: Vec<AttrError>,\n\n}\n\n\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 58, "score": 77784.31776807769 }, { "content": "fn rerror<T: Into<String>, E>(msg: T) -> Result<MessageBody, E> {\n\n Ok(MessageBody::RError(RError { ename: msg.into() }))\n\n}\n\n\n\npub struct ConnectionState {\n\n inner: Arc<Mutex<InnerConnectionState>>,\n\n}\n\n\n", "file_path": "ignition-demo-9p-server/src/connection_state.rs", "rank": 59, "score": 73337.53158554083 }, { "content": "enum Node {\n\n Directory,\n\n File(Arc<Mutex<Cursor<Vec<u8>>>>),\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct InMemoryFile(Arc<Mutex<Cursor<Vec<u8>>>>);\n\n\n\nimpl AsyncRead for InMemoryFile {\n\n fn poll_read(\n\n self: Pin<&mut Self>,\n\n _cx: &mut Context,\n\n buf: &mut ReadBuf,\n\n ) -> Poll<io::Result<()>> {\n\n // TODO(perf): Use unsafe to avoid initializing the buffer.\n\n let n = match io::Read::read(&mut *self.0.lock().unwrap(), buf.initialize_unfilled()) {\n\n Ok(n) => n,\n\n Err(e) => return Poll::Ready(Err(e)),\n\n };\n\n buf.advance(n);\n", "file_path": "testable-file-system/src/in_memory.rs", "rank": 60, "score": 71694.23077570536 }, { "content": "enum Prefixed {\n\n None,\n\n Count { ty: Path },\n\n Size { ty: Path },\n\n LengthBytes { ty: Path },\n\n}\n\n\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 61, "score": 71694.23077570536 }, { "content": "#[derive(Debug, Error)]\n\nenum PathForIdError {\n\n #[error(\"unknown hash algorithm\")]\n\n UnknownHashAlgorithm,\n\n}\n\n\n", "file_path": "ignition-blob/src/main.rs", "rank": 62, "score": 70490.55420497776 }, { "content": "enum AttrError {\n\n UnexpectedKey { key: Ident },\n\n PrefixConflict,\n\n EmbeddedSizeConflict,\n\n 
Other(Box<dyn Error>),\n\n}\n\nimpl ToTokens for &AttrError {\n\n fn to_tokens(&self, tokens: &mut TokenStream) {\n\n match self {\n\n AttrError::UnexpectedKey { key } => {\n\n let message = Literal::string(&format!(\n\n \"unsupported attribute key: {}\",\n\n key.to_token_stream(),\n\n ));\n\n let error = quote! {\n\n compile_error!(#message);\n\n };\n\n error.to_tokens(tokens);\n\n }\n\n AttrError::PrefixConflict => {\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 63, "score": 70490.55420497776 }, { "content": "#[derive(Default)]\n\nstruct Executor {\n\n awake: VecDeque<Task>,\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/executor.rs", "rank": 64, "score": 68720.65590616607 }, { "content": "struct FieldAttrs {\n\n prefixed: Prefixed,\n\n errors: Vec<AttrError>,\n\n}\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 65, "score": 68720.65590616607 }, { "content": "struct Wait {\n\n task_id: TaskId,\n\n}\n\n\n\nimpl Drop for Wait {\n\n fn drop(&mut self) {\n\n reactor::future_dropped(self.task_id);\n\n }\n\n}\n\n\n\nimpl Future for Wait {\n\n type Output = usize;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<usize> {\n\n if let Some(param) = reactor::get_wake_param(self.task_id) {\n\n Poll::Ready(param)\n\n } else {\n\n reactor::store_waker(self.task_id, cx.waker().clone());\n\n Poll::Pending\n\n }\n\n }\n\n}\n", "file_path": "wasm/ignition-guest/src/api/wait.rs", "rank": 66, "score": 68720.65590616607 }, { "content": "struct KeyValue {\n\n key: Ident,\n\n value: LitStr,\n\n}\n\nimpl Parse for KeyValues {\n\n fn parse(input: ParseStream) -> syn::Result<Self> {\n\n let content;\n\n parenthesized!(content in input);\n\n Ok(KeyValues(Punctuated::parse_terminated(&content)?))\n\n }\n\n}\n\nimpl Parse for KeyValue {\n\n fn parse(input: ParseStream) -> syn::Result<Self> {\n\n let key = input.parse()?;\n\n let _eq: Token![=] = input.parse()?;\n\n let value: Literal = input.parse()?;\n\n let value: LitStr = 
syn::parse2(value.into_token_stream())?;\n\n Ok(KeyValue { key, value })\n\n }\n\n}\n\n\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 67, "score": 68720.65590616607 }, { "content": "#[derive(Default)]\n\nstruct Reactor {\n\n tasks: Slab<TaskState>,\n\n}\n\n\n", "file_path": "wasm/ignition-guest/src/runtime/reactor.rs", "rank": 68, "score": 68720.65590616607 }, { "content": "#[derive(Hash, PartialEq, Eq)]\n\nstruct ProcessTask {\n\n process: PointerIdentityArc<Process>,\n\n task_id: TaskId,\n\n}\n\n\n\n#[derive(Clone, Hash, PartialEq, Eq)]\n\npub struct RpcServerRef {\n\n pub process: PointerIdentityArc<Process>,\n\n pub rpc_server: u32,\n\n}\n\n\n\nlazy_static! {\n\n pub static ref SERVICE_REGISTRY: Arc<ServiceRegistry> = Default::default();\n\n}\n", "file_path": "ignition-host/src/process/service_registry.rs", "rank": 69, "score": 67591.20485850403 }, { "content": "struct MethodBuilder {\n\n name: String,\n\n handler: Handler,\n\n}\n\n\n\nimpl RpcServerBuilder {\n\n pub fn new(name: &str) -> Self {\n\n Self {\n\n name: name.to_owned(),\n\n methods: Default::default(),\n\n }\n\n }\n\n\n\n pub fn add_handler(mut self, name: &str, handler: Handler) -> Self {\n\n self.methods.push(MethodBuilder {\n\n name: name.to_owned(),\n\n handler,\n\n });\n\n self\n\n }\n", "file_path": "wasm/ignition-guest/src/rpc_server.rs", "rank": 70, "score": 67591.20485850403 }, { "content": "/// Types that can be serialized to the 9p2000 wire protocol.\n\n///\n\n/// # Derivable\n\n///\n\n/// This trait can be used with `#[derive]` on structs or tuple structs if all fields are `WriteTo`\n\n/// or are [`Vec`]s or slices of `WriteTo` elements.\n\n///\n\n/// ```\n\n/// # use ignition_9p_wire_derive::WriteTo;\n\n/// #[derive(WriteTo)]\n\n/// struct Example {\n\n/// field_a: u32,\n\n/// field_b: String,\n\n/// }\n\n/// ```\n\n///\n\n/// The `derive`d implementation writes each field in declaration order.\n\n///\n\n/// ## Attributes\n\n///\n\n/// A number of options are available 
on the `#[ignition_9p_wire()]` attribute to customize the\n\n/// behavior of a `derive`d implementation, adding collection counts and byte sizes on the wire. All\n\n/// sizes are enforced during serialization. Attempting to write past the end of a delimited region\n\n/// or leaving a delimited region less than fully written raises an error.\n\n///\n\n/// ### Struct prefixes\n\n///\n\n/// Structs may have an embedded size prefix on the wire. The struct must implement\n\n/// [`EmbeddedSize`].\n\n///\n\n/// ```\n\n/// # use ignition_9p_wire::{EmbeddedSize, WriteTo};\n\n/// # use ignition_9p_wire_derive::WriteTo;\n\n/// #[derive(WriteTo)]\n\n/// #[ignition_9p_wire(embedded_size_prefix = \"u32\")]\n\n/// struct Example {\n\n/// field_a: u16,\n\n/// field_b: u16,\n\n/// }\n\n/// impl EmbeddedSize for Example {\n\n/// fn embedded_size(&self) -> usize {\n\n/// 4\n\n/// }\n\n/// }\n\n///\n\n/// let mut data = vec![];\n\n/// Example {\n\n/// field_a: 0xaa55,\n\n/// field_b: 0x1234,\n\n/// }.write_to(&mut data)?;\n\n/// assert_eq!(data.as_slice(), &[4, 0, 0, 0, 0x55, 0xaa, 0x34, 0x12]);\n\n/// # Result::<(), std::io::Error>::Ok(())\n\n/// ```\n\n///\n\n/// ### Field prefixes\n\n///\n\n/// Scalar fields may be unprefixed or size-prefixed. [`Vec`] and slice fields are count-prefixed\n\n/// or, for `u8` elements only, may be handled as length-prefixed bytes. 
Specifying multiple field\n\n/// prefix options is an error.\n\n///\n\n/// Size-prefixed fields must implement [`SerializedSize`].\n\n///\n\n/// ```\n\n/// # use ignition_9p_wire::{SerializedSize, WriteTo};\n\n/// # use ignition_9p_wire_derive::WriteTo;\n\n/// #[derive(WriteTo)]\n\n/// struct Example {\n\n/// unprefixed: u32,\n\n///\n\n/// #[ignition_9p_wire(size_prefixed = \"u8\")]\n\n/// size_prefixed: SizedU32,\n\n///\n\n/// #[ignition_9p_wire(count_prefixed = \"u16\")]\n\n/// count_prefixed: Vec<u16>,\n\n///\n\n/// #[ignition_9p_wire(length_prefixed_bytes = \"u32\")]\n\n/// length_prefixed_bytes: Vec<u8>,\n\n/// }\n\n/// #[derive(WriteTo)]\n\n/// struct SizedU32(u32);\n\n/// impl SerializedSize for SizedU32 {\n\n/// fn serialized_size(&self) -> usize {\n\n/// 4\n\n/// }\n\n/// }\n\n///\n\n/// let mut data = vec![];\n\n/// Example {\n\n/// unprefixed: 0x12345678,\n\n/// size_prefixed: SizedU32(0x12345678),\n\n/// count_prefixed: vec![0x1234, 0x5678, 0x9abc],\n\n/// length_prefixed_bytes: vec![0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef],\n\n/// }.write_to(&mut data)?;\n\n/// assert_eq!(\n\n/// data.as_slice(),\n\n/// &[\n\n/// // unprefixed field\n\n/// 0x78, 0x56, 0x34, 0x12,\n\n///\n\n/// // u8-size-prefixed field\n\n/// 4, 0x78, 0x56, 0x34, 0x12,\n\n///\n\n/// // u16-count-prefixed field\n\n/// 3, 0, 0x34, 0x12, 0x78, 0x56, 0xbc, 0x9a,\n\n///\n\n/// // u32-length-prefixed bytes field\n\n/// 8, 0, 0, 0, 0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef,\n\n/// ],\n\n/// );\n\n/// # Result::<(), std::io::Error>::Ok(())\n\n/// ```\n\npub trait WriteTo {\n\n fn write_to<W: Write>(&self, w: &mut W) -> io::Result<()>;\n\n}\n\n\n", "file_path": "ignition-9p-wire/src/traits.rs", "rank": 71, "score": 65244.207284625154 }, { "content": "fn make_client(\n\n file_system: &InMemoryFileSystem,\n\n) -> BlobServiceClient<BlobServiceServer<BlobServiceImpl<InMemoryFileSystem>>> {\n\n BlobServiceClient::new(BlobServiceServer::new(BlobServiceImpl {\n\n file_system: 
file_system.clone(),\n\n }))\n\n}\n\n\n\n#[tokio::test]\n\nasync fn get_success() {\n\n let file_system = InMemoryFileSystem::new();\n\n file_system.write(\n\n \"data/blake3/ed/e5c0b10f2ec4979c69b52f61e42ff5b413519ce09be0f14d098dcfe5f6f98d\",\n\n b\"Hello, world!\".to_vec(),\n\n );\n\n let mut client = make_client(&file_system);\n\n assert_eq!(\n\n client\n\n .get(blob_pb::GetRequest {\n\n id: Some(blob_pb::BlobId {\n", "file_path": "ignition-blob/src/service_tests.rs", "rank": 72, "score": 65121.43573969175 }, { "content": "fn init() {\n\n spawn(async {\n\n let shared_state = Arc::new(SharedState {\n\n counter: AtomicUsize::new(MESSAGES.len()),\n\n client: RpcClient::new(\"EchoService\"),\n\n });\n\n shared_state.client.wait_healthy().await;\n\n\n\n for message in MESSAGES.iter().copied() {\n\n let shared_state = Arc::clone(&shared_state);\n\n spawn(async move {\n\n let start_time = Instant::now();\n\n\n\n let request = shared_state.client.request(\"echo\");\n\n request.write_all(message.as_bytes()).await;\n\n let response = request.into_response().read_to_end().await;\n\n let elapsed_seconds = (Instant::now() - start_time).as_secs_f64();\n\n\n\n assert_eq!(message.as_bytes(), response);\n\n log(&format!(\n", "file_path": "wasm/ignition-echo-client/src/lib.rs", "rank": 73, "score": 65121.43573969175 }, { "content": "fn init() {\n\n const COUNT: usize = 1_000_000;\n\n\n\n let start_time = Instant::now();\n\n let mut impulses = Vec::new();\n\n for _ in 0..COUNT {\n\n impulses.push(impulse());\n\n }\n\n spawn(async move {\n\n let mut f: FuturesUnordered<_> = impulses.into_iter().collect();\n\n while f.next().await.is_some() {}\n\n let elapsed_seconds = (Instant::now() - start_time).as_secs_f64();\n\n log(&format!(\n\n \"Elapsed: {} s, {} ns per impulse\",\n\n elapsed_seconds,\n\n (elapsed_seconds * (1e9 / COUNT as f64)).ceil(),\n\n ));\n\n shutdown();\n\n });\n\n}\n", "file_path": "wasm/ignition-impulse-bench/src/lib.rs", "rank": 74, "score": 65121.43573969175 }, { 
"content": "fn init() {\n\n spawn(async {\n\n sleep(Duration::from_secs(1)).await;\n\n\n\n let counter = Arc::new(AtomicUsize::new(5));\n\n RpcServerBuilder::new(\"EchoService\")\n\n .add_handler(\n\n \"echo\",\n\n Box::new(move |request, response| {\n\n let counter = Arc::clone(&counter);\n\n Box::pin(async move {\n\n response.write_all(&request.read_to_end().await).await;\n\n if counter.fetch_sub(1, Ordering::SeqCst) == 1 {\n\n shutdown();\n\n }\n\n })\n\n }),\n\n )\n\n .build();\n\n });\n\n}\n", "file_path": "wasm/ignition-echo-server/src/lib.rs", "rank": 75, "score": 65121.43573969175 }, { "content": "#[derive(Clone, Copy)]\n\nstruct FileIndex(usize);\n\n\n", "file_path": "ignition-demo-9p-server/src/file_system.rs", "rank": 76, "score": 64316.06421050687 }, { "content": "#[derive(Clone, Copy)]\n\nstruct DirectoryIndex(usize);\n\n\n\n/// A simple immutable file system.\n\npub struct FileSystem {\n\n directories: Vec<InnerDirectory>,\n\n files: Vec<InnerFile>,\n\n}\n\n\n\nimpl FileSystem {\n\n pub fn builder() -> builder::FileSystem {\n\n builder::FileSystem::new()\n\n }\n\n\n\n pub fn root(&self) -> Directory<'_> {\n\n Directory {\n\n file_system: self,\n\n index: DirectoryIndex(0),\n\n }\n\n }\n\n\n", "file_path": "ignition-demo-9p-server/src/file_system.rs", "rank": 77, "score": 64316.06421050687 }, { "content": "/// Types that embed their size as a prefix of their wire format.\n\n///\n\n/// The motivating example is the 9p `stat` struct, which embeds the length of its contents,\n\n/// excluding the size field itself. 
This trait allows `stat` to avoid declaring a size field and\n\n/// asking all users to fill it in accurately, having it automated instead.\n\n///\n\n/// Use this trait together with the `embedded_size_prefix` attribute key when deriving [`ReadFrom`]\n\n/// or [`WriteTo`]:\n\n///\n\n/// ```\n\n/// # use ignition_9p_wire::{EmbeddedSize, ReadFrom, WriteTo};\n\n/// # use ignition_9p_wire_derive::{ReadFrom, WriteTo};\n\n/// #[derive(Debug, ReadFrom, WriteTo, PartialEq)]\n\n/// #[ignition_9p_wire(embedded_size_prefix = \"u16\")]\n\n/// struct ExampleType {\n\n/// field_a: u32,\n\n/// }\n\n/// impl EmbeddedSize for ExampleType {\n\n/// fn embedded_size(&self) -> usize {\n\n/// 4\n\n/// }\n\n/// }\n\n///\n\n/// let mut buf: &'static [u8] = &[4, 0, 0x78, 0x56, 0x34, 0x12];\n\n/// assert_eq!(\n\n/// ExampleType::read_from(&mut buf)?,\n\n/// ExampleType { field_a: 0x12345678 },\n\n/// );\n\n/// # Result::<(), std::io::Error>::Ok(())\n\n/// ```\n\npub trait EmbeddedSize {\n\n fn embedded_size(&self) -> usize;\n\n}\n\n\n\nimpl ReadFrom for u8 {\n\n fn read_from<R: Read>(r: &mut R) -> io::Result<Self> {\n\n r.read_u8()\n\n }\n\n}\n\nimpl WriteTo for u8 {\n\n fn write_to<W: Write>(&self, w: &mut W) -> io::Result<()> {\n\n w.write_u8(*self)\n\n }\n\n}\n\n\n\nimpl ReadFrom for u16 {\n\n fn read_from<R: Read>(r: &mut R) -> io::Result<Self> {\n\n r.read_u16::<LittleEndian>()\n\n }\n\n}\n", "file_path": "ignition-9p-wire/src/traits.rs", "rank": 78, "score": 64062.522362427015 }, { "content": "pub trait Wasm {\n\n const SIZE: u32;\n\n}\n\n\n", "file_path": "ignition-host/src/interop/mod.rs", "rank": 79, "score": 64039.67082599792 }, { "content": "/// Types that know their serialized size.\n\n///\n\n/// `Derive`d implementations of [`WriteTo`] serialize the provided size first and then enforce that\n\n/// the rest of the value writes precisely that number of bytes.\n\npub trait SerializedSize {\n\n fn serialized_size(&self) -> usize;\n\n}\n\n\n", "file_path": 
"ignition-9p-wire/src/traits.rs", "rank": 80, "score": 64039.67082599792 }, { "content": "/// Provides the \"don't touch\" value for use in `wstat` requests.\n\n///\n\n/// A `wstat` request can avoid modifying some properties of the file by providing explicit \"don't\n\n/// touch\" values in the stat data that is sent: zero-length strings for text values and the\n\n/// maximum unsigned value of appropriate size for integral values.\n\npub trait DontTouch {\n\n /// Returns the \"don't touch\" value for this type.\n\n fn dont_touch() -> Self;\n\n}\n\nimpl DontTouch for u8 {\n\n fn dont_touch() -> Self {\n\n !0\n\n }\n\n}\n\nimpl DontTouch for u16 {\n\n fn dont_touch() -> Self {\n\n !0\n\n }\n\n}\n\nimpl DontTouch for u32 {\n\n fn dont_touch() -> Self {\n\n !0\n\n }\n\n}\n\nimpl DontTouch for u64 {\n\n fn dont_touch() -> Self {\n\n !0\n\n }\n\n}\n\nimpl DontTouch for String {\n\n fn dont_touch() -> Self {\n\n String::new()\n\n }\n\n}\n", "file_path": "ignition-9p-wire/src/dont_touch.rs", "rank": 81, "score": 62921.80710367022 }, { "content": " pub trait WakerTrait {\n\n fn wake(&self);\n\n }\n\n\n\n fn raw_waker_clone<T: WakerTrait>(ptr: *const ()) -> RawWaker {\n\n unsafe { Arc::increment_strong_count(ptr) }\n\n new_raw_waker(ptr as *const T)\n\n }\n\n\n\n fn raw_waker_wake<T: WakerTrait>(ptr: *const ()) {\n\n raw_waker_wake_by_ref::<T>(ptr);\n\n raw_waker_drop::<T>(ptr);\n\n }\n\n\n\n fn raw_waker_wake_by_ref<T: WakerTrait>(ptr: *const ()) {\n\n let data = unsafe { (ptr as *const T).as_ref() }.unwrap();\n\n data.wake();\n\n }\n\n\n\n fn raw_waker_drop<T: WakerTrait>(ptr: *const ()) {\n", "file_path": "wasm/ignition-guest/src/runtime/executor.rs", "rank": 82, "score": 61857.24303307079 }, { "content": "/// A handle to either a file or a directory.\n\npub trait Node {\n\n fn boxed_clone(&self) -> Box<dyn Node>;\n\n fn freeze(&self);\n\n}\n\n\n\n/// A handle to a directory.\n\n#[derive(Clone)]\n\npub struct Directory {\n\n inner: 
Arc<Mutex<InnerDirectory>>,\n\n}\n\n\n\npub struct InnerDirectory {\n\n qid_space: QidSpace,\n\n frozen: bool,\n\n /// None only during initialization.\n\n parent: Option<Directory>,\n\n name: String,\n\n qid_path: u64,\n\n qid_version: u32,\n\n entries: HashMap<String, Box<dyn Node>>,\n", "file_path": "ignition-demo-9p-server/src/concurrent_file_system.rs", "rank": 83, "score": 61857.24303307079 }, { "content": "pub trait ToWasm: Wasm {\n\n fn to_wasm(&self, data: &mut &mut [u8]) -> Result<(), Trap>;\n\n}\n", "file_path": "ignition-host/src/interop/mod.rs", "rank": 84, "score": 61210.25603176604 }, { "content": "pub trait FromWasm: Wasm {\n\n fn from_wasm<T>(\n\n context: StoreContext<T>,\n\n memory: Memory,\n\n data: &mut &[u8],\n\n ) -> Result<Self, Trap>\n\n where\n\n Self: Sized;\n\n}\n\n\n", "file_path": "ignition-host/src/interop/mod.rs", "rank": 85, "score": 61210.25603176604 }, { "content": "fn make_file_system() -> FileSystem {\n\n let mut fs = FileSystem::builder();\n\n let mut root = fs.root();\n\n let mut hello_txt = root.new_file(\"hello.txt\").unwrap();\n\n hello_txt.set_content(b\"words go here\".to_vec());\n\n let mut subdir = root.new_directory(\"subdir\").unwrap();\n\n let mut abc123 = subdir.new_file(\"abc123.txt\").unwrap();\n\n abc123.set_content(b\"def456\".to_vec());\n\n fs.build()\n\n}\n", "file_path": "ignition-demo-9p-server/src/main.rs", "rank": 86, "score": 59994.194317972775 }, { "content": "struct KeyValues(Punctuated<KeyValue, Token![,]>);\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 87, "score": 58856.79140935943 }, { "content": "fn parse_field_attrs(attrs: &[Attribute]) -> FieldAttrs {\n\n let mut prefixed = vec![];\n\n let mut errors = vec![];\n\n for attr in attrs {\n\n if !path_is_ident(&attr.path, \"ignition_9p_wire\") {\n\n continue;\n\n }\n\n let key_values: KeyValues = syn::parse2(attr.tokens.clone()).unwrap();\n\n for key_value in key_values.0 {\n\n match key_value.key {\n\n x if x == \"count_prefixed\" => 
{\n\n let ty: Path = match key_value.value.parse() {\n\n Ok(ty) => ty,\n\n Err(e) => {\n\n errors.push(AttrError::Other(e.into()));\n\n continue;\n\n }\n\n };\n\n prefixed.push(Prefixed::Count { ty });\n\n }\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 88, "score": 55061.47963222325 }, { "content": "fn path_is_ident(path: &Path, expected: &str) -> bool {\n\n match path.get_ident() {\n\n Some(ident) => ident == expected,\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "ignition-9p-wire-derive/src/lib.rs", "rank": 89, "score": 52748.24024668711 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n tonic_build::compile_protos(\"./blob.proto\")?;\n\n Ok(())\n\n}\n", "file_path": "ignition-blob-proto/build.rs", "rank": 90, "score": 49118.19205550216 }, { "content": "fn path_for_id(id: &blob_pb::BlobId) -> Result<PathBuf, PathForIdError> {\n\n match id.algorithm() {\n\n blob_pb::HashAlgorithm::Unknown => Err(PathForIdError::UnknownHashAlgorithm),\n\n\n\n blob_pb::HashAlgorithm::Blake3 => {\n\n assert_eq!(id.hash.len(), 32);\n\n let mut hash_hex = [0; 64];\n\n hex::encode_to_slice(&id.hash[..], &mut hash_hex[..]).unwrap();\n\n let hash_hex_str = std::str::from_utf8(&hash_hex[..]).unwrap();\n\n let mut path = PathBuf::from(\"data/blake3\");\n\n path.push(&hash_hex_str[..2]);\n\n path.push(&hash_hex_str[2..]);\n\n Ok(path)\n\n }\n\n }\n\n}\n", "file_path": "ignition-blob/src/main.rs", "rank": 91, "score": 47846.13853712934 }, { "content": "fn fs_error<T>(msg: &'static str) -> Result<T, FsError> {\n\n Err(FsError(msg))\n\n}\n", "file_path": "ignition-demo-9p-server/src/concurrent_file_system.rs", "rank": 92, "score": 46995.86563738182 } ]
Rust
src/krull64.rs
SamiPerttu/rand_krull
1b613461fb08329588506efaf091206492ae9726
#[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use wrapping_arithmetic::wrappit; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Eq, PartialEq, Debug)] pub struct Krull64 { lcg0: u64, lcg1: u64, stream: u64, } #[inline] fn origin_0(stream: u64) -> u64 { !stream } #[inline] fn origin_128(stream: u64) -> u128 { origin_0(stream) as u128 } impl Krull64 { #[inline] fn lcg_128(&self) -> u128 { self.lcg0 as u128 | ((self.lcg1 as u128) << 64) } #[inline] fn multiplier(&self) -> u64 { super::LCG_M65_1 as u64 } #[inline] fn multiplier_128(&self) -> u128 { super::LCG_M65_1 } #[inline] fn increment_128(&self) -> u128 { ((self.stream as u128) << 1) | 1 } #[inline] fn origin_0(&self) -> u64 { origin_0(self.stream) } #[inline] fn origin_128(&self) -> u128 { origin_128(self.stream) } #[wrappit] #[inline] pub fn step(&mut self) -> u64 { let lcg = (self.lcg0 as u128) * self.multiplier() as u128 + self.increment_128(); self.lcg1 = ((lcg >> 64) as u64) + self.lcg1 * self.multiplier() + self.lcg0; self.lcg0 = lcg as u64; self.get() } #[inline] pub fn step_128(&mut self) -> u128 { self.step() as u128 | ((self.step() as u128) << 64) } #[wrappit] #[inline] pub fn get(&self) -> u64 { let x = self.lcg1; let x = (x ^ (x >> 30)) * 0xbf58476d1ce4e5b9; let x = (x ^ (x >> 27)) * 0x94d049bb133111eb; let x = (x ^ (x >> 31)) * 0xd6e8feb86659fd93; x ^ (x >> 32) } #[wrappit] #[inline] pub fn step_slow(&mut self) -> u64 { let lcg = self.lcg_128() * self.multiplier_128() + self.increment_128(); self.lcg0 = lcg as u64; self.lcg1 = (lcg >> 64) as u64; self.get() } #[allow(clippy::new_without_default)] pub fn new() -> Self { Krull64 { lcg0: origin_0(0), lcg1: 0, stream: 0, } } pub fn from_32(seed: u32) -> Self { Krull64::from_64(seed as u64) } pub fn from_64(seed: u64) -> Self { Krull64 { lcg0: origin_0(seed), lcg1: 0, stream: seed, } } pub fn from_128(seed: u128) -> Self { let mut krull = Krull64::from_64(((seed >> 64) ^ seed) as u64); 
krull.set_position((seed as u128) << 64); krull } pub fn jump(&mut self, steps: i128) { let lcg = crate::lcg::get_state( self.multiplier_128(), self.increment_128(), self.lcg_128(), steps as u128, ); self.lcg0 = lcg as u64; self.lcg1 = (lcg >> 64) as u64; } pub fn position(&self) -> u128 { crate::lcg::get_iterations( self.multiplier_128(), self.increment_128(), self.origin_128(), self.lcg_128(), ) } pub fn set_position(&mut self, position: u128) { let lcg = crate::lcg::get_state( self.multiplier_128(), self.increment_128(), self.origin_128(), position, ); self.lcg0 = lcg as u64; self.lcg1 = (lcg >> 64) as u64; } #[inline] pub fn reset(&mut self) { self.lcg0 = self.origin_0(); self.lcg1 = 0; } #[inline] pub fn stream(&self) -> u64 { self.stream } pub fn set_stream(&mut self, stream: u64) { self.stream = stream; self.reset(); } } use super::{Error, RngCore, SeedableRng}; impl RngCore for Krull64 { fn next_u32(&mut self) -> u32 { self.step() as u32 } fn next_u64(&mut self) -> u64 { self.step() } fn fill_bytes(&mut self, dest: &mut [u8]) { let bytes = dest.len(); let mut i = 0; while i < bytes { let x = self.step(); let j = bytes.min(i + 8); dest[i..j].copy_from_slice(&x.to_le_bytes()[0..(j - i)]); i = j; } } fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.fill_bytes(dest); Ok(()) } } impl SeedableRng for Krull64 { type Seed = [u8; 16]; fn from_seed(seed: Self::Seed) -> Self { Krull64::from_128(u128::from_le_bytes(seed)) } } #[cfg(test)] mod tests { use super::super::*; use super::*; #[test] pub fn run_tests() { let krull64_expected: [u64; 16] = [ 0x57c1b6c1df5ed4d2, 0x1efdba83398cf412, 0xa02d8dfda06ac9ce, 0xf6e3f32be5e81841, 0xc2a690083e597e0d, 0x3b1b2ed3fa6c15aa, 0x241c691340a479b2, 0x88c24c8d79bb67c1, 0x09f213c4fc2b61dc, 0xa4b6ad95c713c951, 0xa43904ae3341edf7, 0xee2dca4d5fd5f8fa, 0x27bdddbeaa4aadb0, 0x98c78e68dbf634b2, 0xf0edc57017a0d5a5, 0x8647ea5de51eca23, ]; let mut krull64 = Krull64::from_64(0); for x in krull64_expected { assert_eq!(x, 
krull64.next_u64()); } let mut r: u128 = 0; let mut rnd = || -> u128 { r = r.wrapping_mul(LCG_M128_1).wrapping_add(0xffff); r }; for _ in 0..1 << 12 { let seed = rnd() as u64; let mut krull1 = Krull64::new(); assert_eq!(0, krull1.stream()); assert_eq!(0, krull1.position()); krull1.set_stream(seed); assert_eq!(seed, krull1.stream()); assert_eq!(0, krull1.position()); let mut krull2 = Krull64::from_64(seed); assert_eq!(seed, krull2.stream()); assert_eq!(0, krull2.position()); let pos2 = rnd(); let pos1 = pos2 & rnd(); krull1.set_position(pos1); krull2.set_position(pos2); assert_eq!(pos1, krull1.position()); assert_eq!(pos2, krull2.position()); krull1.jump((pos2 - pos1) as i128); assert_eq!(pos2, krull1.position()); assert_eq!(krull1.next_u64(), krull2.next_u64()); krull1.jump(-1); assert_eq!(pos2, krull1.position()); krull2.jump(-1); assert_eq!(pos2, krull2.position()); krull1.jump(-((pos2 - pos1) as i128)); assert_eq!(pos1, krull1.position()); let n = 1 + (rnd() & 0x3ff); for _ in 0..n { krull1.next_u64(); } assert_eq!(pos1 + n, krull1.position()); assert_eq!(seed, krull1.stream()); let bytes = 1 + (rnd() & 0x7f); let mut buffer1 = [0u8; 0x80]; let mut buffer2 = [0u8; 0x80]; krull1.reset(); assert_eq!(0, krull1.position()); krull1.fill_bytes(&mut buffer1[0..bytes as usize]); krull2.reset(); for i in 0..0x10 { let x = krull2.next_u64(); buffer2[(i << 3)..((i + 1) << 3)].copy_from_slice(&x.to_le_bytes()); } assert!(buffer1[0..bytes as usize] .iter() .zip(buffer2[0..bytes as usize].iter()) .all(|(x, y)| x == y)); } } }
#[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use wrapping_arithmetic::wrappit; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] #[derive(Clone, Eq, PartialEq, Debug)] pub struct Krull64 { lcg0: u64, lcg1: u64, stream: u64, } #[inline] fn origin_0(stream: u64) -> u64 { !stream } #[inline] fn origin_128(stream: u64) -> u128 { origin_0(stream) as u128 } impl Krull64 { #[inline] fn lcg_128(&self) -> u128 { self.lcg0 as u128 | ((self.lcg1 as u128) << 64) } #[inline] fn multiplier(&self) -> u64 { super::LCG_M65_1 as u64 } #[inline] fn multiplier_128(&self) -> u128 { super::LCG_M65_1 } #[inline] fn increment_128(&self) -> u128 { ((self.stream as u128) << 1) | 1 } #[inline] fn origin_0(&self) -> u64 { origin_0(self.stream) } #[inline] fn origin_128(&self) -> u128 { origin_128(self.stream) } #[wrappit] #[inline] pub fn step(&mut self) -> u64 { let lcg = (self.lcg0 as u128) * self.multiplier() as u128 + self.increment_128(); self.lcg1 = ((lcg >> 64) as u64) + self.lcg1 * self.multiplier() + self.lcg0; self.lcg0 = lcg as u64; self.get() } #[inline] pub fn step_128(&mut self) -> u128 { self.step() as u128 | ((self.step() as u128) << 64) } #[wrappit] #[inline] pub fn get(&self) -> u64 { let x = self.lcg1; let x = (x ^ (x >> 30)) * 0xbf58476d1ce4e5b9; let x = (x ^ (x >> 27)) * 0x94d049bb133111eb; let x = (x ^ (x >> 31)) * 0xd6e8feb86659fd93; x ^ (x >> 32) } #[wrappit] #[inline] pub fn step_slow(&mut self) -> u64 { let lcg = self.lcg_128() * self.multiplier_128() + self.increment_128(); self.lcg0 = lcg as u64; self.lcg1 = (lcg >> 64) as u64; self.get() } #[allow(clippy::new_without_default)] pub fn new() -> Self { Krull64 { lcg0: origin_0(0), lcg1: 0, stream: 0, } } pub fn from_32(seed: u32) -> Self { Krull64::from_64(seed as u64) }
pub fn from_128(seed: u128) -> Self { let mut krull = Krull64::from_64(((seed >> 64) ^ seed) as u64); krull.set_position((seed as u128) << 64); krull } pub fn jump(&mut self, steps: i128) { let lcg = crate::lcg::get_state( self.multiplier_128(), self.increment_128(), self.lcg_128(), steps as u128, ); self.lcg0 = lcg as u64; self.lcg1 = (lcg >> 64) as u64; } pub fn position(&self) -> u128 { crate::lcg::get_iterations( self.multiplier_128(), self.increment_128(), self.origin_128(), self.lcg_128(), ) } pub fn set_position(&mut self, position: u128) { let lcg = crate::lcg::get_state( self.multiplier_128(), self.increment_128(), self.origin_128(), position, ); self.lcg0 = lcg as u64; self.lcg1 = (lcg >> 64) as u64; } #[inline] pub fn reset(&mut self) { self.lcg0 = self.origin_0(); self.lcg1 = 0; } #[inline] pub fn stream(&self) -> u64 { self.stream } pub fn set_stream(&mut self, stream: u64) { self.stream = stream; self.reset(); } } use super::{Error, RngCore, SeedableRng}; impl RngCore for Krull64 { fn next_u32(&mut self) -> u32 { self.step() as u32 } fn next_u64(&mut self) -> u64 { self.step() } fn fill_bytes(&mut self, dest: &mut [u8]) { let bytes = dest.len(); let mut i = 0; while i < bytes { let x = self.step(); let j = bytes.min(i + 8); dest[i..j].copy_from_slice(&x.to_le_bytes()[0..(j - i)]); i = j; } } fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.fill_bytes(dest); Ok(()) } } impl SeedableRng for Krull64 { type Seed = [u8; 16]; fn from_seed(seed: Self::Seed) -> Self { Krull64::from_128(u128::from_le_bytes(seed)) } } #[cfg(test)] mod tests { use super::super::*; use super::*; #[test] pub fn run_tests() { let krull64_expected: [u64; 16] = [ 0x57c1b6c1df5ed4d2, 0x1efdba83398cf412, 0xa02d8dfda06ac9ce, 0xf6e3f32be5e81841, 0xc2a690083e597e0d, 0x3b1b2ed3fa6c15aa, 0x241c691340a479b2, 0x88c24c8d79bb67c1, 0x09f213c4fc2b61dc, 0xa4b6ad95c713c951, 0xa43904ae3341edf7, 0xee2dca4d5fd5f8fa, 0x27bdddbeaa4aadb0, 0x98c78e68dbf634b2, 0xf0edc57017a0d5a5, 
0x8647ea5de51eca23, ]; let mut krull64 = Krull64::from_64(0); for x in krull64_expected { assert_eq!(x, krull64.next_u64()); } let mut r: u128 = 0; let mut rnd = || -> u128 { r = r.wrapping_mul(LCG_M128_1).wrapping_add(0xffff); r }; for _ in 0..1 << 12 { let seed = rnd() as u64; let mut krull1 = Krull64::new(); assert_eq!(0, krull1.stream()); assert_eq!(0, krull1.position()); krull1.set_stream(seed); assert_eq!(seed, krull1.stream()); assert_eq!(0, krull1.position()); let mut krull2 = Krull64::from_64(seed); assert_eq!(seed, krull2.stream()); assert_eq!(0, krull2.position()); let pos2 = rnd(); let pos1 = pos2 & rnd(); krull1.set_position(pos1); krull2.set_position(pos2); assert_eq!(pos1, krull1.position()); assert_eq!(pos2, krull2.position()); krull1.jump((pos2 - pos1) as i128); assert_eq!(pos2, krull1.position()); assert_eq!(krull1.next_u64(), krull2.next_u64()); krull1.jump(-1); assert_eq!(pos2, krull1.position()); krull2.jump(-1); assert_eq!(pos2, krull2.position()); krull1.jump(-((pos2 - pos1) as i128)); assert_eq!(pos1, krull1.position()); let n = 1 + (rnd() & 0x3ff); for _ in 0..n { krull1.next_u64(); } assert_eq!(pos1 + n, krull1.position()); assert_eq!(seed, krull1.stream()); let bytes = 1 + (rnd() & 0x7f); let mut buffer1 = [0u8; 0x80]; let mut buffer2 = [0u8; 0x80]; krull1.reset(); assert_eq!(0, krull1.position()); krull1.fill_bytes(&mut buffer1[0..bytes as usize]); krull2.reset(); for i in 0..0x10 { let x = krull2.next_u64(); buffer2[(i << 3)..((i + 1) << 3)].copy_from_slice(&x.to_le_bytes()); } assert!(buffer1[0..bytes as usize] .iter() .zip(buffer2[0..bytes as usize].iter()) .all(|(x, y)| x == y)); } } }
pub fn from_64(seed: u64) -> Self { Krull64 { lcg0: origin_0(seed), lcg1: 0, stream: seed, } }
function_block-full_function
[ { "content": "#[inline]\n\nfn origin_b_128() -> u128 {\n\n origin_b0() as u128\n\n}\n\n\n\nimpl Krull65 {\n\n #[inline]\n\n fn multiplier_a(&self) -> u64 {\n\n super::LCG_M65_1 as u64\n\n }\n\n\n\n #[inline]\n\n fn multiplier_a_128(&self) -> u128 {\n\n super::LCG_M65_1\n\n }\n\n\n\n #[inline]\n\n fn multiplier_b(&self) -> u64 {\n\n super::LCG_M65_4 as u64\n\n }\n\n\n", "file_path": "src/krull65.rs", "rank": 2, "score": 57622.31336682559 }, { "content": "#[inline]\n\nfn origin_a_128() -> u128 {\n\n origin_a0() as u128\n\n}\n\n\n", "file_path": "src/krull65.rs", "rank": 3, "score": 57622.31336682559 }, { "content": "#[inline]\n\nfn origin_a0() -> u64 {\n\n 0\n\n}\n\n\n", "file_path": "src/krull65.rs", "rank": 4, "score": 55112.842755572055 }, { "content": "#[inline]\n\nfn origin_b0() -> u64 {\n\n 1\n\n}\n\n\n", "file_path": "src/krull65.rs", "rank": 5, "score": 55112.842755572055 }, { "content": "// Define an ad hoc trait to make our functions generic.\n\npub trait Int:\n\n Copy\n\n + Eq\n\n + PartialEq\n\n + Ord\n\n + PartialOrd\n\n + Add<Output = Self>\n\n + Sub<Output = Self>\n\n + Mul<Output = Self>\n\n + Not<Output = Self>\n\n + BitAnd<Output = Self>\n\n + BitOr<Output = Self>\n\n + BitXor<Output = Self>\n\n + Shl<usize, Output = Self>\n\n + Shr<usize, Output = Self>\n\n{\n\n fn zero() -> Self;\n\n fn one() -> Self;\n\n fn wrapping_add(self, other: Self) -> Self;\n\n fn wrapping_sub(self, other: Self) -> Self;\n", "file_path": "src/lcg.rs", "rank": 6, "score": 46126.90655969679 }, { "content": "pub fn criterion_benchmark(c: &mut Criterion) {\n\n let mut krull1 = Krull64::new();\n\n c.bench_function(\"Krull64::step\", move |b| b.iter(|| krull1.step()));\n\n let mut krull2 = Krull64::new();\n\n c.bench_function(\"Krull64::step_slow\", move |b| b.iter(|| krull2.step_slow()));\n\n let mut krull3 = Krull65::new();\n\n c.bench_function(\"Krull65::step\", move |b| b.iter(|| krull3.step()));\n\n}\n\n\n\ncriterion_group!(benches, 
criterion_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "benches/benchmark.rs", "rank": 7, "score": 42855.13676935482 }, { "content": "#[wrappit]\n\npub fn get_jump<T: Int>(m: T, p: T, n: T) -> (T, T) {\n\n // Algorithm from Brown, F. B., \"Random Number Generation with Arbitrary Stride\",\n\n // Transactions of the American Nuclear Society, 1994.\n\n let mut unit_m = m;\n\n let mut unit_p = p;\n\n let mut jump_m = T::one();\n\n let mut jump_p = T::zero();\n\n let mut delta = n;\n\n\n\n while delta > T::zero() {\n\n if delta & T::one() == T::one() {\n\n jump_m = jump_m * unit_m;\n\n jump_p = jump_p * unit_m + unit_p;\n\n }\n\n unit_p = (unit_m + T::one()) * unit_p;\n\n unit_m = unit_m * unit_m;\n\n delta = delta >> 1;\n\n }\n\n (jump_m, jump_p)\n\n}\n\n\n\n/// LCG iteration is state <- state * m + p.\n\n/// Returns the number of iterations between origin state and the given state.\n\n/// Assumes (m, p) is full period.\n", "file_path": "src/lcg.rs", "rank": 8, "score": 40376.20952125855 }, { "content": "#[wrappit]\n\npub fn get_state<T: Int>(m: T, p: T, origin: T, iterations: T) -> T {\n\n let mut jump_m = m;\n\n let mut jump_p = p;\n\n let mut state = origin;\n\n let mut ordinal = iterations;\n\n\n\n while ordinal > T::zero() {\n\n if ordinal & T::one() == T::one() {\n\n state = state * jump_m + jump_p;\n\n }\n\n jump_p = (jump_m + T::one()) * jump_p;\n\n jump_m *= jump_m;\n\n ordinal = ordinal >> 1;\n\n }\n\n state\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::super::*;\n", "file_path": "src/lcg.rs", "rank": 9, "score": 37159.277629161494 }, { "content": "#[wrappit]\n\npub fn get_iterations<T: Int>(m: T, p: T, origin: T, state: T) -> T {\n\n let mut jump_m = m;\n\n let mut jump_p = p;\n\n let mut ordinal = T::zero();\n\n let mut bit = T::one();\n\n let mut address = origin;\n\n\n\n while address != state {\n\n if (bit & address) != (bit & state) {\n\n address = address * jump_m + jump_p;\n\n ordinal = ordinal + bit;\n\n }\n\n jump_p = (jump_m 
+ T::one()) * jump_p;\n\n jump_m *= jump_m;\n\n bit = bit << 1;\n\n }\n\n ordinal\n\n}\n\n\n\n/// LCG iteration is state <- state * m + p.\n\n/// Returns state after the specified number of iterations from the origin state.\n\n/// Assumes (m, p) is full period.\n", "file_path": "src/lcg.rs", "rank": 10, "score": 37159.277629161494 }, { "content": " fn wrapping_mul(self, other: Self) -> Self;\n\n}\n\n\n\nmacro_rules! impl_int {\n\n ( $($t:ty),* ) => {\n\n $( impl Int for $t {\n\n #[inline] fn zero() -> Self { 0 }\n\n #[inline] fn one() -> Self { 1 }\n\n #[inline] fn wrapping_add(self, other: Self) -> Self { <$t>::wrapping_add(self, other) }\n\n #[inline] fn wrapping_sub(self, other: Self) -> Self { <$t>::wrapping_sub(self, other) }\n\n #[inline] fn wrapping_mul(self, other: Self) -> Self { <$t>::wrapping_mul(self, other) }\n\n }) *\n\n }\n\n}\n\nimpl_int! { u8, u16, u32, u64, u128 }\n\n\n\n/// LCG iteration is state <- state * m + p.\n\n/// Returns the (m, p) pair that iterates by n steps at once.\n\n/// Assumes (m, p) is full period.\n", "file_path": "src/lcg.rs", "rank": 11, "score": 19920.221060159955 }, { "content": " use super::*;\n\n\n\n #[test]\n\n pub fn run_tests() {\n\n let mut r: u128 = 0;\n\n let mut rnd = || -> u128 {\n\n r = r.wrapping_mul(LCG_M128_1).wrapping_add(0xffff);\n\n r\n\n };\n\n\n\n for _ in 0..1 << 12 {\n\n let m = match rnd() % 3 {\n\n 0 => LCG_M128_1,\n\n 1 => LCG_M128_2,\n\n _ => LCG_M128_3,\n\n };\n\n let p = rnd() | 1;\n\n let origin = rnd();\n\n\n\n assert_eq!(\n", "file_path": "src/lcg.rs", "rank": 12, "score": 19916.93159335652 }, { "content": "use core::ops::{Add, BitAnd, BitOr, BitXor, Mul, Not, Shl, Shr, Sub};\n\nuse wrapping_arithmetic::wrappit;\n\n\n\n// This module contains utility functions for working with\n\n// LCGs (linear congruential generators).\n\n\n\n// Define an ad hoc trait to make our functions generic.\n", "file_path": "src/lcg.rs", "rank": 13, "score": 19910.733819268487 }, { "content": " 
origin.wrapping_mul(m).wrapping_add(p),\n\n get_state(m, p, origin, 1)\n\n );\n\n assert_eq!(\n\n 1,\n\n get_iterations(m, p, origin, origin.wrapping_mul(m).wrapping_add(p))\n\n );\n\n\n\n // Run some consistency tests.\n\n let state = rnd();\n\n let n = get_iterations(m, p, origin, state);\n\n assert_eq!(state, get_state(m, p, origin, n));\n\n\n\n let (m_total, p_total) = get_jump(m, p, n);\n\n assert_eq!(origin.wrapping_mul(m_total).wrapping_add(p_total), state);\n\n\n\n let n = rnd();\n\n let state = get_state(m, p, origin, n);\n\n assert_eq!(n, get_iterations(m, p, origin, state));\n\n\n\n // Get h <= n.\n\n let h = n & rnd();\n\n let state_h = get_state(m, p, origin, h);\n\n assert_eq!(n - h, get_iterations(m, p, state_h, state));\n\n }\n\n }\n\n}\n", "file_path": "src/lcg.rs", "rank": 14, "score": 19909.185486546587 }, { "content": "#[cfg(feature = \"serde\")]\n\nuse serde::{Deserialize, Serialize};\n\nuse wrapping_arithmetic::wrappit;\n\n\n\n// Krull65 features\n\n// -\"trivially strong\" design by Sami Perttu\n\n// -64-bit output, 256-bit state, 320-bit footprint\n\n// -full 256-bit state space with no bad states and no bad seeds\n\n// -2**128 pairwise independent streams of length 2**128\n\n// -streams are equidistributed with each 64-bit number appearing 2**64 times\n\n// -random access inside streams\n\n// -generation takes approximately 4.6 ns (where PCG-128 is 2.4 ns and Krull64 is 3.0 ns)\n\n\n\n/// Krull65 non-cryptographic RNG. 
64-bit output, 320-bit footprint.\n\n#[cfg_attr(feature = \"serde\", derive(Serialize, Deserialize))]\n\n#[derive(Clone, Eq, PartialEq, Debug)]\n\npub struct Krull65 {\n\n /// LCG A state, low 64 bits.\n\n a0: u64,\n\n /// LCG A state, high 64 bits.\n", "file_path": "src/krull65.rs", "rank": 32, "score": 17.627240645725948 }, { "content": " /// Sets stream and initializes position to 0.\n\n pub fn set_stream(&mut self, stream: u128) {\n\n // This transformation enhances diversity of nearby streams.\n\n self.c1 = (stream ^ (stream >> 64)) as u64;\n\n self.reset();\n\n self.set_b_128(crate::lcg::get_state(\n\n self.multiplier_b_128(),\n\n self.increment_b_128(),\n\n origin_b_128(),\n\n (stream as u64) as u128,\n\n ));\n\n }\n\n}\n\n\n\nuse super::{Error, RngCore, SeedableRng};\n\n\n\nimpl RngCore for Krull65 {\n\n fn next_u32(&mut self) -> u32 {\n\n self.step() as u32\n\n }\n", "file_path": "src/krull65.rs", "rank": 33, "score": 15.15711399984825 }, { "content": " }\n\n }\n\n\n\n /// Creates a new Krull65 RNG from a 32-bit seed.\n\n /// Stream is set to the given seed and position is set to 0.\n\n /// All seeds work equally well.\n\n pub fn from_32(seed: u32) -> Self {\n\n let mut krull = Self::new();\n\n krull.set_stream(seed as u128);\n\n krull\n\n }\n\n\n\n /// Creates a new Krull65 RNG from a 64-bit seed.\n\n /// Stream is set to the given seed and position is set to 0.\n\n /// All seeds work equally well.\n\n pub fn from_64(seed: u64) -> Self {\n\n let mut krull = Self::new();\n\n krull.set_stream(seed as u128);\n\n krull\n\n }\n", "file_path": "src/krull65.rs", "rank": 34, "score": 13.900211717663398 }, { "content": "#![no_std]\n\n\n\npub mod krull64;\n\npub mod krull65;\n\npub mod lcg;\n\n\n\npub use krull64::*;\n\npub use krull65::*;\n\npub use rand_core::*;\n\n\n\n// LCG multipliers from Steele, G. 
and Vigna, S.,\n\n// Computationally Easy, Spectrally Good Multipliers for\n\n// Congruential Pseudorandom Number Generators (2020).\n\n\n\n// 128-bit LCG multipliers.\n\npub const LCG_M128_1: u128 = 0xde92a69f6e2f9f25fd0d90f576075fbd;\n\npub const LCG_M128_2: u128 = 0x576bc0a2178fcf7c619f3ebc7363f7f5;\n\npub const LCG_M128_3: u128 = 0x87ea3de194dd2e97074f3d0c2ea63d35;\n\npub const LCG_M128_4: u128 = 0xf48c0745581cf801619cd45257f0ab65;\n\n\n", "file_path": "src/lib.rs", "rank": 35, "score": 12.731887728568182 }, { "content": "\n\n /// Creates a new Krull65 RNG from a 128-bit seed.\n\n /// Stream is set to the given seed and position is set to 0.\n\n /// All seeds work equally well.\n\n pub fn from_128(seed: u128) -> Self {\n\n let mut krull = Self::new();\n\n krull.set_stream(seed);\n\n krull\n\n }\n\n\n\n /// Creates a new Krull65 RNG from a 192-bit seed.\n\n /// All seeds work equally well.\n\n /// Each seed accesses a unique sequence of length 2**64.\n\n /// Sets stream to (seed0 XOR seed1) to decorrelate nearby seeds in both arguments.\n\n /// High bits of position are taken from seed1.\n\n pub fn from_192(seed0: u128, seed1: u64) -> Self {\n\n let mut krull = Self::new();\n\n krull.set_stream(seed0 ^ (seed1 as u128));\n\n krull.set_position((seed1 as u128) << 64);\n\n krull\n", "file_path": "src/krull65.rs", "rank": 36, "score": 11.886808833809587 }, { "content": " self.a1 = 0;\n\n self.b0 = origin_b0();\n\n self.b1 = 0;\n\n }\n\n\n\n /// Returns current stream. 
The full state of the generator is (stream, position).\n\n #[inline]\n\n pub fn stream(&self) -> u128 {\n\n let a_n = self.position();\n\n let b_n = super::lcg::get_iterations(\n\n self.multiplier_b_128(),\n\n self.increment_b_128(),\n\n origin_b_128(),\n\n self.b_128(),\n\n );\n\n // Low bits of stream are encoded as the phase difference (B - A).\n\n let delta = b_n.wrapping_sub(a_n) as u64;\n\n (((delta ^ self.c1) as u128) << 64) | (delta as u128)\n\n }\n\n\n", "file_path": "src/krull65.rs", "rank": 37, "score": 10.996436956616062 }, { "content": "// 65-bit LCG multipliers for 128-bit LCGs.\n\npub const LCG_M65_1: u128 = 0x1df77a66a374e300d;\n\npub const LCG_M65_2: u128 = 0x1d605bbb58c8abbfd;\n\npub const LCG_M65_3: u128 = 0x1d7d8dd3a6a72b43d;\n\npub const LCG_M65_4: u128 = 0x1f20529e418340d05;\n\n\n\n// 64-bit LCG multipliers.\n\npub const LCG_M64_1: u64 = 0xd1342543de82ef95;\n\npub const LCG_M64_2: u64 = 0xaf251af3b0f025b5;\n\npub const LCG_M64_3: u64 = 0xb564ef22ec7aece5;\n\npub const LCG_M64_4: u64 = 0xf7c2ebc08f67f2b5;\n", "file_path": "src/lib.rs", "rank": 38, "score": 10.988767130532342 }, { "content": " 0x802396acf22d0d0c,\n\n 0x2f1d038e51a0314d,\n\n 0x782203919d148d96,\n\n 0x9935a93e6a838e24,\n\n ];\n\n let mut krull65 = Krull65::from_64(0);\n\n for x in krull65_expected {\n\n assert_eq!(x, krull65.next_u64());\n\n }\n\n\n\n let mut r: u128 = 0;\n\n let mut rnd = || -> u128 {\n\n r = r.wrapping_mul(LCG_M128_1).wrapping_add(0xffff);\n\n r\n\n };\n\n\n\n for _ in 0..1 << 12 {\n\n let seed = rnd();\n\n let mut krull1 = Krull65::new();\n\n assert_eq!(0, krull1.stream());\n", "file_path": "src/krull65.rs", "rank": 39, "score": 10.950005645224037 }, { "content": " }\n\n}\n\n\n\nuse core::convert::TryInto;\n\n\n\nimpl SeedableRng for Krull65 {\n\n type Seed = [u8; 24];\n\n\n\n /// Creates a new Krull65 RNG from a seed.\n\n /// Each seed accesses a unique sequence of length 2**64.\n\n /// All seeds work equally well.\n\n fn from_seed(seed: Self::Seed) -> Self 
{\n\n // Always use Little-Endian.\n\n Krull65::from_192(\n\n u128::from_le_bytes(seed[0..16].try_into().unwrap()),\n\n u64::from_le_bytes(seed[16..24].try_into().unwrap()),\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/krull65.rs", "rank": 40, "score": 10.446389711947266 }, { "content": " #[inline]\n\n fn set_a_128(&mut self, a: u128) {\n\n self.a0 = a as u64;\n\n self.a1 = (a >> 64) as u64;\n\n }\n\n\n\n #[inline]\n\n fn b_128(&self) -> u128 {\n\n self.b0 as u128 | ((self.b1 as u128) << 64)\n\n }\n\n\n\n #[inline]\n\n fn set_b_128(&mut self, b: u128) {\n\n self.b0 = b as u64;\n\n self.b1 = (b >> 64) as u64;\n\n }\n\n\n\n /// Generates the next 64-bit random number.\n\n #[wrappit]\n\n #[inline]\n", "file_path": "src/krull65.rs", "rank": 41, "score": 10.313730080066366 }, { "content": " pub fn position(&self) -> u128 {\n\n // Position is encoded in A.\n\n super::lcg::get_iterations(\n\n self.multiplier_a_128(),\n\n self.increment_a_128(),\n\n origin_a_128(),\n\n self.a_128(),\n\n )\n\n }\n\n\n\n /// Sets position in stream.\n\n pub fn set_position(&mut self, position: u128) {\n\n let delta = position.wrapping_sub(self.position());\n\n self.jump(delta as i128);\n\n }\n\n\n\n /// Resets stream position to 0. 
Equivalent to set_position(0).\n\n #[inline]\n\n pub fn reset(&mut self) {\n\n self.a0 = origin_a0();\n", "file_path": "src/krull65.rs", "rank": 42, "score": 9.63453680530254 }, { "content": " a1: u64,\n\n /// LCG B state, low 64 bits.\n\n b0: u64,\n\n /// LCG B state, high 64 bits.\n\n b1: u64,\n\n /// Stream number, high 64 bits.\n\n c1: u64,\n\n}\n\n\n\n#[inline]\n", "file_path": "src/krull65.rs", "rank": 43, "score": 8.868529423123828 }, { "content": " #[inline]\n\n fn multiplier_b_128(&self) -> u128 {\n\n super::LCG_M65_4\n\n }\n\n\n\n #[inline]\n\n fn increment_a_128(&self) -> u128 {\n\n ((self.c1 as u128) << 1) ^ super::LCG_M128_1\n\n }\n\n\n\n #[inline]\n\n fn increment_b_128(&self) -> u128 {\n\n ((self.c1 as u128) << 1) ^ 1\n\n }\n\n\n\n #[inline]\n\n fn a_128(&self) -> u128 {\n\n self.a0 as u128 | ((self.a1 as u128) << 64)\n\n }\n\n\n", "file_path": "src/krull65.rs", "rank": 44, "score": 8.646402884929547 }, { "content": " pub fn step(&mut self) -> u64 {\n\n // We can get a widening 64-to-128-bit multiply by casting the arguments from 64 bits.\n\n // 65-bit multiplies are ~0.5 ns faster here than 128-bit.\n\n // We also add the increment in 128-bit to get the carry for free.\n\n let a = (self.a0 as u128) * self.multiplier_a() as u128 + self.increment_a_128();\n\n self.a1 = ((a >> 64) as u64) + self.a1 * self.multiplier_a() + self.a0;\n\n self.a0 = a as u64;\n\n let b = (self.b0 as u128) * self.multiplier_b() as u128 + self.increment_b_128();\n\n self.b1 = ((b >> 64) as u64) + self.b1 * self.multiplier_b() + self.b0;\n\n self.b0 = b as u64;\n\n self.get()\n\n }\n\n\n\n /// Generates the next 128-bit random number.\n\n #[inline]\n\n pub fn step_128(&mut self) -> u128 {\n\n self.step() as u128 | ((self.step() as u128) << 64)\n\n }\n\n\n\n /// Returns the current 64-bit output.\n", "file_path": "src/krull65.rs", "rank": 45, "score": 8.033510448260307 }, { "content": " }\n\n\n\n /// Jumps forward (if steps > 0) or backward (if steps < 0) or does nothing (if 
steps = 0).\n\n /// The stream wraps around, so signed steps can be interpreted as unsigned.\n\n pub fn jump(&mut self, steps: i128) {\n\n self.set_a_128(crate::lcg::get_state(\n\n self.multiplier_a_128(),\n\n self.increment_a_128(),\n\n self.a_128(),\n\n steps as u128,\n\n ));\n\n self.set_b_128(crate::lcg::get_state(\n\n self.multiplier_b_128(),\n\n self.increment_b_128(),\n\n self.b_128(),\n\n steps as u128,\n\n ));\n\n }\n\n\n\n /// Returns current position in stream. The full state of the generator is (stream, position).\n", "file_path": "src/krull65.rs", "rank": 46, "score": 7.753477108131772 }, { "content": " // At that level of correlation, we need a second round of hashing\n\n // to purify streams pairwise. The output hash is intended to also\n\n // pass tests as an indexed RNG.\n\n //\n\n let x = (x ^ (x >> 30)) * 0xbf58476d1ce4e5b9; // round 1\n\n let x = (x ^ (x >> 27)) * 0x94d049bb133111eb; // round 2\n\n let x = (x ^ (x >> 31)) * 0xd6e8feb86659fd93; // round 3\n\n x ^ (x >> 32)\n\n }\n\n\n\n /// Creates a new Krull65 RNG.\n\n /// Stream and position are set to 0.\n\n #[allow(clippy::new_without_default)]\n\n pub fn new() -> Self {\n\n Krull65 {\n\n a0: origin_a0(),\n\n a1: 0,\n\n b0: origin_b0(),\n\n b1: 0,\n\n c1: 0,\n", "file_path": "src/krull65.rs", "rank": 47, "score": 7.433878557893538 }, { "content": " #[wrappit]\n\n #[inline]\n\n pub fn get(&self) -> u64 {\n\n // Krull65 algorithm consists of two 128-bit LCGs advancing in synchrony.\n\n // The LCGs A and B realize two cycles of length 2**128,\n\n // with constants determined from high 64 bits of C, the stream.\n\n // Low 64 bits of C are chosen by positioning B against A.\n\n //\n\n // As our starting point, we take the XOR of some high quality bits from A and B.\n\n // Choose high 64 bits from B and A.\n\n // As we're mixing different bits of the LCGs together,\n\n // and the rest of the pipeline is bijective, this guarantees\n\n // equidistribution with each 64-bit output appearing 2**64 
times in each stream.\n\n //\n\n let x = self.b1 ^ (self.a1 << 32) ^ (self.a1 >> 32);\n\n\n\n // The signal is already quite high quality here, as the minimum periodicity\n\n // left in the bits is 2**96 samples.\n\n //\n\n // We can examine our chosen worst case of the user XORing two streams X and Y\n", "file_path": "src/krull65.rs", "rank": 48, "score": 6.988174759300894 }, { "content": " assert_eq!(0, krull1.position());\n\n krull1.set_stream(seed);\n\n assert_eq!(seed, krull1.stream());\n\n assert_eq!(0, krull1.position());\n\n let mut krull2 = Krull65::from_128(seed);\n\n assert_eq!(seed, krull2.stream());\n\n assert_eq!(0, krull2.position());\n\n\n\n let pos2 = rnd();\n\n let pos1 = pos2 & rnd();\n\n krull1.set_position(pos1);\n\n krull2.set_position(pos2);\n\n assert_eq!(pos1, krull1.position());\n\n assert_eq!(pos2, krull2.position());\n\n krull1.jump((pos2 - pos1) as i128);\n\n assert_eq!(pos2, krull1.position());\n\n assert_eq!(krull1.next_u64(), krull2.next_u64());\n\n assert_eq!(krull1.step_128(), krull2.step_128());\n\n krull1.jump(-3);\n\n assert_eq!(pos2, krull1.position());\n", "file_path": "src/krull65.rs", "rank": 49, "score": 6.482378063510509 }, { "content": "# [Krull64/65 Random Number Generators](https://github.com/SamiPerttu/rand_krull)\n\n\n\n## Sample with Confidence\n\n\n\n- High quality, non-cryptographic, medium-fast [RNGs](https://en.wikipedia.org/wiki/Random_number_generation).\n\n- \"Trivially strong\" algorithms combining [LCGs](https://en.wikipedia.org/wiki/Linear_congruential_generator) with a strong output hash.\n\n- 64-bit output, 192-bit (Krull64) or 256-bit (Krull65) state.\n\n- 2\\*\\*64 (Krull64) or 2\\*\\*128 (Krull65) pairwise independent streams of period 2\\*\\*128.\n\n- Streams are equidistributed with each 64-bit number appearing 2\\*\\*64 times.\n\n- Full state space with no bad states and no bad seeds.\n\n- Random access inside streams.\n\n- No unsafe code and no `std` required.\n\n- LCGs are run economically with 
[65-bit multipliers](https://arxiv.org/abs/2001.05304) using 64-to-128-bit widening multiplies.\n\n\n\nKrull64/65 are intended as non-cryptographic workhorse RNGs\n\nsuitable for simulations and procedural content generation\n\nthat are solid, easy to use, and have a full feature set.\n\n\n\n## Crate\n\n\n\nThis crate depends on [rand_core](https://crates.io/crates/rand_core), which is\n\na part of the [Rand project](https://github.com/rust-random/rand).\n\n\n\n[Serde](https://serde.rs/) support is opt-in, so enable the `serde` feature if you need it.\n\n\n\n## License\n\n\n\nMIT\n", "file_path": "README.md", "rank": 50, "score": 6.328877452891229 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::super::*;\n\n use super::*;\n\n\n\n #[test]\n\n pub fn run_tests() {\n\n let krull65_expected: [u64; 16] = [\n\n 0xb452db3477d0f28c,\n\n 0x4b18143b734a1323,\n\n 0xd3bcfb9bc0490552,\n\n 0xeff12231bb834360,\n\n 0xe42f4a761df9af10,\n\n 0x406dfd26792bd055,\n\n 0xc9a5a47d267b062f,\n\n 0x820b92893554fee5,\n\n 0x5486526d00390100,\n\n 0x01e7fe5dadb6c4b6,\n\n 0x92321eb90e80990b,\n\n 0xdd0718b66afad71e,\n", "file_path": "src/krull65.rs", "rank": 51, "score": 5.817627715698327 }, { "content": " krull2.jump(-3);\n\n assert_eq!(pos2, krull2.position());\n\n krull1.jump(-((pos2 - pos1) as i128));\n\n assert_eq!(pos1, krull1.position());\n\n\n\n let n = 1 + (rnd() & 0x3ff);\n\n for _ in 0..n {\n\n krull1.next_u64();\n\n }\n\n assert_eq!(pos1 + n, krull1.position());\n\n\n\n assert_eq!(seed, krull1.stream());\n\n\n\n let bytes = 1 + (rnd() & 0x7f);\n\n let mut buffer1 = [0u8; 0x80];\n\n let mut buffer2 = [0u8; 0x80];\n\n krull1.reset();\n\n assert_eq!(0, krull1.position());\n\n krull1.fill_bytes(&mut buffer1[0..bytes as usize]);\n\n krull2.reset();\n", "file_path": "src/krull65.rs", "rank": 52, "score": 5.211041293794248 }, { "content": "\n\n fn next_u64(&mut self) -> u64 {\n\n self.step()\n\n }\n\n\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n let bytes = dest.len();\n\n 
let mut i = 0;\n\n while i < bytes {\n\n let x = self.step();\n\n let j = bytes.min(i + 8);\n\n // Always use Little-Endian.\n\n dest[i..j].copy_from_slice(&x.to_le_bytes()[0..(j - i)]);\n\n i = j;\n\n }\n\n }\n\n\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n\n self.fill_bytes(dest);\n\n Ok(())\n", "file_path": "src/krull65.rs", "rank": 53, "score": 3.528290435003015 }, { "content": "use criterion::{criterion_group, criterion_main, Criterion};\n\n\n\nuse rand_krull::*;\n\n\n", "file_path": "benches/benchmark.rs", "rank": 54, "score": 2.9530828647055145 }, { "content": " // at the worst possible location with C being identical.\n\n // At this point in the pipeline, pairwise correlations between X and Y\n\n // can be measured easily, as they are just autocorrelations of B: A is identical.\n\n // So the sequence X XOR Y is the XOR of B with a lagged copy of itself.\n\n //\n\n // Fortunately, only in a vanishing fraction of cases does the output hash\n\n // have to do significant work to remove the pairwise correlations.\n\n // The level of correlation is indicated by the lowest differing bit in C.\n\n // In the next table we can see how hashing improves the result\n\n // with some statistical failures of X XOR Y investigated with PractRand.\n\n //\n\n // Identical bits 31 63 95 127\n\n // ---------------------------------------------\n\n // No hashing 256MB 1MB 1MB 1MB\n\n // 1 round >1TB 32GB 1MB 1MB\n\n // 2 rounds ? ? ~64TB 1MB\n\n // 3 rounds ? ? ? 
>1TB\n\n //\n\n // We have cordoned off 64 bits of the theoretical 128-bit phase difference\n\n // to avoid extreme correlations, leaving our worst case at 63 identical bits.\n", "file_path": "src/krull65.rs", "rank": 55, "score": 2.1310101569243702 }, { "content": " for i in 0..0x10 {\n\n let x = krull2.next_u64();\n\n buffer2[(i << 3)..((i + 1) << 3)].copy_from_slice(&x.to_le_bytes());\n\n }\n\n assert!(buffer1[0..bytes as usize]\n\n .iter()\n\n .zip(buffer2[0..bytes as usize].iter())\n\n .all(|(x, y)| x == y));\n\n }\n\n }\n\n}\n", "file_path": "src/krull65.rs", "rank": 56, "score": 1.7881853316571568 } ]
Rust
compiler/src/parser/mod.rs
dvberkel/bergen
81aeb2347655590ba01f1fe813a926a3ec8d26de
use super::brnfck::Command; const NEWLINE: u8 = 10u8; pub fn parse(source: &[u8]) -> Result<Vec<Command>, ParseError> { rows(source).and_then(|(top, middle, bottom)| { if top.len() != middle.len() || middle.len() != bottom.len() { return Err(ParseError::DifferentNumberOfRows); } let mut program = vec![]; let mut column = 0; while column < top.len() { if let Some((command, next_column)) = peek(column, top, middle, bottom) { column = next_column; program.push(command); } else { return Err(ParseError::UnknownMountainRange(column)); } } Ok(program) }) } fn peek(column: usize, top: &[u8], middle: &[u8], bottom: &[u8]) -> Option<(Command, usize)> { if (column + 6) <= top.len() { if &top[column..column + 6] == " /\\ ".as_bytes() && &middle[column..column + 6] == " / \\ ".as_bytes() && &bottom[column..column + 6] == "/ \\".as_bytes() { return Some((Command::IncrementPointer, column + 6)); } } if (column + 8) <= top.len() { if &top[column..column + 8] == " /\\/\\ ".as_bytes() && &middle[column..column + 8] == " / \\ ".as_bytes() && &bottom[column..column + 8] == "/ \\".as_bytes() { return Some((Command::DecrementPointer, column + 8)); } } if (column + 4) <= top.len() { if &top[column..column + 4] == " ".as_bytes() && &middle[column..column + 4] == " /\\ ".as_bytes() && &bottom[column..column + 4] == "/ \\".as_bytes() { return Some((Command::Increment, column + 4)); } } if (column + 6) <= top.len() { if &top[column..column + 6] == " ".as_bytes() && &middle[column..column + 6] == " /\\/\\ ".as_bytes() && &bottom[column..column + 6] == "/ \\".as_bytes() { return Some((Command::Decrement, column + 6)); } } if (column + 8) <= top.len() { if &top[column..column + 8] == " /\\ ".as_bytes() && &middle[column..column + 8] == " / \\/\\ ".as_bytes() && &bottom[column..column + 8] == "/ \\".as_bytes() { return Some((Command::JumpAhead, column + 8)); } } if (column + 8) <= top.len() { if &top[column..column + 8] == " /\\ ".as_bytes() && &middle[column..column + 8] == " /\\/ \\ 
".as_bytes() && &bottom[column..column + 8] == "/ \\".as_bytes() { return Some((Command::JumpBack, column + 8)); } } if (column + 2) <= top.len() { if &top[column..column + 2] == " ".as_bytes() && &middle[column..column + 2] == " ".as_bytes() && &bottom[column..column + 2] == "/\\".as_bytes() { return Some((Command::Write, column + 2)); } } if (column + 10) <= top.len() { if &top[column..column + 10] == " /\\ /\\ ".as_bytes() && &middle[column..column + 10] == " / \\/ \\ ".as_bytes() && &bottom[column..column + 10] == "/ \\".as_bytes() { return Some((Command::Read, column + 10)); } } None } fn rows(source: &[u8]) -> Result<(&[u8], &[u8], &[u8]), ParseError> { let mut index = 0; while index < source.len() && source[index] != NEWLINE { index += 1; } let first_index = index; index += 1; while index < source.len() && source[index] != NEWLINE { index += 1; } let second_index = index; index += 1; while index < source.len() && source[index] != NEWLINE { index += 1; } let third_index = index; if index < source.len() { Ok(( &source[0..first_index], &source[first_index + 1..second_index], &source[second_index + 1..third_index], )) } else { Err(ParseError::NotEnoughRows) } } #[derive(Debug, PartialEq)] pub enum ParseError { Unknown, NotEnoughRows, DifferentNumberOfRows, UnknownMountainRange(usize), } #[cfg(test)] mod tests { use super::super::brnfck::Command; use super::*; #[test] fn should_parse_empty_source() { let source: &[u8] = "\n\n\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 0); } else { assert!(false); } } #[test] fn should_parse_increment_pointer() { let source: &[u8] = " /\\ \n / \\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::IncrementPointer]) } else { assert!(false); } } #[test] fn should_parse_decrement_pointer() { let source: &[u8] = " /\\/\\ \n / \\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { 
assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::DecrementPointer]) } else { assert!(false); } } #[test] fn should_parse_increment() { let source: &[u8] = " \n /\\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::Increment]) } else { assert!(false); } } #[test] fn should_parse_decrement() { let source: &[u8] = " \n /\\/\\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::Decrement]) } else { assert!(false); } } #[test] fn should_parse_jump_ahead() { let source: &[u8] = " /\\ \n / \\/\\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::JumpAhead]) } else { assert!(false); } } #[test] fn should_parse_jump_back() { let source: &[u8] = " /\\ \n /\\/ \\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::JumpBack]) } else { assert!(false); } } #[test] fn should_parse_write() { let source: &[u8] = " \n \n/\\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::Write]) } else { assert!(false); } } #[test] fn should_parse_read() { let source: &[u8] = " /\\ /\\ \n / \\/ \\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::Read]) } else { assert!(false); } } #[test] fn should_parse_program() { let source: &[u8] = " \n /\\ /\\/\\ \n/ \\/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 2); assert_eq!(instructions, vec![Command::Increment, Command::Decrement]) } else { assert!(false); } } #[test] fn should_throw_when_number_of_columns_do_not_agree() { let source: &[u8] = " \n\n\n".as_bytes(); if let 
Err(problem) = parse(source) { assert_eq!(problem, ParseError::DifferentNumberOfRows); } else { assert!(false); } } #[test] fn should_throw_when_there_are_to_few_rows() { let source: &[u8] = " \n\n".as_bytes(); if let Err(problem) = parse(source) { assert_eq!(problem, ParseError::NotEnoughRows); } else { assert!(false); } } }
use super::brnfck::Command; const NEWLINE: u8 = 10u8; pub fn parse(source: &[u8]) -> Result<Vec<Command>, ParseError> { rows(source).and_then(|(top, middle, bottom)| { if top.len() != middle.len() || middle.len() != bottom.len() { return Err(ParseError::DifferentNumberOfRows); } let mut program = vec![]; let mut column = 0; while column < top.len() { if let Some((command, next_column)) = peek(column, top, middle, bottom) { column = next_column; program.push(command); } else { return Err(ParseError::UnknownMountainRange(column)); } } Ok(program) }) } fn peek(column: usize, top: &[u8], middle: &[u8], bottom: &[u8]) -> Option<(Command, usize)> { if (column + 6) <= top.len() { if &top[column..column + 6] == " /\\ ".as_bytes() && &middle[column..column + 6] == " / \\ ".as_bytes() && &bottom[column..column + 6] == "/ \\".as_bytes() { return Some((Command::IncrementPointer, column + 6)); } } if (column + 8) <= top.len() { if &top[column..column + 8] == " /\\/\\ ".as_bytes() && &middle[column..column + 8] == " / \\ ".as_bytes() && &bottom[column..column + 8] == "/ \\".as_bytes() { return Some((Command::DecrementPointer, column + 8)); } } if (column + 4) <= top.len() { if &top[column..column + 4] == " ".as_bytes() && &middle[column..column + 4] == " /\\ ".as_bytes() && &bottom[column..column + 4] == "/ \\".as_bytes() { return Some((Command::Increment, column + 4)); } } if (column + 6) <= top.len() { if &top[column..column + 6] == " ".as_bytes() && &middle[column..column + 6] == " /\\/\\ ".as_bytes() && &bottom[column..column + 6] == "/ \\".as_bytes() { return Some((Command::Decrement, column + 6)); } } if (column + 8) <= top.len() { if &top[column..column + 8] == " /\\ ".as_bytes() && &middle[column..column + 8] == " / \\/\\ ".as_bytes() && &bottom[column..column + 8] == "/ \\".as_bytes() { return Some((Command::JumpAhead, column + 8)); } } if (column + 8) <= top.len() { if &top[column..column + 8] == " /\\ ".as_bytes() && &middle[column..column + 8] == " /\\/ \\ 
".as_bytes() && &bottom[column..column + 8] == "/ \\".as_bytes() { return Some((Command::JumpBack, column + 8)); } } if (column + 2) <= top.len() { if &top[column..column + 2] == " ".as_bytes() && &middle[column..column + 2] == " ".as_bytes() && &bottom[column..column + 2] == "/\\".as_bytes() { return Some((Command::Write, column + 2)); } } if (column + 10) <= top.len() { if &top[column..column + 10] == " /\\ /\\ ".as_bytes() && &middle[column..column + 10] == " / \\/ \\ ".as_bytes() && &bottom[column..column + 10] == "/ \\".as_bytes() { return Some((Command::Read, column + 10)); } } None } fn rows(source: &[u8]) -> Result<(&[u8], &[u8], &[u8]), ParseError> { let mut index = 0; while index < source.len() && source[index] != NEWLINE { index += 1; } let first_index = index; index += 1; while index < source.len() && source[index] != NEWLINE { index += 1; } let second_index = index; index += 1; while index < source.len() && source[index] != NEWLINE { index += 1; } let third_index = index; if index < source.len() { Ok(( &source[0..first_index], &source[first_index + 1..second_index], &source[second_index + 1..third_index], )) } else { Err(ParseError::NotEnoughRows) } } #[derive(Debug, PartialEq)] pub enum ParseError { Unknown, NotEnoughRows, DifferentNumberOfRows, UnknownMountainRange(usize), } #[cfg(test)] mod tests { use super::super::brnfck::Command; use super::*; #[test] fn should_parse_empty_source() { let source: &[u8] = "\n\n\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 0); } else { assert!(false); } } #[test] fn should_parse_increment_pointer() { let source: &[u8] = " /\\ \n / \\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::IncrementPointer]) } else { assert!(false); } } #[test] fn should_parse_decrement_pointer() { let source: &[u8] = " /\\/\\ \n / \\ \n/ \\\n".as_bytes();
#[test] fn should_parse_increment() { let source: &[u8] = " \n /\\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::Increment]) } else { assert!(false); } } #[test] fn should_parse_decrement() { let source: &[u8] = " \n /\\/\\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::Decrement]) } else { assert!(false); } } #[test] fn should_parse_jump_ahead() { let source: &[u8] = " /\\ \n / \\/\\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::JumpAhead]) } else { assert!(false); } } #[test] fn should_parse_jump_back() { let source: &[u8] = " /\\ \n /\\/ \\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::JumpBack]) } else { assert!(false); } } #[test] fn should_parse_write() { let source: &[u8] = " \n \n/\\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::Write]) } else { assert!(false); } } #[test] fn should_parse_read() { let source: &[u8] = " /\\ /\\ \n / \\/ \\ \n/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::Read]) } else { assert!(false); } } #[test] fn should_parse_program() { let source: &[u8] = " \n /\\ /\\/\\ \n/ \\/ \\\n".as_bytes(); if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 2); assert_eq!(instructions, vec![Command::Increment, Command::Decrement]) } else { assert!(false); } } #[test] fn should_throw_when_number_of_columns_do_not_agree() { let source: &[u8] = " \n\n\n".as_bytes(); if let Err(problem) = parse(source) { assert_eq!(problem, ParseError::DifferentNumberOfRows); } else { assert!(false); } } #[test] fn 
should_throw_when_there_are_to_few_rows() { let source: &[u8] = " \n\n".as_bytes(); if let Err(problem) = parse(source) { assert_eq!(problem, ParseError::NotEnoughRows); } else { assert!(false); } } }
if let Ok(instructions) = parse(source) { assert_eq!(instructions.len(), 1); assert_eq!(instructions, vec![Command::DecrementPointer]) } else { assert!(false); } }
function_block-function_prefix_line
[ { "content": "pub fn program_from(characters: &[u8]) -> Vec<Command> {\n\n let mut program = Vec::new();\n\n let mut index = 0;\n\n let mut last_character = 0;\n\n while index < characters.len() {\n\n let mut difference = characters[index] as i16 - last_character as i16;\n\n let command = if difference > 0 { Command::Increment } else { Command::Decrement };\n\n difference = difference.abs();\n\n if difference >= 2 {\n\n let factors = factors_of(difference);\n\n change_to_character_by(factors.len(), &factors, command, &mut program);\n\n } else if difference == 1 {\n\n program.push(command);\n\n } else {\n\n /* do nothing; difference == 0 */\n\n }\n\n program.push(Command::Write);\n\n last_character = characters[index];\n\n index += 1;\n\n }\n\n\n\n program\n\n}\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 1, "score": 140532.11590932688 }, { "content": "fn alter_to_left(n: usize, command: Command, program: &mut Vec<Command>) {\n\n if n == 0 {\n\n program.push(command);\n\n } else {\n\n program.push(Command::DecrementPointer);\n\n alter_to_left(n - 1, command, program);\n\n program.push(Command::IncrementPointer);\n\n }\n\n}\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 3, "score": 129305.44710164111 }, { "content": "fn change_to_character_by(n: usize, factors: &[i16], command: Command, program : &mut Vec<Command>) {\n\n if factors.len() == 0 {\n\n alter_to_left(n, command, program);\n\n } else {\n\n program.push(Command::IncrementPointer);\n\n let mut factor = factors[0];\n\n while factor > 0 {\n\n program.push(Command::Increment);\n\n factor -= 1;\n\n }\n\n program.push(Command::JumpAhead);\n\n change_to_character_by(n, &factors[1..], command, program);\n\n program.push(Command::Decrement);\n\n program.push(Command::JumpBack);\n\n program.push(Command::DecrementPointer);\n\n }\n\n}\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 4, "score": 118470.10031348023 }, { "content": "pub fn parse(source: &[u8]) -> Result<Vec<Command>, 
ParseError> {\n\n let mut program = Vec::new();\n\n let mut index = 0;\n\n while index < source.len() {\n\n match source[index] {\n\n 43 /* + */ => program.push(Command::Increment),\n\n 44 /* , */ => program.push(Command::Read),\n\n 45 /* - */ => program.push(Command::Decrement),\n\n 46 /* . */ => program.push(Command::Write),\n\n 60 /* < */ => program.push(Command::DecrementPointer),\n\n 62 /* > */ => program.push(Command::IncrementPointer),\n\n 91 /* [ */ => program.push(Command::JumpAhead),\n\n 93 /* ] */ => program.push(Command::JumpBack),\n\n 10 /* LF */ => { /* Accept, but ignore */},\n\n 13 /* CR */ => { /* Accept, but ignore */},\n\n 32 /* SPACE */ => { /* Accept, but ignore */ },\n\n _ => return Err(ParseError::UnknownCharacter(source[index])),\n\n }\n\n index += 1;\n\n }\n", "file_path": "compiler/src/brnfck/parser.rs", "rank": 5, "score": 117066.20141793179 }, { "content": "fn factors_of(mut n: i16) -> Vec<i16> {\n\n let mut factors = Vec::new();\n\n let mut divisor = 2;\n\n while divisor <= n {\n\n while n % divisor == 0 {\n\n factors.push(divisor);\n\n n = n / divisor;\n\n }\n\n divisor += 1;\n\n }\n\n factors\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn run_simple_program() {\n\n let instructions = [\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 7, "score": 108568.10042480388 }, { "content": "#[test]\n\nfn write_to_vec() {\n\n let bytes: [u8; 3] = [97, 98, 99];\n\n let mut buffer: [u8; 1];\n\n let mut output: Vec<u8> = vec![];\n\n {\n\n let output_ref = &mut output;\n\n\n\n for byte in bytes.iter() {\n\n buffer = [*byte];\n\n let result = output_ref.write(&buffer);\n\n assert!(result.is_ok());\n\n assert_eq!(result.unwrap(), 1);\n\n }\n\n }\n\n assert_eq!(output, vec!(97, 98, 99));\n\n}\n", "file_path": "compiler/tests/read_write.rs", "rank": 8, "score": 79107.03220967468 }, { "content": "pub fn to_brnfck<O: Write>(\n\n instructions: &[machine::Command],\n\n mut output: O,\n\n) -> Result<(), io::Error> 
{\n\n let mut program = String::new();\n\n for instruction in instructions {\n\n program.push(instruction.to_brnfck())\n\n }\n\n\n\n output.write_all(program.as_bytes())\n\n}\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 9, "score": 77703.83843157474 }, { "content": "pub fn to_bergen<O: Write>(instructions: &[machine::Command], mut output: O) -> Result<(), io::Error> {\n\n\tlet (mut top, mut middle, mut bottom) = (String::new(), String::new(), String::new());\n\n\tfor instruction in instructions {\n\n\t\ttop = top + instruction.top();\n\n\t\tmiddle = middle + instruction.middle();\n\n\t\tbottom = bottom + instruction.bottom();\n\n\t}\n\n\n\n\toutput.write_all(top.as_bytes())?;\n\n\toutput.write_all(&NEWLINE)?;\n\n\toutput.write_all(middle.as_bytes())?;\n\n\toutput.write_all(&NEWLINE)?;\n\n\toutput.write_all(bottom.as_bytes())?;\n\n\toutput.write_all(&NEWLINE)\n\n}\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 10, "score": 70231.32386652223 }, { "content": "pub fn io_run<I: Read, O: Write>(\n\n instructions: &[machine::Command],\n\n input: I,\n\n output: O,\n\n) -> Result<(), machine::MachineError> {\n\n let input_box = Box::new(input);\n\n let output_box = Box::new(output);\n\n let machine: machine::Machine<I, O> = machine::Machine::io(instructions, input_box, output_box);\n\n\n\n machine.run()\n\n}\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 11, "score": 68999.2533888412 }, { "content": "pub fn run(instructions: &[machine::Command]) -> Result<(), machine::MachineError> {\n\n let machine: machine::Machine<&[u8], Vec<u8>> = machine::Machine::new(instructions);\n\n\n\n machine.run()\n\n}\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 12, "score": 60704.5874610487 }, { "content": "fn same_cells(lhs: &[u8; SIZE], rhs: &[u8; SIZE]) -> bool {\n\n let mut index = 0;\n\n while index < SIZE && lhs[index] == rhs[index] {\n\n index += 1;\n\n }\n\n index == SIZE\n\n}\n\n\n\nimpl<'a, I, O> Eq for Machine<'a, I, 
O>\n\nwhere\n\n I: Read,\n\n O: Write,\n\n{\n\n}\n\n\n\nimpl<'a, I, O> Debug for Machine<'a, I, O>\n\nwhere\n\n I: Read,\n\n O: Write,\n\n{\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 13, "score": 59847.75098745824 }, { "content": "#[test]\n\nfn read_from_buffer() {\n\n let mut input: &[u8] = \"Hello, World!\".as_bytes();\n\n let mut buffer: [u8; 1];\n\n\n\n for character in vec![\n\n \"H\", \"e\", \"l\", \"l\", \"o\", \",\", \" \", \"W\", \"o\", \"r\", \"l\", \"d\", \"!\",\n\n ] {\n\n buffer = [0];\n\n let result = input.read(&mut buffer);\n\n assert!(result.is_ok());\n\n assert_eq!(result.unwrap(), 1);\n\n assert_eq!(buffer, character.as_bytes());\n\n }\n\n\n\n buffer = [0];\n\n if let Ok(size) = input.read(&mut buffer) {\n\n assert_eq!(size, 0);\n\n } else {\n\n assert!(false);\n\n }\n\n}\n\n\n", "file_path": "compiler/tests/read_write.rs", "rank": 14, "score": 58393.532760287184 }, { "content": "use std::io::{Read, Write};\n\n\n\n#[test]\n", "file_path": "compiler/tests/read_write.rs", "rank": 15, "score": 23126.562944049303 }, { "content": "use std::io::{self, Read, Write};\n\nuse std::ops::Add;\n\n\n\nconst NEWLINE: [u8;1] = [10];\n\n\n\nmod machine;\n\npub mod parser;\n\n\n\npub use self::machine::{Command, MachineError};\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 18, "score": 21301.956518453535 }, { "content": " Command::Increment,\n\n Command::Increment,\n\n Command::JumpAhead,\n\n Command::Decrement,\n\n Command::JumpBack,\n\n ];\n\n\n\n assert_eq!(run(&instructions), Ok(()));\n\n }\n\n\n\n #[test]\n\n fn run_io_program() {\n\n let instructions = [Command::Read, Command::Write];\n\n let input: &[u8] = \"a\".as_bytes();\n\n let mut output: Vec<u8> = vec![];\n\n\n\n assert_eq!(io_run(&instructions, input, &mut output), Ok(()));\n\n assert_eq!(output, vec!(97));\n\n }\n\n\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 21, "score": 21300.763630553345 }, { "content": " #[test]\n\n fn determine_prime_factors() {\n\n 
assert_eq!(factors_of(2), vec![2]);\n\n assert_eq!(factors_of(3), vec![3]);\n\n assert_eq!(factors_of(4), vec![2, 2]);\n\n assert_eq!(factors_of(5), vec![5]);\n\n assert_eq!(factors_of(6), vec![2, 3]);\n\n }\n\n}\n", "file_path": "compiler/src/brnfck/mod.rs", "rank": 29, "score": 21294.462772371597 }, { "content": " Ok(program)\n\n}\n\n\n\npub enum ParseError {\n\n UnknownCharacter(u8),\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_Increment() {\n\n let source = \"+\".as_bytes();\n\n\n\n if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 1);\n\n assert_eq!(program, vec!(Command::Increment));\n\n } else {\n", "file_path": "compiler/src/brnfck/parser.rs", "rank": 30, "score": 20.912620924781066 }, { "content": " pub fn run(mut self) -> Result<(), MachineError> {\n\n while !self.halted() {\n\n match self.execute() {\n\n Ok(next_machine) => {\n\n self = next_machine;\n\n }\n\n\n\n Err(error) => {\n\n return Err(error);\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n fn jump_back_index(&self, start_index: usize) -> Option<usize> {\n\n let mut openings = 1;\n\n let mut index = start_index + 1;\n\n while index < self.instructions.len() && openings != 0 {\n\n match self.instructions[index] {\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 31, "score": 11.751131204780787 }, { "content": "\t\t}\n\n\t}\n\n\n\n\tpub fn bottom(&self) -> &str {\n\n\t\tmatch self {\n\n Command::IncrementPointer => \"/ \\\\\",\n\n Command::DecrementPointer => \"/ \\\\\",\n\n Command::Increment => \"/ \\\\\",\n\n Command::Decrement => \"/ \\\\\",\n\n Command::JumpAhead => \"/ \\\\\",\n\n Command::JumpBack => \"/ \\\\\",\n\n Command::Write => \"/\\\\\",\n\n Command::Read => \"/ \\\\\",\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 32, "score": 11.211998569302812 }, { "content": "\n\n if let Ok(program) = 
parse(&source) {\n\n assert_eq!(program.len(), 1);\n\n assert_eq!(program, vec!(Command::JumpBack));\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_Write() {\n\n let source = \".\".as_bytes();\n\n\n\n if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 1);\n\n assert_eq!(program, vec!(Command::Write));\n\n } else {\n\n assert!(false);\n\n }\n", "file_path": "compiler/src/brnfck/parser.rs", "rank": 33, "score": 11.096457392053523 }, { "content": " let source = \">\".as_bytes();\n\n\n\n if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 1);\n\n assert_eq!(program, vec!(Command::IncrementPointer));\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_DecrementPointer() {\n\n let source = \"<\".as_bytes();\n\n\n\n if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 1);\n\n assert_eq!(program, vec!(Command::DecrementPointer));\n\n } else {\n\n assert!(false);\n", "file_path": "compiler/src/brnfck/parser.rs", "rank": 34, "score": 11.030187311816192 }, { "content": " }\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_Read() {\n\n let source = \",\".as_bytes();\n\n\n\n if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 1);\n\n assert_eq!(program, vec!(Command::Read));\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_program() {\n\n let source = \"++[-]\".as_bytes();\n\n\n", "file_path": "compiler/src/brnfck/parser.rs", "rank": 35, "score": 10.842759616867706 }, { "content": " }\n\n }\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_JumpAhead() {\n\n let source = \"[\".as_bytes();\n\n\n\n if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 1);\n\n assert_eq!(program, vec!(Command::JumpAhead));\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n 
#[allow(non_snake_case)]\n\n fn should_parse_JumpBack() {\n\n let source = \"]\".as_bytes();\n", "file_path": "compiler/src/brnfck/parser.rs", "rank": 36, "score": 10.327874020695983 }, { "content": " assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_Decrement() {\n\n let source = \"-\".as_bytes();\n\n\n\n if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 1);\n\n assert_eq!(program, vec!(Command::Decrement));\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_IncrementPointer() {\n", "file_path": "compiler/src/brnfck/parser.rs", "rank": 37, "score": 10.156685307257648 }, { "content": " if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 5);\n\n assert_eq!(program, vec!(Command::Increment, Command::Increment, Command::JumpAhead, Command::Decrement, Command::JumpBack));\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n #[allow(non_snake_case)]\n\n fn should_parse_program_wuth_whitespace() {\n\n let source = \"++ [ - ] \".as_bytes();\n\n\n\n if let Ok(program) = parse(&source) {\n\n assert_eq!(program.len(), 5);\n\n assert_eq!(program, vec!(Command::Increment, Command::Increment, Command::JumpAhead, Command::Decrement, Command::JumpBack));\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n}", "file_path": "compiler/src/brnfck/parser.rs", "rank": 38, "score": 9.761388443230645 }, { "content": " Command::JumpAhead => openings += 1,\n\n Command::JumpBack => openings -= 1,\n\n _ => { /* do nothing */ }\n\n }\n\n index += 1\n\n }\n\n if index <= self.instructions.len() && openings == 0 {\n\n Some(index - 1)\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n fn jump_ahead_index(&self, start_index: usize) -> Option<usize> {\n\n let mut closings = 1;\n\n let mut index: isize = start_index as isize - 1;\n\n while index >= 0 && closings != 0 {\n\n match self.instructions[index as usize] {\n\n Command::JumpAhead => closings -= 1,\n\n 
Command::JumpBack => closings += 1,\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 39, "score": 9.569828414391218 }, { "content": " return Err(MachineError::NoInput);\n\n }\n\n {\n\n let mut input = self.input.as_mut().unwrap();\n\n let mut buffer: [u8; 1] = [0; 1];\n\n if let Ok(size) = (*input).read(&mut buffer) {\n\n if size == 1 {\n\n self.instruction_pointer += 1;\n\n self.cells[self.cell_pointer] = buffer[0];\n\n } else {\n\n return Err(MachineError::NoByteRead);\n\n }\n\n } else {\n\n return Err(MachineError::InputError);\n\n }\n\n }\n\n Ok(self)\n\n }\n\n Command::Write => {\n\n if self.output.is_none() {\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 40, "score": 8.003865725036789 }, { "content": "use std::fmt::{self, Debug, Formatter};\n\nuse std::io::{Read, Write};\n\n\n\nconst SIZE: usize = 30_000;\n\n\n\npub struct Machine<'a, I: 'a, O: 'a>\n\nwhere\n\n I: Read,\n\n O: Write,\n\n{\n\n input: Option<Box<I>>,\n\n output: Option<Box<O>>,\n\n instruction_pointer: usize,\n\n instructions: &'a [Command],\n\n cell_pointer: usize,\n\n cells: [u8; SIZE],\n\n}\n\n\n\nimpl<'a, I, O> Machine<'a, I, O>\n\nwhere\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 41, "score": 7.910679555720421 }, { "content": " instructions: self.instructions,\n\n cell_pointer: self.cell_pointer,\n\n cells: self.cells,\n\n }\n\n }\n\n\n\n fn build(self) -> Machine<'a, &'a [u8], Vec<u8>> {\n\n Machine {\n\n input: None,\n\n output: None,\n\n instruction_pointer: self.instruction_pointer,\n\n instructions: self.instructions,\n\n cell_pointer: self.cell_pointer,\n\n cells: self.cells,\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn execute_increment_pointer_will_result_in_a_machine() {\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 42, "score": 7.413477397852272 }, { "content": " instruction_pointer,\n\n instructions: self.instructions,\n\n cell_pointer: self.cell_pointer,\n\n cells: self.cells,\n\n }\n\n }\n\n\n\n fn cell_pointer_at(self, 
cell_pointer: usize) -> Self {\n\n BuildMachine {\n\n instruction_pointer: self.instruction_pointer,\n\n instructions: self.instructions,\n\n cell_pointer,\n\n cells: self.cells,\n\n }\n\n }\n\n\n\n fn cell(mut self, index: usize, value: u8) -> Self {\n\n self.cells[index] = value;\n\n BuildMachine {\n\n instruction_pointer: self.instruction_pointer,\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 43, "score": 7.280220231897847 }, { "content": " self.instruction_pointer += 1;\n\n Ok(self)\n\n }\n\n }\n\n Command::JumpBack => {\n\n let current_value = self.cells[self.cell_pointer];\n\n if current_value != 0 {\n\n if let Some(index) = self.jump_ahead_index(self.instruction_pointer) {\n\n self.instruction_pointer = index + 1;\n\n Ok(self)\n\n } else {\n\n Err(MachineError::UnmatchedJumpBack)\n\n }\n\n } else {\n\n self.instruction_pointer += 1;\n\n Ok(self)\n\n }\n\n }\n\n Command::Read => {\n\n if self.input.is_none() {\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 44, "score": 6.895050838946639 }, { "content": " _ => { /* do nothing */ }\n\n }\n\n index -= 1\n\n }\n\n if index >= 0 && closings == 0 {\n\n Some(index as usize + 1)\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, I, O> PartialEq for Machine<'a, I, O>\n\nwhere\n\n I: Read,\n\n O: Write,\n\n{\n\n fn eq(&self, rhs: &Self) -> bool {\n\n if self.instruction_pointer != rhs.instruction_pointer {\n\n return false;\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 45, "score": 6.768339019537292 }, { "content": " assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n fn jumping_should_work_correctly() {\n\n let instructions = [\n\n Command::Increment,\n\n Command::Increment,\n\n Command::JumpAhead,\n\n Command::Decrement,\n\n Command::JumpBack,\n\n ];\n\n let machine: Machine<&[u8], Vec<u8>> = Machine::new(&instructions);\n\n\n\n if let Ok(result_machine) = machine\n\n .execute()\n\n .and_then(|machine| {\n\n assert_eq!(\n\n machine,\n", "file_path": 
"compiler/src/brnfck/machine.rs", "rank": 46, "score": 6.6500830105779825 }, { "content": "pub mod brnfck;\n\npub mod parser;\n", "file_path": "compiler/src/lib.rs", "rank": 47, "score": 6.419453114494143 }, { "content": " return Err(MachineError::NoOutput);\n\n }\n\n {\n\n let buffer: [u8; 1] = [self.cells[self.cell_pointer]; 1];\n\n let mut output = self.output.as_mut().unwrap();\n\n if let Ok(size) = (*output).write(&buffer) {\n\n if size == 1 {\n\n self.instruction_pointer += 1;\n\n } else {\n\n return Err(MachineError::NoByteWritten);\n\n }\n\n } else {\n\n return Err(MachineError::OutputError);\n\n }\n\n }\n\n Ok(self)\n\n }\n\n }\n\n }\n\n\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 48, "score": 6.200268388426471 }, { "content": " .build();\n\n let machine: Machine<&[u8], Vec<u8>> = Machine::new(&instructions);\n\n\n\n if let Ok(result_machine) = machine.execute() {\n\n assert_eq!(result_machine, expected_machine);\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n fn execute_instruction_will_result_in_an_error() {\n\n for (instruction, expected_error) in vec![\n\n (\n\n Command::DecrementPointer,\n\n MachineError::PointerDecrementOutOfBound,\n\n ),\n\n (Command::Decrement, MachineError::CellUnderflow),\n\n (Command::Read, MachineError::NoInput),\n\n (Command::Write, MachineError::NoOutput),\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 49, "score": 6.026439256751162 }, { "content": " fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> {\n\n write!(f, \"<{}:[\", self.instruction_pointer)?;\n\n for instruction in self.instructions {\n\n write!(f, \" {:?}\", instruction)?;\n\n }\n\n write!(f, \" ]|{};{{\", self.cell_pointer)?;\n\n for index in 0..SIZE {\n\n if self.cells[index] != 0 {\n\n write!(f, \"({},{})\", index, self.cells[index])?;\n\n }\n\n }\n\n write!(f, \"}}>\")\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub enum MachineError {\n\n PointerIncrementOutOfBound,\n\n 
PointerDecrementOutOfBound,\n\n CellOverflow,\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 50, "score": 5.9758918549997215 }, { "content": " Command::Decrement => {\n\n let current_value = self.cells[self.cell_pointer];\n\n if current_value != u8::min_value() {\n\n self.instruction_pointer += 1;\n\n self.cells[self.cell_pointer] -= 1;\n\n Ok(self)\n\n } else {\n\n Err(MachineError::CellUnderflow)\n\n }\n\n }\n\n Command::JumpAhead => {\n\n let current_value = self.cells[self.cell_pointer];\n\n if current_value == 0 {\n\n if let Some(index) = self.jump_back_index(self.instruction_pointer) {\n\n self.instruction_pointer = index + 1;\n\n Ok(self)\n\n } else {\n\n Err(MachineError::UnmatchedJumpAhead)\n\n }\n\n } else {\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 51, "score": 5.722193196173709 }, { "content": " let instructions = [Command::IncrementPointer];\n\n let expected_machine = BuildMachine::with(&instructions)\n\n .instruction_pointer_at(1)\n\n .cell_pointer_at(1)\n\n .build();\n\n let machine: Machine<&[u8], Vec<u8>> = Machine::new(&instructions);\n\n\n\n if let Ok(result_machine) = machine.execute() {\n\n assert_eq!(result_machine, expected_machine);\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n fn execute_increment_will_result_in_a_machine() {\n\n let instructions = [Command::Increment];\n\n let expected_machine = BuildMachine::with(&instructions)\n\n .instruction_pointer_at(1)\n\n .cell(0, 1)\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 52, "score": 5.614246706139488 }, { "content": " cell_pointer: 0,\n\n cells: [0; SIZE],\n\n }\n\n }\n\n\n\n pub fn halted(&self) -> bool {\n\n self.instructions.len() <= self.instruction_pointer\n\n }\n\n\n\n pub fn execute(mut self) -> Result<Machine<'a, I, O>, MachineError> {\n\n let command = self.instructions[self.instruction_pointer];\n\n match command {\n\n Command::IncrementPointer => {\n\n if self.cell_pointer != SIZE - 1 {\n\n self.instruction_pointer += 
1;\n\n self.cell_pointer += 1;\n\n Ok(self)\n\n } else {\n\n Err(MachineError::PointerIncrementOutOfBound)\n\n }\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 53, "score": 5.138748040944488 }, { "content": " ] {\n\n let instructions = [instruction];\n\n let machine: Machine<&[u8], Vec<u8>> = Machine::new(&instructions);\n\n\n\n if let Err(result_error) = machine.execute() {\n\n assert_eq!(result_error, expected_error);\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n }\n\n\n\n #[test]\n\n fn increment_pointer_should_error_when_on_boundary() {\n\n let instructions = [Command::IncrementPointer];\n\n let machine = BuildMachine::with(&instructions)\n\n .cell_pointer_at(SIZE - 1)\n\n .build();\n\n\n\n if let Err(result_error) = machine.execute() {\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 54, "score": 4.658946659378379 }, { "content": " I: Read,\n\n O: Write,\n\n{\n\n pub fn new(instructions: &'a [Command]) -> Machine<'a, I, O> {\n\n Machine {\n\n input: None,\n\n output: None,\n\n instruction_pointer: 0,\n\n instructions: instructions,\n\n cell_pointer: 0,\n\n cells: [0; SIZE],\n\n }\n\n }\n\n\n\n pub fn io(instructions: &'a [Command], input: Box<I>, output: Box<O>) -> Machine<'a, I, O> {\n\n Machine {\n\n input: Some(input),\n\n output: Some(output),\n\n instruction_pointer: 0,\n\n instructions: instructions,\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 55, "score": 4.4206081047667105 }, { "content": " #[test]\n\n fn jump_ahead_should_error_when_missing_jump_back() {\n\n let instructions = [Command::JumpAhead];\n\n let machine: Machine<&[u8], Vec<u8>> = Machine::new(&instructions);\n\n\n\n if let Err(result_error) = machine.execute() {\n\n assert_eq!(result_error, MachineError::UnmatchedJumpAhead);\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n fn jump_back_should_error_when_missing_jump_ahead() {\n\n let instructions = [Command::JumpBack];\n\n let machine = BuildMachine::with(&instructions).cell(0, 
1).build();\n\n\n\n if let Err(result_error) = machine.execute() {\n\n assert_eq!(result_error, MachineError::UnmatchedJumpBack);\n\n } else {\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 56, "score": 4.403807679601295 }, { "content": "\n\n struct BuildMachine<'a> {\n\n instruction_pointer: usize,\n\n instructions: &'a [Command],\n\n cell_pointer: usize,\n\n cells: [u8; SIZE],\n\n }\n\n\n\n impl<'a> BuildMachine<'a> {\n\n fn with(instructions: &'a [Command]) -> Self {\n\n BuildMachine {\n\n instruction_pointer: 0,\n\n instructions,\n\n cell_pointer: 0,\n\n cells: [0; SIZE],\n\n }\n\n }\n\n\n\n fn instruction_pointer_at(self, instruction_pointer: usize) -> Self {\n\n BuildMachine {\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 57, "score": 4.251992233392292 }, { "content": " Write,\n\n}\n\n\n\nimpl Command {\n\n pub fn to_brnfck(&self) -> char {\n\n match self {\n\n Command::IncrementPointer => '>',\n\n Command::DecrementPointer => '<',\n\n Command::Increment => '+',\n\n Command::Decrement => '-',\n\n Command::JumpAhead => '[',\n\n Command::JumpBack => ']',\n\n Command::Read => ',',\n\n Command::Write => '.',\n\n }\n\n }\n\n\n\n\tpub fn top(&self) -> &str {\n\n\t\tmatch self {\n\n Command::IncrementPointer => \" /\\\\ \",\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 58, "score": 4.085223509204857 }, { "content": " }\n\n Command::DecrementPointer => {\n\n if self.cell_pointer != 0 {\n\n self.instruction_pointer += 1;\n\n self.cell_pointer -= 1;\n\n Ok(self)\n\n } else {\n\n Err(MachineError::PointerDecrementOutOfBound)\n\n }\n\n }\n\n Command::Increment => {\n\n let current_value = self.cells[self.cell_pointer];\n\n if current_value != u8::max_value() {\n\n self.instruction_pointer += 1;\n\n self.cells[self.cell_pointer] += 1;\n\n Ok(self)\n\n } else {\n\n Err(MachineError::CellOverflow)\n\n }\n\n }\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 59, "score": 3.5493939950968327 }, { "content": " 
Command::DecrementPointer => \" /\\\\/\\\\ \",\n\n Command::Increment => \" \",\n\n Command::Decrement => \" \",\n\n Command::JumpAhead => \" /\\\\ \",\n\n Command::JumpBack => \" /\\\\ \",\n\n Command::Write => \" \",\n\n Command::Read => \" /\\\\ /\\\\ \",\n\n\t\t}\n\n\t}\n\n\n\n\tpub fn middle(&self) -> &str {\n\n\t\tmatch self {\n\n Command::IncrementPointer => \" / \\\\ \",\n\n Command::DecrementPointer => \" / \\\\ \",\n\n Command::Increment => \" /\\\\ \",\n\n Command::Decrement => \" /\\\\/\\\\ \",\n\n Command::JumpAhead => \" / \\\\/\\\\ \",\n\n Command::JumpBack => \" /\\\\/ \\\\ \",\n\n Command::Write => \" \",\n\n Command::Read => \" / \\\\/ \\\\ \",\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 60, "score": 3.1272509444772814 }, { "content": "use crate::brnfck::Command;\n\n\n", "file_path": "compiler/src/brnfck/parser.rs", "rank": 61, "score": 2.803821271636508 }, { "content": " assert_eq!(result_error, MachineError::PointerIncrementOutOfBound);\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n fn increment_should_error_when_on_around() {\n\n let instructions = [Command::Increment];\n\n let machine = BuildMachine::with(&instructions)\n\n .cell(0, u8::max_value())\n\n .build();\n\n\n\n if let Err(result_error) = machine.execute() {\n\n assert_eq!(result_error, MachineError::CellOverflow);\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 62, "score": 2.74092083351545 }, { "content": "# I created a programming languages and so can you\n\n\n\n<div class=\"content-warning\">\n\n <span class=\"warning\">Foul Language</span>\n\n</div>\n\n\n\n???\n\n\n\n# The foul language is a name of a esoteric programming language\n\n\n\n---\n\n\n\n# Croco Loco is Turing-Complete\n\n\n\n<center><img src=\"image/croco-body.png\" width=\"480px\" align></center>\n\n\n\n???\n\n\n\n# If you have seen my earlier talks\n\n# You might suspect this subject\n\n\n\n---\n\n\n\n# 
Nope\n\n\n\n--\n\n\n\n# Croco Loco is not Turing-Complete\n\n\n\n???\n\n\n\n# that is not true\n\n\n\n---\n\n\n\n# Croco Loco is a regular\n\n\n\n--\n\n\n\n```\n\nH(1|2|3)*T\n\n```\n\n\n\n???\n\n\n\n# It is only needs a Deterministic Finite Automaton (DFA)\n\n# Not a Turing Machine\n\n\n\n---\n\n\n\n<textarea id=\"haiku\" class=\"haiku\" rows=\"8\" cols=\"30\" wrap=\"off\"></textarea>\n\n\n\n???\n\n\n\n# Poem celebrating a great conference\n\n# But what is the header?\n\n\n\n---\n\n\n\n# [Demo](https://asciinema.org/a/S3WoUDKA2vJXQbd7qglAu9VEb)\n\n\n\n[![asciicast](https://asciinema.org/a/S3WoUDKA2vJXQbd7qglAu9VEb.svg)](https://asciinema.org/a/S3WoUDKA2vJXQbd7qglAu9VEb)\n\n\n\n---\n\n\n\n[![Iconic Bergen? sign](image/bergen.jpg)](https://en.wikipedia.org/wiki/Bergen)\n\n\n\n\n\n???\n\n\n\n* BoosterConf 2018\n\n* Flew in\n\n* Greeted by iconic Bergen sign\n\n* With Rico & Felienne\n\n* Inspired by the sign\n\n* Invented \"Bergen, bergen, bergen\"\n\n* Itself inspired by [\"Chicken Chicken Chicken: Chicken Chicken\"](https://isotropic.org/papers/chicken.pdf)\n\n\n\n---\n\n\n\n# Esoteric programming language\n\n\n\n> a programming language designed to test the boundaries of computer programming language design, as a proof of concept, as software art, as a hacking interface to another language (particularly functional programming or procedural programming languages), or as a joke.\n\n\n\n???\n\n\n\n* Software development; a narrow view what can be done\n\n* Where is the fun?\n\n* Where is the creativity?\n\n* How does it work?\n\n* How do you create a language\n\n\n\n---\n\n\n\n<div id=\"brnfck-container\"></div>\n\n\n\n???\n\n\n\n# Target\n\n## What machine are you targetting?\n\n# Brainf*ck\n\n## What is brainfuck\n\n### Bank of registers\n\n### Register pointer\n\n### Instructions\n\n\n\n---\n\n\n", "file_path": "docs/presentation.md", "rank": 63, "score": 2.491883973021473 }, { "content": " }\n\n\n\n #[test]\n\n fn 
machine_should_have_halted_when_there_are_no_instructions_left() {\n\n let instructions = [\n\n Command::Increment,\n\n Command::Increment,\n\n Command::JumpAhead,\n\n Command::Decrement,\n\n Command::JumpBack,\n\n ];\n\n let machine = BuildMachine::with(&instructions)\n\n .instruction_pointer_at(5)\n\n .build();\n\n\n\n assert!(machine.halted());\n\n }\n\n}\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 64, "score": 1.603770980506734 }, { "content": " CellUnderflow,\n\n UnmatchedJumpAhead,\n\n UnmatchedJumpBack,\n\n NoInput,\n\n InputError,\n\n NoByteRead,\n\n NoOutput,\n\n OutputError,\n\n NoByteWritten,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Eq, Clone, Copy)]\n\npub enum Command {\n\n IncrementPointer,\n\n DecrementPointer,\n\n Increment,\n\n Decrement,\n\n JumpAhead,\n\n JumpBack,\n\n Read,\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 65, "score": 1.5848151069816345 }, { "content": " .instruction_pointer_at(5)\n\n .build()\n\n );\n\n } else {\n\n assert!(false);\n\n }\n\n }\n\n\n\n #[test]\n\n fn machine_should_not_have_halted_when_there_are_instructions_left() {\n\n let instructions = [\n\n Command::Increment,\n\n Command::Increment,\n\n Command::JumpAhead,\n\n Command::Decrement,\n\n Command::JumpBack,\n\n ];\n\n let machine = BuildMachine::with(&instructions).build();\n\n\n\n assert!(!machine.halted());\n", "file_path": "compiler/src/brnfck/machine.rs", "rank": 66, "score": 1.5380979711777643 }, { "content": "# bergen\n\nA mountain inspired programming language.\n\n\n\n## Description\n\nCreated during [Booster Conference 2018][conference] in Bergen, Norway, `bergen` is a dialect of [brainf*ck][].\n\n\n\nMountain ranges our used to represent the different operators. 
Below you can find a list.\n\n\n\n### Mountain Ranges\n\n#### `>`\n\n> increment the data pointer.\n\n\n\n```\n\n /\\\n\n / \\\n\n/ \\\n\n```\n\n\n\n#### `<`\n\n> decrement the data pointer.\n\n\n\n```\n\n /\\/\\\n\n / \\\n\n/ \\\n\n```\n\n\n\n#### `+`\n\n> increment the byte at the data pointer.\n\n\n\n```\n\n\n\n /\\\n\n/ \\\n\n```\n\n\n\n#### `-`\n\n> decrement the byte at the data pointer.\n\n\n\n```\n\n\n\n /\\/\\\n\n/ \\\n\n```\n\n\n\n#### `[`\n\n> if the byte at the data pointer is zero, then instead of moving the instruction pointer forward to the next command, jump it forward to the command after the matching `]` command.\n\n\n\n```\n\n /\\\n\n / \\/\\\n\n/ \\\n\n```\n\n\n\n#### `]`\n\n> if the byte at the data pointer is nonzero, then instead of moving the instruction pointer forward to the next command, jump it back to the command after the matching `[` command.\n\n\n\n```\n\n /\\\n\n /\\/ \\\n\n/ \\\n\n```\n\n\n\n#### `.`\n\n> output the byte at the data pointer.\n\n\n\n```\n\n/\\\n\n```\n\n\n\n#### `,`\n\n> accept one byte of input, storing its value in the byte at the data pointer.\n\n\n\n```\n\n /\\ /\\\n\n / \\/ \\\n\n/ \\\n\n```\n\n\n\n### States\n\nThe `bergen` parser can be in a few states while parsing input. 
Below we will\n\nshow the states and the allowed transitions.\n\n\n\n![States of the `bergen` parser](https://cdn.rawgit.com/dvberkel/bergen/f7e438ef/states.png)\n\n\n\n## Tools\n\nThe following tools are being implemented.\n\n\n\n* `bergen`: a interpreter.\n\n* `bergenc`: a bergen to brainf\\*ck compiler.\n\n\n\n[conference]: https://2018.boosterconf.no/\n\n[brainf*ck]: https://en.wikipedia.org/wiki/Brainfuck\n", "file_path": "README.md", "rank": 67, "score": 1.4779754947463708 }, { "content": "# Language\n\n\n\n* `>`\n\n* `<`\n\n* `+`\n\n* `-`\n\n* `.`\n\n* `,`\n\n* `[`\n\n* `]`\n\n\n\n???\n\n\n\n* `>` -- increment pointer\n\n* `<` -- decrement pointer\n\n* `+` -- increment register\n\n* `-` -- decrement register\n\n* `.` -- ASCII output\n\n* `,` -- ASCII input\n\n* `[` -- jump to `]` on zero\n\n* `]` -- jump to `[`\n\n\n\n---\n\n\n\n\n\n# Ook!\n\n\n\n> Ook! is a programming language designed for orang-utans.\n\n\n\n```plain\n\nOok.\n\nOok?\n\nOok!\n\n```\n\n\n\n--\n\n\n\n## Map into Brainf*ck\n\n\n\n```\n\n Ook. Ook? → >\n\n Ook? Ook. → <\n\n Ook. Ook. → +\n\n Ook! Ook! → -\n\n Ook. Ook! → ,\n\n Ook! Ook. → . \n\n Ook! Ook? → [\n\n Ook? Ook! → ]\n\n```\n\n\n\n???\n\n\n\n# Dialects\n\n\n\n> Since the word \"ook\" can convey entire ideas, emotions, and abstract thoughts depending on the nuances of inflection, Ook! has no need of comments. The code itself serves perfectly well to describe in detail what it does and how it does it. Provided you are an orang-utan. \n\n\n\n\n\n---\n\n\n\n# Bergen\n\n\n\n```plain\n\n /\\\n\n / \\\n\n/ \\ → > /\\ → .\n\n\n\n /\\/\\ /\\ /\\\n\n / \\ / \\/ \\\n\n/ \\ → < / \\ → ,\n\n\n\n\n\n /\\\n\n /\\ / \\/\\\n\n/ \\ → + / \\ → [\n\n\n\n\n\n /\\\n\n /\\/\\ /\\/ \\\n\n/ \\ → - / \\ → ] \n\n```\n\n\n\n???\n\n\n\n# Mapping of Bergen onto brainf*ck\n\n# Marvel at the symmetry\n\n\n\n---\n\n\n\n# Create Your Own Language Guide™\n\n\n\n--\n\n\n\n1. Have a interesting idea\n\n\n\n--\n\n\n\n2. Pick a target \n\n\n\n--\n\n\n\n3. 
Map your idea onto target\n\n\n\n--\n\n\n\n4. Build a translator\n\n\n\n--\n\n\n\n5. [Optionally] build an interpreter\n\n\n\n--\n\n\n\n6. ...\n\n\n\n--\n\n\n\n7. Fun! \n", "file_path": "docs/presentation.md", "rank": 68, "score": 0.911475466385439 } ]
Rust
impl/rust/lisla_lang/src/tree/mod.rs
shohei909/Lisla
9267d0792d6d4f633dec1c079d2a39bc5e4719c1
use tag::*; use std::fmt::Debug; use from::error::*; use from::*; use ::error::*; use leaf::*; #[derive(Debug, Clone, Eq, PartialEq)] pub enum ArrayTree<LeafType:Leaf> { Array(ArrayBranch<WithTag<ArrayTree<LeafType>>>), Leaf(LeafType), } impl FromArrayTree for ArrayTree<StringLeaf> { type Parameters = (); #[allow(unused_variables)] fn from_array_tree( config:&FromArrayTreeConfig, tree:WithTag<ArrayTree<StringLeaf>>, parameters: (), errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<WithTag<ArrayTree<StringLeaf>>, ()> { Result::Ok(tree) } #[allow(unused_variables)] fn from_array_tree_array( config:&FromArrayTreeConfig, array:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> Result<WithTag<Self>, ()> { Result::Ok( WithTag { data: ArrayTree::Array(array), tag, } ) } #[allow(unused_variables)] fn from_array_tree_string( config:&FromArrayTreeConfig, leaf:StringLeaf, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> Result<WithTag<Self>, ()> { Result::Ok( WithTag { data: ArrayTree::Leaf(leaf), tag, } ) } #[allow(unused_variables)] fn match_array_tree_array( config:&FromArrayTreeConfig, array:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>, tag:Tag, parameters: Self::Parameters, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> bool { true } #[allow(unused_variables)] fn match_array_tree_string( config:&FromArrayTreeConfig, leaf:StringLeaf, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> bool { false } } impl<LeafType:Leaf> From<ArrayBranch<WithTag<ArrayTree<LeafType>>>> for ArrayTree<LeafType> { fn from(data:ArrayBranch<WithTag<ArrayTree<LeafType>>>) -> Self { ArrayTree::Array(data) } } impl<LeafType:Leaf> ArrayTree<LeafType> { pub fn to_branch(self) -> Option<ArrayBranch<WithTag<ArrayTree<LeafType>>>> { match self { ArrayTree::Array(branch) => Option::Some(branch), ArrayTree::Leaf(_) => Option::None, } } pub fn 
to_leaf(self) -> Option<LeafType> { match self { ArrayTree::Array(_) => Option::None, ArrayTree::Leaf(leaf) => Option::Some(leaf), } } } pub trait Tree<LeafType> {} impl<LeafType:Leaf> Tree<LeafType> for ArrayTree<LeafType> {} #[derive(Debug, Clone, Eq, PartialEq)] pub struct ArrayBranch<TreeType:Debug + Clone> { pub vec: Vec<TreeType>, } impl<TreeType:Debug + Clone> ArrayBranch<TreeType> { #[allow(unused_variables)] pub fn shift( &mut self, config: &FromArrayTreeConfig, tag: &Tag, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<TreeType, ()> { if self.vec.len() == 0 { let range = tag.content_range.clone(); errors.push(FromArrayTreeError::from(NotEnoughArgumentsError{ range })); Result::Err(()) } else { Result::Ok( self.vec.remove(0) ) } } #[allow(unused_variables)] pub fn split_off( &mut self, config: &FromArrayTreeConfig, len:usize, tag: &Tag, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<Self, ()> { if self.vec.len() < len { let range = tag.content_range.clone(); errors.push(FromArrayTreeError::from(NotEnoughArgumentsError{ range })); Result::Err(()) } else { Result::Ok( Self { vec: self.vec.split_off(len) } ) } } pub fn split_off_rest( &mut self, config: &FromArrayTreeConfig, tag: &Tag, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<Self, ()> { let len = self.vec.len(); self.split_off(config, len, tag, errors) } #[allow(unused_variables)] pub fn finish( self, config: &FromArrayTreeConfig, tag: &Tag, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<(), ()> { if self.vec.len() == 0 { Result::Ok(()) } else { let range = tag.content_range.clone(); errors.push(FromArrayTreeError::from(TooManyArgumentsError{ range })); Result::Err(()) } } } impl<T:Debug + Clone + FromArrayTree> FromArrayTree for ArrayBranch<WithTag<T>> { type Parameters = T::Parameters; fn from_array_tree_array( config:&FromArrayTreeConfig, array:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>, tag:Tag, parameters: T::Parameters, errors: &mut 
ErrorWrite<FromArrayTreeError>, ) -> Result<WithTag<ArrayBranch<WithTag<T>>>, ()> { let mut vec = Vec::new(); for element in array.vec { match FromArrayTree::from_array_tree( config, element, parameters.clone(), errors ) { Result::Ok(data) => { vec.push(data); } Result::Err(()) => {} } if config.continuous_error_limit < errors.len() { return Result::Err(()); } } Result::Ok( WithTag { data: ArrayBranch { vec }, tag } ) } #[allow(unused_variables)] fn from_array_tree_string( config:&FromArrayTreeConfig, leaf:StringLeaf, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> Result<WithTag<Self>, ()> { errors.push( FromArrayTreeError::from( CantBeStringError { range: tag.content_range } ) ); Result::Err(()) } #[allow(unused_variables)] fn match_array_tree_array( config:&FromArrayTreeConfig, array:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>, tag:Tag, parameters: T::Parameters, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> bool { true } #[allow(unused_variables)] fn match_array_tree_string( config:&FromArrayTreeConfig, leaf:StringLeaf, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> bool { false } }
use tag::*; use std::fmt::Debug; use from::error::*; use from::*; use ::error::*; use leaf::*; #[derive(Debug, Clone, Eq, PartialEq)] pub enum ArrayTree<LeafType:Leaf> { Array(ArrayBranch<WithTag<ArrayTree<LeafType>>>), Leaf(LeafType), } impl FromArrayTree for ArrayTree<StringLeaf> { type Parameters = (); #[allow(unused_variables)] fn from_array_tree( config:&FromArrayTreeConfig, tree:WithTag<ArrayTree<StringLeaf>>, parameters: (), errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<WithTag<ArrayTree<StringLeaf>>, ()> { Result::Ok(tree) } #[allow(unused_variables)] fn from_array_tree_array( config:&FromArrayTreeConfig, array:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> Result<WithTag<Self>, ()> { Result::Ok( WithTag { data: ArrayTree::Array(array), tag, } ) } #[allow(unused_variables)] fn from_array_tree_string( config:&FromArrayTreeConfig, leaf:StringLeaf, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> Result<WithTag<Self>, ()> { Result::Ok( WithTag { data: ArrayTree::Leaf(leaf), tag, } ) } #[allow(unused_variables)] fn match_array_tree_array( config:&FromArrayTreeConfig, array:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>, tag:Tag, parameters: Self::Parameters, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> bool { true } #[allow(unused_variables)] fn match_array_tree_string( config:&FromArrayTreeConfig, leaf:StringLeaf, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> bool { false } } impl<LeafType:Leaf> From<ArrayBranch<WithTag<ArrayTree<LeafType>>>> for Arr
errors.push(FromArrayTreeError::from(NotEnoughArgumentsError{ range })); Result::Err(()) } else { Result::Ok( Self { vec: self.vec.split_off(len) } ) } } pub fn split_off_rest( &mut self, config: &FromArrayTreeConfig, tag: &Tag, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<Self, ()> { let len = self.vec.len(); self.split_off(config, len, tag, errors) } #[allow(unused_variables)] pub fn finish( self, config: &FromArrayTreeConfig, tag: &Tag, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<(), ()> { if self.vec.len() == 0 { Result::Ok(()) } else { let range = tag.content_range.clone(); errors.push(FromArrayTreeError::from(TooManyArgumentsError{ range })); Result::Err(()) } } } impl<T:Debug + Clone + FromArrayTree> FromArrayTree for ArrayBranch<WithTag<T>> { type Parameters = T::Parameters; fn from_array_tree_array( config:&FromArrayTreeConfig, array:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>, tag:Tag, parameters: T::Parameters, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<WithTag<ArrayBranch<WithTag<T>>>, ()> { let mut vec = Vec::new(); for element in array.vec { match FromArrayTree::from_array_tree( config, element, parameters.clone(), errors ) { Result::Ok(data) => { vec.push(data); } Result::Err(()) => {} } if config.continuous_error_limit < errors.len() { return Result::Err(()); } } Result::Ok( WithTag { data: ArrayBranch { vec }, tag } ) } #[allow(unused_variables)] fn from_array_tree_string( config:&FromArrayTreeConfig, leaf:StringLeaf, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> Result<WithTag<Self>, ()> { errors.push( FromArrayTreeError::from( CantBeStringError { range: tag.content_range } ) ); Result::Err(()) } #[allow(unused_variables)] fn match_array_tree_array( config:&FromArrayTreeConfig, array:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>, tag:Tag, parameters: T::Parameters, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> bool { true } #[allow(unused_variables)] fn 
match_array_tree_string( config:&FromArrayTreeConfig, leaf:StringLeaf, tag:Tag, parameters: Self::Parameters, errors:&mut ErrorWrite<FromArrayTreeError> ) -> bool { false } }
ayTree<LeafType> { fn from(data:ArrayBranch<WithTag<ArrayTree<LeafType>>>) -> Self { ArrayTree::Array(data) } } impl<LeafType:Leaf> ArrayTree<LeafType> { pub fn to_branch(self) -> Option<ArrayBranch<WithTag<ArrayTree<LeafType>>>> { match self { ArrayTree::Array(branch) => Option::Some(branch), ArrayTree::Leaf(_) => Option::None, } } pub fn to_leaf(self) -> Option<LeafType> { match self { ArrayTree::Array(_) => Option::None, ArrayTree::Leaf(leaf) => Option::Some(leaf), } } } pub trait Tree<LeafType> {} impl<LeafType:Leaf> Tree<LeafType> for ArrayTree<LeafType> {} #[derive(Debug, Clone, Eq, PartialEq)] pub struct ArrayBranch<TreeType:Debug + Clone> { pub vec: Vec<TreeType>, } impl<TreeType:Debug + Clone> ArrayBranch<TreeType> { #[allow(unused_variables)] pub fn shift( &mut self, config: &FromArrayTreeConfig, tag: &Tag, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<TreeType, ()> { if self.vec.len() == 0 { let range = tag.content_range.clone(); errors.push(FromArrayTreeError::from(NotEnoughArgumentsError{ range })); Result::Err(()) } else { Result::Ok( self.vec.remove(0) ) } } #[allow(unused_variables)] pub fn split_off( &mut self, config: &FromArrayTreeConfig, len:usize, tag: &Tag, errors: &mut ErrorWrite<FromArrayTreeError>, ) -> Result<Self, ()> { if self.vec.len() < len { let range = tag.content_range.clone();
random
[ { "content": "pub trait Leaf : Debug + Clone + Eq + PartialEq {\n\n}\n", "file_path": "impl/rust/lisla_lang/src/leaf/mod.rs", "rank": 0, "score": 241863.12455578276 }, { "content": "pub fn equals(lisla: &WithTag<ArrayTree<StringLeaf>>, json: &Value, path: &str, stack: &mut Vec<usize>) {\n\n match (&lisla.data, json) {\n\n (&ArrayTree::Array(ref s), &Value::Array(ref j)) => {\n\n assert!(s.vec.len() == j.len(),\n\n \"unmatched array length({}:{:?}): {:?} {:?}\",\n\n path,\n\n stack,\n\n s.vec.len(),\n\n j.len());\n\n\n\n for (i, (sc, jc)) in s.vec.iter().zip(j.iter()).enumerate() {\n\n stack.push(i);\n\n equals(sc, jc, path, stack);\n\n stack.pop();\n\n }\n\n }\n\n\n\n (&ArrayTree::Leaf(ref s), &Value::String(ref j)) => {\n\n assert!(s.string.as_str() == j.as_str(),\n\n \"unmatched string({}:{:?}): {:?} {:?}\",\n", "file_path": "impl/rust/lisla_lang/tests/lib.rs", "rank": 1, "score": 195076.8617848865 }, { "content": "pub fn is_blacklist_whitespace(character: char) -> bool {\n\n match character {\n\n '\\u{000B}' |\n\n '\\u{000C}' |\n\n '\\u{0085}' |\n\n '\\u{00A0}' |\n\n '\\u{1680}' |\n\n '\\u{2000}'...'\\u{200A}' |\n\n '\\u{2028}' |\n\n '\\u{2029}' |\n\n '\\u{202F}' |\n\n '\\u{205F}' |\n\n '\\u{3000}' => true,\n\n _ => false,\n\n }\n\n}\n", "file_path": "impl/rust/lisla_lang/src/parse/space.rs", "rank": 2, "score": 159809.8518438302 }, { "content": "// lisla_core::error::Errorを実装する\n\npub fn impl_error(ast: &DeriveInput) -> Tokens {\n\n let error = match ast.body {\n\n Body::Enum(ref _enum) =>\n\n impl_error_for_enum(ast, _enum),\n\n\n\n Body::Struct(ref _struct) =>\n\n impl_error_for_struct(ast, _struct),\n\n };\n\n\n\n quote! 
{\n\n #error\n\n }\n\n}\n\n\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 3, "score": 154001.35617009044 }, { "content": "fn read_to_string(stream:&mut Read, errors:&mut ErrorWrite<ParseError>) -> Result<String, ()> {\n\n let mut string = String::new();\n\n for (index, char_result) in stream.chars().enumerate() {\n\n match char_result {\n\n Result::Ok(character) => string.push(character),\n\n Result::Err(char_error) => {\n\n // IOエラーまたは、不正なUTF8\n\n let range = Range::with_length(index, 1);\n\n let error = match char_error {\n\n CharsError::NotUtf8 => \n\n ParseError::from(\n\n NotUtf8Error { range }\n\n ),\n\n\n\n CharsError::Other(error) => {\n\n let reason = IoErrorReason{ error };\n\n ParseError::from(\n\n IoError { range, reason }\n\n )\n\n }\n", "file_path": "impl/rust/lisla_lang/src/parse/mod.rs", "rank": 4, "score": 153887.96881357132 }, { "content": "pub fn impl_tuple(ast: &DeriveInput) -> Tokens {\n\n let name = &ast.ident;\n\n let mut parameters = Tokens::new();\n\n\n\n if let &Body::Struct(VariantData::Struct(ref fields)) = &ast.body {\n\n for field in fields {\n\n if let Option::Some(ref ident) = field.ident {\n\n let mut children = Tokens::new();\n\n let mut get_kind = ShiftKind::Shift;\n\n for attr in &field.attrs {\n\n if AttrStyle::Outer == attr.style {\n\n match attr.value {\n\n MetaItem::List(ref ident, ref items) if (ident == \"lisla\") => {\n\n for item in items {\n\n match item {\n\n &NestedMetaItem::MetaItem(MetaItem::NameValue(ref ident, ref value)) if (ident == \"label\") => {\n\n children.append(\n\n quote!{ array_tree!(#value).data }\n\n );\n\n }\n", "file_path": "impl/rust/lisla_type_derive/src/_tuple.rs", "rank": 5, "score": 143673.28067101043 }, { "content": "pub fn impl_structure(ast: &DeriveInput) -> Tokens {\n\n quote! {\n\n \n\n }\n\n}\n", "file_path": "impl/rust/lisla_type_derive/src/structure.rs", "rank": 6, "score": 143673.28067101043 }, { "content": "pub fn impl_union(ast: &DeriveInput) -> Tokens {\n\n quote! 
{\n\n \n\n }\n\n}\n", "file_path": "impl/rust/lisla_type_derive/src/union.rs", "rank": 7, "score": 143673.28067101043 }, { "content": "pub fn impl_newtype(ast: &DeriveInput) -> Tokens {\n\n let name = &ast.ident;\n\n let option_value_type = Option::None;\n\n\n\n if let Body::Struct(VariantData::Struct(fields)) = ast.body {\n\n for field in fields {\n\n if let Option::Some(\"value\") = field.ident {\n\n option_value_type\n\n }\n\n }\n\n } else {\n\n panic!(\"#[derive(LislaNewtype)] is only defined for struct, not for enum or tuple.\");\n\n }\n\n\n\n let value_type = if Option::Some(value_type) = option_value_type {\n\n value_type\n\n } else {\n\n panic!(\"parameter 'value' is required\");\n\n };\n\n\n", "file_path": "impl/rust/lisla_type_derive/src/newtype.rs", "rank": 8, "score": 143673.28067101043 }, { "content": "#[proc_macro_derive(Error, attributes(message, code))]\n\npub fn derive_error(input: TokenStream) -> TokenStream {\n\n let s = input.to_string();\n\n let ast = syn::parse_derive_input(&s).unwrap();\n\n let implemention = impl_error(&ast);\n\n \n\n implemention.parse().unwrap()\n\n}\n", "file_path": "impl/rust/lisla_derive/src/lib.rs", "rank": 9, "score": 138851.22523475264 }, { "content": "#[proc_macro_derive(LislaTuple, attributes(lisla))]\n\npub fn derive_tuple(input: TokenStream) -> TokenStream {\n\n let s = input.to_string();\n\n let ast = syn::parse_derive_input(&s).unwrap();\n\n let implemention = impl_tuple(&ast);\n\n \n\n implemention.parse().unwrap()\n\n}\n\n\n", "file_path": "impl/rust/lisla_type_derive/src/lib.rs", "rank": 10, "score": 136712.22945670062 }, { "content": "#[proc_macro_derive(LislaStruct, attributes(lisla))]\n\npub fn derive_structure(input: TokenStream) -> TokenStream {\n\n let s = input.to_string();\n\n let ast = syn::parse_derive_input(&s).unwrap();\n\n let implemention = impl_structure(&ast);\n\n \n\n implemention.parse().unwrap()\n\n}\n\n\n", "file_path": "impl/rust/lisla_type_derive/src/lib.rs", "rank": 11, "score": 
136712.22945670062 }, { "content": "#[proc_macro_derive(LislaNewtype, attributes(lisla))]\n\npub fn derive_newtype(input: TokenStream) -> TokenStream {\n\n let s = input.to_string();\n\n let ast = syn::parse_derive_input(&s).unwrap();\n\n let implemention = impl_newtype(&ast);\n\n \n\n implemention.parse().unwrap()\n\n}\n\n\n", "file_path": "impl/rust/lisla_type_derive/src/lib.rs", "rank": 12, "score": 136712.22945670062 }, { "content": "#[proc_macro_derive(LislaUnion, attributes(lisla))]\n\npub fn derive_union(input: TokenStream) -> TokenStream {\n\n let s = input.to_string();\n\n let ast = syn::parse_derive_input(&s).unwrap();\n\n let implemention = impl_union(&ast);\n\n\n\n implemention.parse().unwrap()\n\n}\n", "file_path": "impl/rust/lisla_type_derive/src/lib.rs", "rank": 13, "score": 136712.22945670062 }, { "content": "// エラーの種別がstructだったときの実装\n\nfn impl_error_for_struct(ast: &DeriveInput, variant:&VariantData) -> Tokens {\n\n let name = &ast.ident;\n\n let position = impl_position_for_struct(ast, variant);\n\n \n\n quote! {\n\n impl ::lisla_core::error::Error for #name {\n\n fn message(&self) -> String {\n\n // TODO:\n\n \"message\".to_string()\n\n }\n\n\n\n fn code(&self) -> ::lisla_core::error::ErrorCode {\n\n // TODO:\n\n ::lisla_core::error::ErrorCode{ value: \"code\".to_string() }\n\n }\n\n\n\n fn name(&self) -> String {\n\n \"#name\".to_string()\n\n }\n\n }\n\n\n\n #position\n\n }\n\n}\n\n\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 14, "score": 133894.00728571095 }, { "content": "// エラーのアイテムがenumだったときの実装\n\nfn impl_error_for_enum(ast: &DeriveInput, variants:&Vec<Variant>) -> Tokens {\n\n let name = &ast.ident;\n\n let mut arms:Tokens = Tokens::new();\n\n\n\n for variant in variants {\n\n let variant_name = &variant.ident;\n\n arms.append(\n\n quote!(&#name::#variant_name(ref error) => { error })\n\n )\n\n }\n\n\n\n let from_impl = impl_error_from_for_enum(ast, variants);\n\n\n\n quote! 
{\n\n // このエラー情報をそのまま伝搬する\n\n impl ::lisla_core::error::Error for #name {\n\n fn message(&self) -> String {\n\n ::lisla_core::error::ErrorHolder::child_error(self).message()\n\n }\n\n\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 15, "score": 133505.73372862188 }, { "content": "// エラーのアイテムがenumだったときのFromトレイト実装\n\nfn impl_error_from_for_enum(ast: &DeriveInput, variants:&Vec<Variant>) -> Tokens {\n\n let name = &ast.ident;\n\n let mut from_impl:Tokens = Tokens::new();\n\n\n\n for variant in variants {\n\n let variant_name = &variant.ident;\n\n if let &VariantData::Tuple(ref vec) = &variant.data {\n\n let len = vec.len();\n\n if len == 1 {\n\n let variant_type = &vec[0];\n\n from_impl.append(\n\n quote!(\n\n impl ::std::convert::From<#variant_type> for #name {\n\n fn from(error: #variant_type) -> Self {\n\n #name::#variant_name(error)\n\n }\n\n }\n\n )\n\n )\n\n } else {\n\n panic!(\"VariantData of {} arguments length must be 1, but actual {:?}\", name, len);\n\n };\n\n } else {\n\n panic!(\"VariantData of {} must be tuple, but actual {:?}\", name, variant);\n\n };\n\n }\n\n\n\n from_impl\n\n}\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 16, "score": 133505.68985549576 }, { "content": "pub trait ErrorWrite<Error> {\n\n fn len(&self) -> usize;\n\n fn push(&mut self, data:Error);\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Errors<Error: Debug + Clone> {\n\n pub errors: Vec<Error>,\n\n}\n\n\n\nimpl<Error: Debug + Clone> Errors<Error> {\n\n pub fn new() -> Self {\n\n Errors{ errors:Vec::new() }\n\n }\n\n}\n\n\n\nimpl<Error: Debug + Clone> ErrorWrite<Error> for Errors<Error> {\n\n fn len(&self) -> usize {\n\n self.errors.len()\n\n }\n", "file_path": "impl/rust/lisla_core/src/error/mod.rs", "rank": 17, "score": 132380.74426984013 }, { "content": "pub trait Error : Position {\n\n fn message(&self) -> String;\n\n fn code(&self) -> ErrorCode;\n\n fn name(&self) -> String;\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, 
formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n let position_string = self.position_string();\n\n write!(\n\n formatter, \n\n \"{}: {} ({}:{})\", \n\n position_string, \n\n self.message(),\n\n self.name(),\n\n self.code().value,\n\n )?;\n\n\n\n Result::Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ErrorCode { pub value:String }\n\n\n", "file_path": "impl/rust/lisla_core/src/error/mod.rs", "rank": 19, "score": 130085.5146954103 }, { "content": "// エラー位置情報の実装生成\n\nfn impl_position_for_struct(ast: &DeriveInput, variant:&VariantData) -> Tokens {\n\n let name = &ast.ident;\n\n \n\n let mut range = quote!(Option::None);\n\n let mut source_map = quote!(Option::None);\n\n let mut file_path = quote!(Option::None);\n\n let mut project_root = quote!(Option::None);\n\n\n\n let fields = if let &VariantData::Struct(ref fields) = variant {\n\n fields\n\n } else {\n\n panic!(\"VariantData of {} must be struct, but actual {:?}\", name, variant);\n\n };\n\n\n\n for field in fields {\n\n if let Option::Some(ref ident) = field.ident {\n\n match ident.as_ref() {\n\n \"range\" => range = get_option_value(ast, \"range\", &field.ty),\n\n \"source_map\" => source_map = get_option_value(ast, \"source_map\", &field.ty),\n\n \"file_path\" => file_path = get_option_value(ast, \"file_path\", &field.ty),\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 20, "score": 125444.09584283494 }, { "content": "pub trait FromArrayTree : Sized + Clone + Debug {\n\n type Parameters : Clone + Debug;\n\n\n\n fn from_array_tree(\n\n config:&FromArrayTreeConfig,\n\n tree:WithTag<ArrayTree<StringLeaf>>,\n\n parameters: Self::Parameters,\n\n errors:&mut ErrorWrite<FromArrayTreeError>\n\n ) -> Result<WithTag<Self>, ()> {\n\n return match tree.data {\n\n ArrayTree::Array(array) => {\n\n Self::from_array_tree_array(\n\n config,\n\n array,\n\n tree.tag,\n\n parameters,\n\n errors\n\n )\n\n }\n\n ArrayTree::Leaf(string) => {\n", "file_path": 
"impl/rust/lisla_lang/src/from/mod.rs", "rank": 21, "score": 115792.36797096008 }, { "content": "pub trait GetTag {\n\n fn get_tag() -> Tag;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Tag {\n\n pub content_range: Option<Range>,\n\n pub leading_space: Option<Space>,\n\n pub kind: Option<TagKind>,\n\n\n\n document:RefCell<DocumentState>,\n\n // type:RefCell<_>,\n\n}\n\n\n\nimpl Tag {\n\n pub fn new(leading_space:Option<Space>, content_range:Range, tag_kind:Option<TagKind>) -> Tag {\n\n Tag {\n\n leading_space,\n\n content_range: Option::Some(content_range),\n\n document: RefCell::new(DocumentState::None),\n", "file_path": "impl/rust/lisla_lang/src/tag/mod.rs", "rank": 22, "score": 111462.02347354211 }, { "content": "pub trait ErrorHolder {\n\n fn child_error(&self) -> &Error;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ResumableResult<Data, Error> where\n\n Data: Clone + Debug, \n\n Error: Clone + Debug\n\n{\n\n pub data: Option<Data>,\n\n pub errors: Errors<Error>,\n\n}\n\n\n\nimpl<Data, Error> ResumableResult<Data, Error> where\n\n Data: Clone + Debug, \n\n Error: Clone + Debug\n\n{\n\n pub fn new(data:Option<Data>, errors:Errors<Error>) -> Self {\n\n ResumableResult{ data, errors }\n\n }\n", "file_path": "impl/rust/lisla_core/src/error/mod.rs", "rank": 23, "score": 110671.23808339771 }, { "content": "enum ShiftKind {\n\n Shift,\n\n Spreads(Option<Lit>),\n\n}\n", "file_path": "impl/rust/lisla_type_derive/src/_tuple.rs", "rank": 24, "score": 110505.52657755293 }, { "content": "pub trait Position {\n\n fn range(&self)->Option<&Range>;\n\n fn source_map(&self)->Option<&SourceMap>;\n\n fn file_path(&self)->Option<&FilePathFromProjectRoot>;\n\n fn project_root(&self)->Option<&ProjectRootPath>;\n\n\n\n fn position_string(&self) -> String {\n\n let result = format!(\n\n \"{}:{}\",\n\n match (self.project_root(), self.file_path()) {\n\n (Option::Some(project_root), Option::Some(file_path)) =>\n\n format!(\"{}/{}\", project_root.value, file_path.value),\n\n 
\n\n (Option::None, Option::Some(file_path)) =>\n\n format!(\"project_root://{}\", file_path.value),\n\n \n\n (Option::Some(project_root), Option::None) =>\n\n format!(\"{}/**\", project_root.value),\n\n \n\n (Option::None, Option::None) =>\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 25, "score": 104780.56984733218 }, { "content": "pub trait StringNewtype : \n\n From<String> + \n\n Clone +\n\n Debug +\n\n Ord\n\n{\n\n fn new(value:String) -> Self;\n\n}\n\n\n", "file_path": "impl/rust/lisla_core/src/data/newtype.rs", "rank": 26, "score": 103266.81894693672 }, { "content": "pub trait I64Newtype : \n\n From<i64> + \n\n Clone +\n\n Copy +\n\n Debug +\n\n Ord\n\n{\n\n fn new(value:String) -> Self;\n\n}\n", "file_path": "impl/rust/lisla_core/src/data/newtype.rs", "rank": 27, "score": 103266.81894693672 }, { "content": "use lisla_core::data::position::*;\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\npub enum FromArrayTreeError {\n\n TooManyArguments(TooManyArgumentsError),\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"too many arguments\"]\n\n#[code = \"eb24aba7-40a9-48e2-a698-50e72b0a0c95\"]\n\npub struct TooManyArgumentsError {\n\n range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"not enough arguments\"]\n\n#[code = \"f7725f42-ca00-4388-8106-55cef8ee041d\"]\n\npub struct NotEnoughArgumentsError {\n\n range: Range,\n\n}\n", "file_path": "impl/rust/lisla_type/src/error/mod.rs", "rank": 28, "score": 89035.14675863618 }, { "content": "// Option型の関数でラップしたTokensを生成\n\nfn get_option_value(ast: &DeriveInput, field_name:&str, ty:&Ty) -> Tokens {\n\n if let &Ty::Path(_, ref path) = ty {\n\n if path.segments.len() == 1 {\n\n let field_ident = Ident::new(field_name.to_string());\n\n if path.segments[0].ident.as_ref() == \"Option\" {\n\n quote!{ self.#field_ident.as_ref() }\n\n } else {\n\n quote!{ Option::Some(&self.#field_ident) }\n\n }\n\n } else {\n\n panic!(\"type path length of 
`{}`.`{}` must be 1, but actual `{}`\", ast.ident, field_name, path.segments.len());\n\n }\n\n } else {\n\n panic!(\"type of `{}`.`{}` must be Path, but actual `{:?}`\", ast.ident, field_name, ty);\n\n }\n\n}\n\n\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 29, "score": 86813.35434896527 }, { "content": "#[derive(Debug, Clone)]\n\nenum SpaceContextKind {\n\n Normal(NormalContext),\n\n Comment(CommentContext),\n\n}\n\n\n", "file_path": "impl/rust/lisla_lang/src/parse/space.rs", "rank": 30, "score": 77160.18554759183 }, { "content": "#[test]\n\nfn test_macros() {\n\n let tree = array_tree!(\"test\");\n\n}\n", "file_path": "impl/rust/lisla_lang/tests/macros/mod.rs", "rank": 31, "score": 72512.85217713822 }, { "content": "#[test]\n\nfn test_basic() {\n\n let path = format!(\"{}{}\", TEST_CASES_PATH, \"basic\");\n\n let dir = fs::read_dir(path).unwrap();\n\n let parser = Parser::new();\n\n\n\n for result in dir {\n\n let entry = result.unwrap();\n\n let metadata = entry.metadata().unwrap();\n\n if metadata.is_dir() {\n\n continue;\n\n }\n\n\n\n let mut file = fs::File::open(entry.path()).unwrap();\n\n let mut string = String::new();\n\n file.read_to_string(&mut string).unwrap();\n\n\n\n let case_data = parser.parse(&mut string).unwrap();\n\n let mut into_iter = case_data.data.vec.into_iter();\n\n\n\n let lisla_string = into_iter.next().unwrap().data.to_leaf().unwrap().string;\n", "file_path": "impl/rust/lisla_lang/tests/parse/mod.rs", "rank": 32, "score": 72512.85217713822 }, { "content": "#[test]\n\nfn test_invalid_nonfatal() {\n\n let path = format!(\"{}{}\", TEST_CASES_PATH, \"advanced/invalid/nonfatal\");\n\n let dir = fs::read_dir(path).unwrap();\n\n let parser = Parser::new();\n\n\n\n for result in dir {\n\n let entry = result.unwrap();\n\n let metadata = entry.metadata().unwrap();\n\n if metadata.is_dir() {\n\n continue;\n\n }\n\n\n\n let path = entry.path();\n\n let name = String::from(path.to_str().unwrap());\n\n let mut file = 
fs::File::open(path).unwrap();\n\n let mut string = String::new();\n\n file.read_to_string(&mut string).unwrap();\n\n let result = parser.parse(&string).to_result();\n\n\n\n match result {\n\n Ok(data) => {\n\n panic!(\"data: {} : {:?}\", name, data);\n\n }\n\n Err(_) => {}\n\n }\n\n }\n\n}\n", "file_path": "impl/rust/lisla_lang/tests/parse/mod.rs", "rank": 33, "score": 71578.25295245652 }, { "content": "fn remove_content_indent(\n\n start_index: usize,\n\n mut string:String,\n\n errors:&mut ErrorWrite<ParseError>\n\n) -> String {\n\n // 末尾のインデントを取得\n\n let mut last_indent_stack = String::new();\n\n loop {\n\n match string.pop() {\n\n Option::Some(character) => {\n\n match character {\n\n '\\t' | ' ' => {\n\n last_indent_stack.push(character);\n\n }\n\n '\\n' => {\n\n // 末尾のLFを結果に含めない\n\n if let Option::Some(character) = string.pop() {\n\n // CRLFで1つの改行とみなす\n\n if character != '\\r' {\n\n // CRでなければ、戻す\n", "file_path": "impl/rust/lisla_lang/src/parse/string.rs", "rank": 34, "score": 71578.25295245652 }, { "content": "fn reverse(string:String) -> String {\n\n string.chars().rev().collect()\n\n}\n", "file_path": "impl/rust/lisla_lang/src/parse/string.rs", "rank": 35, "score": 68143.9690578654 }, { "content": "use ::lisla_lang::tag::*;\n\nuse ::lisla_lang::tree::*;\n\nuse ::types::core::*;\n\nuse ::lisla_lang::leaf::*;\n\nuse ::lisla_lang::from::error::*;\n\nuse ::lisla_core::error::*;\n\n\n\n#[derive(LislaNewtype)]\n\n#[derive(Debug, Clone)]\n\npub struct TypeArgument {\n\n pub value: ArrayTree<StringLeaf>\n\n}\n\n\n\n#[derive(LislaNewtype)]\n\n#[derive(Debug, Clone)]\n\npub struct TypePath {\n\n pub value: StringLeaf,\n\n}\n\n\n\n#[derive(LislaNewtype)]\n", "file_path": "impl/rust/lisla_type/src/types/lisla/_type.rs", "rank": 36, "score": 64803.59174602699 }, { "content": "#[derive(Debug, Clone)]\n\npub struct TypeName {\n\n pub value: StringLeaf,\n\n}\n\n\n\n#[derive(LislaUnion)]\n\n#[derive(Debug, Clone)]\n\npub enum TypeReferece {\n\n Primitive(TypePath),\n\n 
Generic(GenericTypeReferece),\n\n}\n\n\n\n#[derive(LislaTuple)]\n\n#[derive(Debug, Clone)]\n\npub struct GenericTypeReferece {\n\n pub name: WithTag<TypePath>,\n\n #[lisla(spreads_rest)]\n\n pub arguments: WithTag<ArrayBranch<WithTag<TypeArgument>>>,\n\n}\n\n\n\n#[derive(LislaTuple)]\n\n#[derive(Debug, Clone)]\n\npub struct TypeTypeParameterDeclaration {\n\n #[lisla(label = \"type\")]\n\n pub label0 : WithTag<Const>,\n\n pub name : WithTag<TypeName>,\n\n}\n", "file_path": "impl/rust/lisla_type/src/types/lisla/_type.rs", "rank": 37, "score": 64800.5119869158 }, { "content": "use ::lisla_lang::tag::*;\n\nuse ::lisla_lang::leaf::*;\n\nuse ::lisla_lang::from::*;\n\nuse ::lisla_lang::from::error::*;\n\nuse ::lisla_lang::tree::*;\n\nuse ::lisla_core::error::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Const {\n\n pub value: ArrayTree<StringLeaf>,\n\n}\n\n\n\nimpl FromArrayTree for Const {\n\n type Parameters = (ArrayTree<StringLeaf>);\n\n \n\n #[allow(unused_variables)]\n\n fn from_array_tree(\n\n config:&FromArrayTreeConfig, \n\n tree: WithTag<ArrayTree<StringLeaf>>,\n\n parameters: Self::Parameters,\n", "file_path": "impl/rust/lisla_type/src/types/core.rs", "rank": 38, "score": 62337.87264102887 }, { "content": " data: ArrayTree::Array(tree),\n\n tag\n\n },\n\n parameters,\n\n errors\n\n )\n\n }\n\n\n\n #[allow(unused_variables)]\n\n fn match_array_tree_string(\n\n config:&FromArrayTreeConfig, \n\n leaf:StringLeaf, \n\n tag:Tag,\n\n parameters: Self::Parameters,\n\n errors: &mut ErrorWrite<FromArrayTreeError>,\n\n ) -> bool {\n\n Self::match_array_tree(\n\n config,\n\n WithTag {\n\n data: ArrayTree::Leaf(leaf),\n\n tag\n\n },\n\n parameters,\n\n errors\n\n )\n\n }\n\n}\n", "file_path": "impl/rust/lisla_type/src/types/core.rs", "rank": 39, "score": 62337.024366319136 }, { "content": " fn match_array_tree(\n\n config:&FromArrayTreeConfig,\n\n tree:WithTag<ArrayTree<StringLeaf>>,\n\n parameters: Self::Parameters,\n\n errors:&mut ErrorWrite<FromArrayTreeError>\n\n ) -> 
bool {\n\n tree.data == parameters\n\n }\n\n\n\n #[allow(unused_variables)]\n\n fn match_array_tree_array(\n\n config:&FromArrayTreeConfig,\n\n tree:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>,\n\n tag:Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ErrorWrite<FromArrayTreeError>\n\n ) -> bool {\n\n Self::match_array_tree(\n\n config,\n\n WithTag {\n", "file_path": "impl/rust/lisla_type/src/types/core.rs", "rank": 40, "score": 62335.96784228474 }, { "content": " #[allow(unused_variables)]\n\n fn from_array_tree_string(\n\n config:&FromArrayTreeConfig, \n\n leaf:StringLeaf, \n\n tag:Tag,\n\n parameters: Self::Parameters,\n\n errors: &mut ErrorWrite<FromArrayTreeError>,\n\n ) -> Result<WithTag<Self>, ()> {\n\n Self::from_array_tree(\n\n config,\n\n WithTag {\n\n data: ArrayTree::Leaf(leaf),\n\n tag\n\n },\n\n parameters,\n\n errors\n\n )\n\n }\n\n\n\n #[allow(unused_variables)]\n", "file_path": "impl/rust/lisla_type/src/types/core.rs", "rank": 41, "score": 62333.78270061022 }, { "content": " \n\n #[allow(unused_variables)]\n\n fn from_array_tree_array(\n\n config:&FromArrayTreeConfig, \n\n tree:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>,\n\n tag:Tag,\n\n parameters: Self::Parameters,\n\n errors: &mut ErrorWrite<FromArrayTreeError>,\n\n ) -> Result<WithTag<Self>, ()> {\n\n Self::from_array_tree(\n\n config,\n\n WithTag {\n\n data: ArrayTree::Array(tree),\n\n tag\n\n },\n\n parameters,\n\n errors\n\n )\n\n }\n\n\n", "file_path": "impl/rust/lisla_type/src/types/core.rs", "rank": 42, "score": 62332.17182299311 }, { "content": " errors: &mut ErrorWrite<FromArrayTreeError>,\n\n ) -> Result<WithTag<Const>, ()> {\n\n if tree.data == parameters {\n\n Result::Ok(\n\n WithTag {\n\n data: Const { value: tree.data },\n\n tag: tree.tag,\n\n }\n\n )\n\n } else {\n\n errors.push(\n\n FromArrayTreeError::from(\n\n UnmatchedConstError {\n\n range: tree.tag.content_range\n\n }\n\n )\n\n );\n\n Result::Err(())\n\n }\n\n }\n", "file_path": 
"impl/rust/lisla_type/src/types/core.rs", "rank": 43, "score": 62330.45528979606 }, { "content": "pub mod core;\n\npub mod lisla;\n\npub mod standard;", "file_path": "impl/rust/lisla_type/src/types/mod.rs", "rank": 44, "score": 62317.87867854025 }, { "content": "pub mod _type;", "file_path": "impl/rust/lisla_type/src/types/lisla/mod.rs", "rank": 45, "score": 61483.43872369002 }, { "content": "", "file_path": "impl/rust/lisla_type/src/types/standard/mod.rs", "rank": 46, "score": 61475.95366620962 }, { "content": "use data::position::*;\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\npub enum FromArrayTreeError {\n\n CantBeString(CantBeStringError),\n\n CantBeArray(CantBeArrayError),\n\n NotEnoughArguments(NotEnoughArgumentsError),\n\n TooManyArguments(TooManyArgumentsError),\n\n UnmatchedConst(UnmatchedConstError),\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"can't be array\"]\n\n#[code = \"7a4886d9-0f6f-4c49-b9d0-a4d7fefc21c2\"]\n\npub struct CantBeArrayError {\n\n pub range: Option<Range>,\n\n}\n\n\n", "file_path": "impl/rust/lisla_lang/src/from/error.rs", "rank": 47, "score": 56476.4725986736 }, { "content": "#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"can't be string\"]\n\n#[code = \"6fc7339c-4709-4dd3-ad5b-1b544a5fc4b1\"]\n\npub struct CantBeStringError {\n\n pub range: Option<Range>,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"too many arguments\"]\n\n#[code = \"9270d030-db28-44c5-b448-dd8a6292688e\"]\n\npub struct TooManyArgumentsError {\n\n pub range: Option<Range>,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"not enough arguments\"]\n\n#[code = \"82d64f56-748b-45c7-b7bb-0660356fbe27\"]\n", "file_path": "impl/rust/lisla_lang/src/from/error.rs", "rank": 48, "score": 56472.66325998173 }, { "content": "pub struct NotEnoughArgumentsError {\n\n pub range: Option<Range>,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"unmatched 
label\"]\n\n#[code = \"ca06a2a2-7e82-4a48-b9c7-b88030bb7c72\"]\n\npub struct UnmatchedConstError {\n\n pub range: Option<Range>,\n\n}\n", "file_path": "impl/rust/lisla_lang/src/from/error.rs", "rank": 49, "score": 56472.19459592577 }, { "content": " fn code(&self) -> ::lisla_core::error::ErrorCode {\n\n ::lisla_core::error::ErrorHolder::child_error(self).code()\n\n }\n\n\n\n fn name(&self) -> String {\n\n ::lisla_core::error::ErrorHolder::child_error(self).name()\n\n }\n\n }\n\n\n\n // 子のエラーの位置情報をそのまま伝搬する\n\n impl ::lisla_core::data::position::Position for #name {\n\n fn range(&self)->Option<&Range> {\n\n ::lisla_core::error::ErrorHolder::child_error(self).range()\n\n }\n\n fn source_map(&self)->Option<&SourceMap> {\n\n ::lisla_core::error::ErrorHolder::child_error(self).source_map()\n\n }\n\n fn file_path(&self)->Option<&FilePathFromProjectRoot> {\n\n ::lisla_core::error::ErrorHolder::child_error(self).file_path()\n\n }\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 50, "score": 56469.939609761146 }, { "content": "use quote::Tokens;\n\nuse syn::*;\n\n\n\n\n\n// lisla_core::error::Errorを実装する\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 51, "score": 56469.52722861094 }, { "content": " fn project_root(&self)->Option<&ProjectRootPath> {\n\n ::lisla_core::error::ErrorHolder::child_error(self).project_root()\n\n }\n\n }\n\n\n\n // enumのErrorは、さらに子となるエラーを持つ\n\n impl ::lisla_core::error::ErrorHolder for #name {\n\n fn child_error(&self) -> &::lisla_core::error::Error {\n\n match self {\n\n #arms\n\n }\n\n }\n\n }\n\n\n\n #from_impl\n\n }\n\n}\n\n\n\n\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 52, "score": 56469.28511127082 }, { "content": " \"project_root\" => project_root = get_option_value(ast, \"project_root\", &field.ty),\n\n _ => ()\n\n }\n\n }\n\n }\n\n \n\n quote! 
{\n\n impl ::lisla_core::data::position::Position for #name {\n\n fn range(&self)->Option<&::lisla_core::data::position::Range> {\n\n #range\n\n }\n\n fn source_map(&self)->Option<&::lisla_core::data::position::SourceMap> {\n\n #source_map\n\n }\n\n fn file_path(&self)->Option<&::lisla_core::data::position::FilePathFromProjectRoot> {\n\n #file_path\n\n }\n\n fn project_root(&self)->Option<&::lisla_core::data::position::ProjectRootPath> {\n\n #project_root\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "impl/rust/lisla_derive/src/error.rs", "rank": 53, "score": 56467.75487516429 }, { "content": " tag:Tag,\n\n parameters: (),\n\n errors:&mut ErrorWrite<FromArrayTreeError>\n\n ) -> bool {\n\n false\n\n }\n\n\n\n #[allow(unused_variables)]\n\n fn match_array_tree_string(\n\n config:&FromArrayTreeConfig, \n\n data:StringLeaf, \n\n tag:Tag,\n\n parameters: (),\n\n errors: &mut ErrorWrite<FromArrayTreeError>,\n\n ) -> bool {\n\n true\n\n }\n\n}\n\n\n\nimpl Leaf for StringLeaf {\n\n}\n\n\n", "file_path": "impl/rust/lisla_lang/src/leaf/mod.rs", "rank": 54, "score": 56212.68227664942 }, { "content": "use tag::*;\n\nuse from::error::*;\n\nuse from::*;\n\nuse ::error::*;\n\nuse std::fmt::Debug;\n\nuse tree::*;\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq)]\n\npub struct StringLeaf {\n\n pub string: String\n\n}\n\n\n\nimpl StringLeaf {\n\n pub fn new (string:String) -> Self {\n\n StringLeaf { string }\n\n }\n\n}\n\n\n\nimpl From<String> for StringLeaf {\n\n fn from(string:String) -> Self {\n", "file_path": "impl/rust/lisla_lang/src/leaf/mod.rs", "rank": 55, "score": 56210.159509968886 }, { "content": " StringLeaf::new(string)\n\n }\n\n}\n\n\n\nimpl FromArrayTree for StringLeaf {\n\n type Parameters = ();\n\n\n\n #[allow(unused_variables)]\n\n fn from_array_tree_array(\n\n config:&FromArrayTreeConfig,\n\n tree:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>,\n\n tag:Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ErrorWrite<FromArrayTreeError>\n\n ) -> Result<WithTag<Self>, ()> 
{\n\n errors.push(\n\n FromArrayTreeError::from(\n\n CantBeArrayError {\n\n range: tag.content_range\n\n }\n", "file_path": "impl/rust/lisla_lang/src/leaf/mod.rs", "rank": 56, "score": 56202.8555886753 }, { "content": " )\n\n );\n\n Result::Err(())\n\n }\n\n\n\n #[allow(unused_variables)]\n\n fn from_array_tree_string(\n\n config:&FromArrayTreeConfig, \n\n data:StringLeaf, \n\n tag:Tag,\n\n parameters: (),\n\n errors: &mut ErrorWrite<FromArrayTreeError>,\n\n ) -> Result<WithTag<StringLeaf>, ()> {\n\n Result::Ok(WithTag{ data, tag })\n\n }\n\n\n\n #[allow(unused_variables)]\n\n fn match_array_tree_array(\n\n config:&FromArrayTreeConfig,\n\n tree:ArrayBranch<WithTag<ArrayTree<StringLeaf>>>,\n", "file_path": "impl/rust/lisla_lang/src/leaf/mod.rs", "rank": 57, "score": 56201.97783625309 }, { "content": "use data::position::*;\n\nuse tree::*;\n\nuse std::cell::RefCell;\n\nuse parse::error::ParseStringError;\n\nuse std::fmt::Debug;\n\nuse leaf::*;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct WithTag<T:Debug + Clone> {\n\n pub data: T,\n\n pub tag: Tag,\n\n}\n\n\n\nimpl<T:Debug + Clone + PartialEq> Eq for WithTag<T> {\n\n}\n\n\n\nimpl<T:Debug + Clone + PartialEq> PartialEq for WithTag<T> {\n\n fn eq(&self, other: &Self) -> bool {\n\n false //self.data == other.data\n\n }\n", "file_path": "impl/rust/lisla_lang/src/tag/mod.rs", "rank": 58, "score": 56129.81849864343 }, { "content": "#[derive(Debug, Clone)]\n\npub struct LeafTag {\n\n pub quote: Option<Quote>,\n\n pub raw_content: String,\n\n pub is_placeholder: bool,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct ArrayTag {\n\n pub footer_space: Option<Space>,\n\n}\n\n\n\n\n\n#[derive(Debug, Clone)]\n\npub enum DocumentState {\n\n None,\n\n Parsed(Result<Box<WithTag<ArrayBranch<WithTag<ArrayTree<StringLeaf>>>>>, ParseStringError>),\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "impl/rust/lisla_lang/src/tag/mod.rs", "rank": 59, "score": 56116.12379133486 }, { "content": "}\n\n\n\nimpl<T:Debug + Clone> WithTag<T> 
{\n\n pub fn into<U:Debug + Clone + From<T>>(self) -> WithTag<U> {\n\n WithTag {\n\n data: From::from(self.data),\n\n tag: self.tag\n\n }\n\n }\n\n}\n\n\n", "file_path": "impl/rust/lisla_lang/src/tag/mod.rs", "rank": 60, "score": 56114.95743664156 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub struct Comment {\n\n pub keeping:bool,\n\n pub is_document:bool,\n\n pub content:String,\n\n}\n\n\n\nimpl Comment {\n\n pub fn new() -> Comment {\n\n Comment {\n\n keeping: false,\n\n is_document: false,\n\n content: String::new()\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Quote {\n", "file_path": "impl/rust/lisla_lang/src/tag/mod.rs", "rank": 61, "score": 56111.017886236325 }, { "content": " kind: tag_kind\n\n }\n\n }\n\n\n\n pub fn empty() -> Tag {\n\n Tag {\n\n leading_space: Option::None,\n\n content_range: Option::None,\n\n document: RefCell::new(DocumentState::None),\n\n kind: Option::None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum TagKind {\n\n Leaf(LeafTag),\n\n Array(ArrayTag),\n\n}\n\n\n", "file_path": "impl/rust/lisla_lang/src/tag/mod.rs", "rank": 62, "score": 56110.32323642977 }, { "content": "pub struct Space {\n\n pub range: Range,\n\n pub lines: Vec<SpaceLine>,\n\n}\n\n\n\nimpl Space {\n\n pub fn add_indent(&mut self, character:char) {\n\n if let Option::Some(mut line) = self.lines.last_mut() {\n\n line.indent.push(character);\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SpaceLine {\n\n pub range:Range,\n\n pub indent:String,\n\n pub comment:Option<Comment>,\n\n pub newline:Option<NewlineKind>,\n\n}\n", "file_path": "impl/rust/lisla_lang/src/tag/mod.rs", "rank": 63, "score": 56109.39509937441 }, { "content": " pub kind: QuoteKind,\n\n pub count: usize,\n\n}\n\n\n\nimpl Quote {\n\n pub fn new(kind:QuoteKind) -> Self {\n\n Quote {\n\n kind,\n\n count: 1, \n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy)]\n\npub enum NewlineKind {\n\n CrLf,\n\n Lf,\n\n Cr,\n\n}\n\n\n", "file_path": 
"impl/rust/lisla_lang/src/tag/mod.rs", "rank": 64, "score": 56106.489353681165 }, { "content": "#[derive(Debug, Clone, Copy)]\n\npub enum QuoteKind {\n\n Single,\n\n Double,\n\n}\n\n\n\nimpl QuoteKind {\n\n pub fn character(&self) -> char {\n\n match self {\n\n &QuoteKind::Single => '\\'',\n\n &QuoteKind::Double => '\\\"',\n\n }\n\n }\n\n}\n", "file_path": "impl/rust/lisla_lang/src/tag/mod.rs", "rank": 65, "score": 56106.47643426659 }, { "content": "#![feature(attr_literals)]\n\n#![feature(use_extern_macros)]\n\n#![feature(custom_attribute)]\n\n\n\n#[macro_use]\n\nextern crate lisla_derive;\n\n\n\n#[macro_use]\n\nextern crate lisla_type_derive;\n\n\n\n#[macro_use]\n\nextern crate lisla_lang;\n\nextern crate lisla_core;\n\n\n\npub mod error;\n\npub mod types;\n", "file_path": "impl/rust/lisla_type/src/lib.rs", "rank": 66, "score": 56034.47967313024 }, { "content": "\n\nimpl Display for Range {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n write!(\n\n formatter, \n\n \"{}-{}\",\n\n self.start(),\n\n self.end(),\n\n )?;\n\n \n\n Result::Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SourceMap {\n\n pub ranges: Vec<SourceRange>,\n\n}\n\n\n\nimpl SourceMap {\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 67, "score": 55905.57818341332 }, { "content": " } else {\n\n 0\n\n };\n\n self.ranges.push(SourceRange{ inner_start, range });\n\n }\n\n}\n\n\n\nimpl Display for SourceMap {\n\n fn fmt(&self, formatter: &mut fmt::Formatter) -> Result<(), fmt::Error> {\n\n for range in &self.ranges {\n\n range.range.fmt(formatter)?;\n\n }\n\n Result::Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct SourceRange { \n\n pub inner_start: usize,\n\n pub range: Range,\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 68, "score": 55904.00811816558 }, { "content": "\n\n#[derive(Debug, Clone, Ord, Eq, PartialEq, PartialOrd)]\n\npub struct Range {\n\n start:usize,\n\n 
length:usize,\n\n}\n\n\n\nimpl Range {\n\n pub fn start(&self)->usize { self.start }\n\n pub fn len(&self)->usize { self.length }\n\n pub fn end(&self)->usize { self.start + self.length }\n\n\n\n pub fn with_end(start:usize, end:usize) -> Range {\n\n Range { start, length: end - start }\n\n }\n\n \n\n pub fn with_length(start:usize, length:usize) -> Range {\n\n Range { start, length }\n\n }\n\n}\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 69, "score": 55902.6800673427 }, { "content": "} \n\n\n\nimpl SourceRange {\n\n pub fn inner_end(&self) -> usize {\n\n self.inner_start + self.range.len()\n\n }\n\n}\n\n\n\n//#[derive(Debug, Newtype)]\n\npub struct FilePathFromProjectRoot {\n\n pub value: String\n\n}\n\n\n\n//#[derive(Debug, Newtype)]\n\npub struct ProjectRootPath {\n\n pub value: String\n\n}\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 70, "score": 55897.47432051689 }, { "content": " pub fn new() -> SourceMap {\n\n SourceMap { ranges:Vec::new() }\n\n }\n\n\n\n pub fn local(&self, local_range:&Range) -> SourceMap {\n\n let mut result = SourceMap::new();\n\n\n\n let local_start = local_range.start();\n\n let start_index = match self.ranges.binary_search_by(|range| range.inner_start.cmp(&local_start)) {\n\n Result::Ok(index) => index,\n\n Result::Err(0) => 0,\n\n Result::Err(index) => index - 1,\n\n };\n\n\n\n let local_end = local_range.end();\n\n\n\n for index in start_index..self.ranges.len() {\n\n let range = &self.ranges[index];\n\n \n\n if range.range.end() < local_end {\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 71, "score": 55895.415173511465 }, { "content": " result.add_range(\n\n range.range.start + local_start,\n\n range.range.end(),\n\n );\n\n } else {\n\n result.add_range(\n\n range.range.start + local_start, \n\n range.range.start + local_end,\n\n );\n\n break;\n\n }\n\n }\n\n\n\n result\n\n }\n\n\n\n pub fn add_range(&mut self, start:usize, end:usize) {\n\n let range = 
Range::with_end(start, end);\n\n let inner_start = if let Option::Some(ref last_range) = self.ranges.last() {\n\n last_range.inner_end()\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 72, "score": 55895.143164295725 }, { "content": "pub mod position;\n\npub mod newtype;\n", "file_path": "impl/rust/lisla_core/src/data/mod.rs", "rank": 73, "score": 55894.626267235835 }, { "content": "use std::fmt::{self, Display};\n\n\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 74, "score": 55894.37745314656 }, { "content": "use std::fmt::Debug;\n\n\n", "file_path": "impl/rust/lisla_core/src/data/newtype.rs", "rank": 75, "score": 55894.37745314656 }, { "content": " \"**\".to_string(),\n\n },\n\n match (self.source_map(), self.range()) {\n\n (Option::Some(source_map), Option::Some(range)) =>\n\n format!(\"{}\", source_map.local(&range)),\n\n \n\n (Option::None, Option::Some(range)) =>\n\n format!(\"{}\", range),\n\n \n\n (Option::Some(source_map), Option::None) =>\n\n format!(\"{}\", source_map),\n\n \n\n (Option::None, Option::None) =>\n\n \"\".to_string(),\n\n }\n\n );\n\n\n\n result\n\n }\n\n}\n", "file_path": "impl/rust/lisla_core/src/data/position.rs", "rank": 76, "score": 55890.594334062436 }, { "content": "use template::error::PlaceholderCompleteError;\n\nuse data::position::*;\n\nuse tag::*;\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\npub enum ParseStringError {\n\n Parse(ParseError),\n\n PlaceholderComplete(PlaceholderCompleteError),\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\npub enum ParseError {\n\n NotUtf8(NotUtf8Error),\n\n Io(IoError),\n\n \n\n UnclosedQuote(UnclosedQuoteError),\n\n UnclosedArray(UnclosedArrayError),\n\n \n", "file_path": "impl/rust/lisla_lang/src/parse/error.rs", "rank": 77, "score": 55444.1634566348 }, { "content": " \n\n fn push(&mut self, data:Error) {\n\n self.errors.push(data);\n\n }\n\n}\n\n\n\npub struct ErrorsWrapper<'a, Error> \n\n where Error: 'a\n\n{\n\n pub errors: 
&'a mut ErrorWrite<Error>,\n\n}\n\n\n\nimpl<'a, ErrorA, ErrorB> ErrorWrite<ErrorA> for ErrorsWrapper<'a, ErrorB> where \n\n ErrorB: From<ErrorA>\n\n{\n\n fn len(&self) -> usize {\n\n self.errors.len()\n\n }\n\n \n\n fn push(&mut self, data:ErrorA) {\n\n self.errors.push(ErrorB::from(data));\n\n }\n\n} \n", "file_path": "impl/rust/lisla_core/src/error/mod.rs", "rank": 78, "score": 55443.17917730574 }, { "content": "use super::*;\n\nuse data::position::Range;\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message(\"failed to bind variables to placeholder\")]\n\n#[code = \"2abc0deb-9fbb-4b11-9bcc-14a2a48105a9\"]\n\npub struct PlaceholderCompleteError {\n\n pub placeholder: Placeholder,\n\n pub range:Option<Range>,\n\n}\n", "file_path": "impl/rust/lisla_lang/src/template/error.rs", "rank": 79, "score": 55441.56123123756 }, { "content": " pub error: ::std::io::Error,\n\n}\n\n\n\nimpl Clone for IoErrorReason {\n\n fn clone(&self) -> IoErrorReason {\n\n IoErrorReason {\n\n error: ::std::io::Error::new(\n\n self.error.kind(),\n\n ::std::error::Error::description(&self.error)\n\n )\n\n }\n\n }\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"non utf8 character found\"]\n\n#[code = \"a2c38664-be73-47da-88f1-87107ae9f862\"]\n\npub struct NotUtf8Error {\n\n pub range: Range,\n", "file_path": "impl/rust/lisla_lang/src/parse/error.rs", "rank": 80, "score": 55439.35752711128 }, { "content": "#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"unclosed array found\"]\n\n#[code = \"c4d2ec90-3b8f-4a93-b4c8-bccd2b3ebb40\"]\n\npub struct UnclosedArrayError {\n\n pub range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"invalid escape sequence\"]\n\n#[code = \"38d42150-1047-428b-b68c-688c0c1b9e1b\"]\n\npub struct InvalidEscapeError {\n\n pub range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"blacklisted whitespace `\\\\u{{}}` is used\"]\n\n#[code = 
\"6f7adbd9-9b1c-4fda-a11a-eddc49d1bd19\"]\n", "file_path": "impl/rust/lisla_lang/src/parse/error.rs", "rank": 81, "score": 55438.81051058277 }, { "content": "\n\n pub fn ok(data:Data) -> Self {\n\n ResumableResult::new(Option::Some(data), Errors::new())\n\n }\n\n\n\n pub fn error(errors:Errors<Error>) -> Self {\n\n ResumableResult::new(Option::None, errors)\n\n }\n\n\n\n pub fn error_with_data(errors:Errors<Error>, data:Data) -> Self {\n\n ResumableResult::new(Option::Some(data), errors)\n\n }\n\n\n\n pub fn to_result(self) -> Result<Data, Errors<Error>> {\n\n match self.data {\n\n Option::Some(data) => {\n\n if self.errors.len() == 0 {\n\n Result::Ok(data)\n\n } else {\n\n Result::Err(self.errors)\n", "file_path": "impl/rust/lisla_core/src/error/mod.rs", "rank": 82, "score": 55438.04042790208 }, { "content": "use std::fmt::{self, Display, Debug};\n\nuse std::clone::Clone;\n\nuse data::position::Position;\n\n\n", "file_path": "impl/rust/lisla_core/src/error/mod.rs", "rank": 83, "score": 55437.35446507102 }, { "content": "\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"indent must be same\"]\n\n#[code = \"d4965a3b-4819-44ef-9657-c1188bd4bcae\"]\n\npub struct UnmatchedIndentError {\n\n pub range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"io error\"]\n\n#[code = \"ac0d0db7-751d-42a7-810e-fa2442b93287\"]\n\npub struct IoError {\n\n pub range: Range,\n\n pub reason: IoErrorReason, \n\n}\n\n\n\n#[derive(Debug)]\n\npub struct IoErrorReason {\n", "file_path": "impl/rust/lisla_lang/src/parse/error.rs", "rank": 84, "score": 55436.985965863365 }, { "content": "}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"invalid unicode\"]\n\n#[code = \"fc4db99b-0cb1-4674-8d11-9c961e9f15d3\"]\n\npub struct InvalidUnicodeError {\n\n pub range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"digit of unicode must be 1-6\"]\n\n#[code = \"dfe778ce-5f90-46f9-8bda-093c644976c8\"]\n\npub 
struct InvalidUnicodeDigitError {\n\n pub range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n", "file_path": "impl/rust/lisla_lang/src/parse/error.rs", "rank": 85, "score": 55436.91214851795 }, { "content": "pub struct BlacklistedWhiteSpaceError {\n\n pub range: Range,\n\n pub character: char,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"too many closing brackets\"]\n\n#[code = \"3055cdc2-475e-4d4e-ac68-a586ede479d5\"]\n\npub struct TooManyClosingBracketsError {\n\n pub range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"too long closing quote\"]\n\n#[code = \"7afbcea2-0b6e-4abf-a945-8c1468f99ff7\"]\n\npub struct TooLongClosingQuoteError {\n\n pub range: Range,\n\n}\n", "file_path": "impl/rust/lisla_lang/src/parse/error.rs", "rank": 86, "score": 55436.57738452336 }, { "content": "#[message = \"invalid placeholder position\"]\n\n#[code = \"9fae0ca2-1489-4cd6-aaef-5c9a0f6b380d\"]\n\npub struct InvalidPlaceholderPositionError {\n\n pub range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"empty placeholder\"]\n\n#[code = \"25577736-d376-4444-8218-1aca1efadd60\"]\n\npub struct EmptyPlaceholderError {\n\n pub range: Range,\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"separater required\"]\n\n#[code = \"e333e39e-e455-44e2-9f36-3b3a6cea0af5\"]\n\npub struct SeparaterRequiredError {\n\n pub range: Range,\n\n}\n", "file_path": "impl/rust/lisla_lang/src/parse/error.rs", "rank": 87, "score": 55436.47343835026 }, { "content": " }\n\n }\n\n Option::None => Result::Err(self.errors),\n\n }\n\n }\n\n\n\n pub fn unwrap(self) -> Data {\n\n self.to_result().unwrap()\n\n }\n\n \n\n pub fn unwrap_err(self) -> Errors<Error> {\n\n self.to_result().unwrap_err()\n\n }\n\n}\n\n\n\n\n", "file_path": "impl/rust/lisla_core/src/error/mod.rs", "rank": 88, "score": 55436.324347317066 }, { "content": " BlacklistedWhiteSpace(BlacklistedWhiteSpaceError),\n\n 
TooManyClosingBrackets(TooManyClosingBracketsError),\n\n TooLongClosingQuote(TooLongClosingQuoteError),\n\n UnmatchedIndent(UnmatchedIndentError),\n\n \n\n InvalidPlaceholderPosition(InvalidPlaceholderPositionError),\n\n EmptyPlaceholder(EmptyPlaceholderError),\n\n\n\n SeparaterRequired(SeparaterRequiredError),\n\n}\n\n\n\n#[derive(Error)]\n\n#[derive(Debug, Clone)]\n\n#[message = \"unclosed quote found\"]\n\n#[code = \"7892a647-f507-4988-ae32-dc7380113fcd\"]\n\npub struct UnclosedQuoteError {\n\n pub range: Range,\n\n pub quote: Quote,\n\n}\n\n\n", "file_path": "impl/rust/lisla_lang/src/parse/error.rs", "rank": 89, "score": 55434.98685246699 }, { "content": " parameters: Self::Parameters,\n\n errors:&mut ::lisla_lang::error::ErrorWrite<::lisla_lang::from::error::FromArrayTreeError>\n\n ) -> bool {\n\n true\n\n }\n\n\n\n fn match_array_tree_string(\n\n config:& ::lisla_lang::from::FromArrayTreeConfig,\n\n leaf: ::lisla_lang::leaf::StringLeaf,\n\n tag: ::lisla_lang::tag::Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ::lisla_lang::error::ErrorWrite<::lisla_lang::from::error::FromArrayTreeError>\n\n ) -> bool {\n\n false\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "impl/rust/lisla_type_derive/src/_tuple.rs", "rank": 90, "score": 55017.358267625554 }, { "content": " errors,\n\n )?;\n\n Result::Ok(\n\n ::lisla_lang::tag::WithTag {\n\n data: #name {\n\n value: with_tag.data\n\n },\n\n tag: with_tag.tag,\n\n }\n\n )\n\n }\n\n\n\n fn match_array_tree_array(\n\n config:& ::lisla_lang::from::FromArrayTreeConfig,\n\n mut array: ::lisla_lang::tree::ArrayBranch<::lisla_lang::tag::WithTag<::lisla_lang::tree::ArrayTree<::lisla_lang::leaf::StringLeaf>>>,\n\n tag: ::lisla_lang::tag::Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ::lisla_lang::error::ErrorWrite<::lisla_lang::from::error::FromArrayTreeError>\n\n ) -> bool {\n\n <#value_type as ::lisla_lang::from::FromArrayTree>::match_array_tree_array(\n", "file_path": "impl/rust/lisla_type_derive/src/newtype.rs", 
"rank": 91, "score": 55016.681388215184 }, { "content": " config,\n\n array,\n\n tag,\n\n parameters,\n\n errors,\n\n )\n\n }\n\n\n\n fn match_array_tree_string(\n\n config:& ::lisla_lang::from::FromArrayTreeConfig,\n\n leaf: ::lisla_lang::leaf::StringLeaf,\n\n tag: ::lisla_lang::tag::Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ::lisla_lang::error::ErrorWrite<::lisla_lang::from::error::FromArrayTreeError>\n\n ) -> bool {\n\n <#value_type as ::lisla_lang::from::FromArrayTree>::match_array_tree_string(\n\n config,\n\n leaf,\n\n tag,\n\n parameters,\n\n errors,\n\n )\n\n }\n\n }\n\n }\n\n}\n", "file_path": "impl/rust/lisla_type_derive/src/newtype.rs", "rank": 92, "score": 55016.07809492681 }, { "content": " data: #name {\n\n value: with_tag.data\n\n },\n\n tag: with_tag.tag,\n\n }\n\n )\n\n }\n\n\n\n fn from_array_tree_string(\n\n config:& ::lisla_lang::from::FromArrayTreeConfig,\n\n leaf: ::lisla_lang::leaf::StringLeaf,\n\n tag: ::lisla_lang::tag::Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ::lisla_lang::error::ErrorWrite<::lisla_lang::from::error::FromArrayTreeError>\n\n ) -> Result<::lisla_lang::tag::WithTag<Self>, ()> {\n\n let with_tag = ::lisla_lang::from::FromArrayTree::from_array_tree_string(\n\n config,\n\n leaf,\n\n tag,\n\n parameters,\n", "file_path": "impl/rust/lisla_type_derive/src/newtype.rs", "rank": 93, "score": 55014.16837373509 }, { "content": " config:& ::lisla_lang::from::FromArrayTreeConfig,\n\n mut array: ::lisla_lang::tree::ArrayBranch<::lisla_lang::tag::WithTag<::lisla_lang::tree::ArrayTree<::lisla_lang::leaf::StringLeaf>>>,\n\n tag: ::lisla_lang::tag::Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ErrorWrite<FromArrayTreeError>\n\n ) -> Result<::lisla_lang::tag::WithTag<Self>, ()> {\n\n let data = #name {\n\n #parameters\n\n };\n\n array.finish(config, &tag, errors)?;\n\n Result::Ok(\n\n ::lisla_lang::tag::WithTag {\n\n data,\n\n tag,\n\n } \n\n )\n\n }\n\n\n\n #[allow(unused_variables)]\n\n fn 
from_array_tree_string(\n", "file_path": "impl/rust/lisla_type_derive/src/_tuple.rs", "rank": 94, "score": 55013.46970992463 }, { "content": " quote! {\n\n impl ::lisla_lang::from::FromArrayTree for #name {\n\n type Parameters = ();\n\n\n\n fn from_array_tree_array(\n\n config:& ::lisla_lang::from::FromArrayTreeConfig,\n\n mut array: ::lisla_lang::tree::ArrayBranch<::lisla_lang::tag::WithTag<::lisla_lang::tree::ArrayTree<::lisla_lang::leaf::StringLeaf>>>,\n\n tag: ::lisla_lang::tag::Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ::lisla_lang::error::ErrorWrite<::lisla_lang::from::error::FromArrayTreeError>\n\n ) -> Result<::lisla_lang::tag::WithTag<Self>, ()> {\n\n let with_tag = ::lisla_lang::from::FromArrayTree::from_array_tree_array(\n\n config,\n\n array,\n\n tag,\n\n parameters,\n\n errors,\n\n )?;\n\n Result::Ok(\n\n ::lisla_lang::tag::WithTag {\n", "file_path": "impl/rust/lisla_type_derive/src/newtype.rs", "rank": 95, "score": 55012.47602560462 }, { "content": " data,\n\n tag.clone(),\n\n (#children),\n\n errors\n\n )?\n\n }\n\n }\n\n )\n\n }\n\n }\n\n }\n\n }\n\n } else {\n\n panic!(\"#[derive(LislaTuple)] is only defined for struct, not for enum or tuple.\");\n\n }\n\n quote! 
{\n\n #[allow(unused_variables)]\n\n impl ::lisla_lang::from::FromArrayTree for #name {\n\n type Parameters = ();\n\n fn from_array_tree_array(\n", "file_path": "impl/rust/lisla_type_derive/src/_tuple.rs", "rank": 96, "score": 55011.094983430194 }, { "content": " config:& ::lisla_lang::from::FromArrayTreeConfig,\n\n leaf: ::lisla_lang::leaf::StringLeaf,\n\n tag: ::lisla_lang::tag::Tag,\n\n parameters: Self::Parameters,\n\n errors:&mut ::lisla_lang::error::ErrorWrite<::lisla_lang::from::error::FromArrayTreeError>\n\n ) -> Result<WithTag<Self>, ()> {\n\n errors.push(\n\n FromArrayTreeError::from(\n\n CantBeStringError {\n\n range: tag.content_range\n\n }\n\n )\n\n );\n\n Result::Err(())\n\n }\n\n\n\n fn match_array_tree_array(\n\n config:& ::lisla_lang::from::FromArrayTreeConfig,\n\n mut array: ::lisla_lang::tree::ArrayBranch<::lisla_lang::tag::WithTag<::lisla_lang::tree::ArrayTree<::lisla_lang::leaf::StringLeaf>>>,\n\n tag: ::lisla_lang::tag::Tag,\n", "file_path": "impl/rust/lisla_type_derive/src/_tuple.rs", "rank": 97, "score": 55010.90173388511 }, { "content": " config,\n\n array.shift(config, &tag, errors)?,\n\n (#children),\n\n errors\n\n )?,\n\n }\n\n )\n\n }\n\n ShiftKind::Spreads(lit) => {\n\n let token = if let Option::Some(_lit) = lit {\n\n quote!{ array.split_off(config, #_lit, &tag, errors)? }\n\n } else {\n\n quote!{ array.split_off_rest(config, &tag, errors)? }\n\n };\n\n parameters.append(\n\n quote! 
{\n\n #ident : {\n\n let data = #token;\n\n ::lisla_lang::from::FromArrayTree::from_array_tree_array(\n\n config,\n", "file_path": "impl/rust/lisla_type_derive/src/_tuple.rs", "rank": 98, "score": 55006.11966144215 }, { "content": "#![feature(attr_literals)]\n\n#![recursion_limit=\"256\"]\n\n\n\nextern crate proc_macro;\n\nextern crate syn;\n\n\n\n#[macro_use]\n\nextern crate quote;\n\n\n\nuse proc_macro::TokenStream;\n\n\n\nmod structure;\n\nmod newtype;\n\nmod _tuple;\n\nmod union;\n\n\n\nuse structure::impl_structure;\n\nuse newtype::impl_newtype;\n\nuse _tuple::impl_tuple;\n\nuse union::impl_union;\n\n\n\n#[proc_macro_derive(LislaStruct, attributes(lisla))]\n", "file_path": "impl/rust/lisla_type_derive/src/lib.rs", "rank": 99, "score": 55002.28275320637 } ]
Rust
sulis_state/src/animation/ranged_attack_animation.rs
ThyWoof/sulis
e89eda94a1a72228224e1926d307aa4c9228bdcb
use std::cell::RefCell; use std::rc::Rc; use crate::{animation::Anim, entity_attack_handler::weapon_attack, AreaFeedbackText}; use crate::{script::ScriptEntitySet, EntityState, GameState, ScriptCallback}; use sulis_core::image::Image; use sulis_core::io::{DrawList, GraphicsRenderer}; use sulis_core::ui::animation_state; use sulis_core::util::{Offset, Rect, Scale}; pub(in crate::animation) fn update( attacker: &Rc<RefCell<EntityState>>, model: &mut RangedAttackAnimModel, frac: f32, ) { if frac > 1.0 { if !model.has_attacked { let cb_def_targets = ScriptEntitySet::new(&model.defender, &[Some(Rc::clone(attacker))]); let cb_att_targets = ScriptEntitySet::new(attacker, &[Some(Rc::clone(&model.defender))]); for cb in model.callbacks.iter() { cb.before_attack(&cb_def_targets); } let area_state = GameState::area_state(); let (defender_cbs, attacker_cbs) = { let mgr = GameState::turn_manager(); let mgr = mgr.borrow(); ( model.defender.borrow().callbacks(&mgr), attacker.borrow().callbacks(&mgr), ) }; attacker_cbs .iter() .for_each(|cb| cb.before_attack(&cb_att_targets)); defender_cbs .iter() .for_each(|cb| cb.before_defense(&cb_def_targets)); model.has_attacked = true; let result = weapon_attack(attacker, &model.defender); for entry in result { let (hit_kind, hit_flags, damage) = entry; let feedback = AreaFeedbackText::with_damage( &model.defender.borrow(), &area_state.borrow(), hit_kind, hit_flags, &damage, ); area_state.borrow_mut().add_feedback_text(feedback); for cb in model.callbacks.iter() { cb.after_attack(&cb_def_targets, hit_kind, damage.clone()); } attacker_cbs .iter() .for_each(|cb| cb.after_attack(&cb_att_targets, hit_kind, damage.clone())); defender_cbs .iter() .for_each(|cb| cb.after_defense(&cb_def_targets, hit_kind, damage.clone())); } } } else { model.cur_pos = ( frac * model.vec.0 + model.start_pos.0, frac * model.vec.1 + model.start_pos.1, ); } } pub(in crate::animation) fn cleanup(owner: &Rc<RefCell<EntityState>>) { if !GameState::is_combat_active() { let 
area_state = GameState::get_area_state(&owner.borrow().location.area_id).unwrap(); let mgr = GameState::turn_manager(); mgr.borrow_mut() .check_ai_activation(owner, &mut area_state.borrow_mut()); } } pub(in crate::animation) fn draw( model: &RangedAttackAnimModel, renderer: &mut dyn GraphicsRenderer, offset: Offset, scale: Scale, millis: u32, ) { if let Some(ref projectile) = model.projectile { let rect = Rect { x: model.cur_pos.0 + offset.x, y: model.cur_pos.1 + offset.y, w: projectile.get_width_f32(), h: projectile.get_height_f32(), }; let mut draw_list = DrawList::empty_sprite(); projectile.append_to_draw_list(&mut draw_list, &animation_state::NORMAL, rect, millis); draw_list.set_scale(scale); draw_list.rotate(model.angle); renderer.draw(draw_list); } } pub fn new( attacker: &Rc<RefCell<EntityState>>, defender: &Rc<RefCell<EntityState>>, callbacks: Vec<Box<dyn ScriptCallback>>, duration_millis: u32, ) -> Anim { let mut start_pos = ( (attacker.borrow().location.x + attacker.borrow().size.width / 2) as f32, (attacker.borrow().location.y + attacker.borrow().size.height / 2) as f32, ); let x = (defender.borrow().location.x + defender.borrow().size.width / 2) as f32 - start_pos.0; let y = (defender.borrow().location.y + defender.borrow().size.height / 2) as f32 - start_pos.1; let dist = (x * x + y * y).sqrt(); let projectile = attacker.borrow().actor.stats.get_ranged_projectile(); if let Some(ref projectile) = projectile { start_pos.0 -= projectile.get_width_f32() / 2.0; start_pos.1 -= projectile.get_height_f32() / 2.0; } let angle = y.atan2(x); let model = RangedAttackAnimModel { defender: Rc::clone(defender), angle, vec: (x, y), start_pos, cur_pos: (0.0, 0.0), has_attacked: false, projectile, callbacks, }; let millis = (duration_millis as f32 * dist) as u32; Anim::new_ranged_attack(attacker, millis, model) } pub(in crate::animation) struct RangedAttackAnimModel { defender: Rc<RefCell<EntityState>>, angle: f32, vec: (f32, f32), start_pos: (f32, f32), cur_pos: (f32, 
f32), pub(in crate::animation) has_attacked: bool, projectile: Option<Rc<dyn Image>>, callbacks: Vec<Box<dyn ScriptCallback>>, }
use std::cell::RefCell; use std::rc::Rc; use crate::{animation::Anim, entity_attack_handler::weapon_attack, AreaFeedbackText}; use crate::{script::ScriptEntitySet, EntityState, GameState, ScriptCallback}; use sulis_core::image::Image; use sulis_core::io::{DrawList, GraphicsRenderer}; use sulis_core::ui::animation_state; use sulis_core::util::{Offset, Rect, Scale}; pub(in crate::animation) fn update( attacker: &Rc<RefCell<EntityState>>, model: &mut RangedAttackAnimModel, frac: f32, ) { if frac > 1.0 { if !model.has_attacked { let cb_def_targets = ScriptEntitySet::new(&model.defender, &[Some(Rc::clone(attacker))]); let cb_att_targets = ScriptEntitySet::new(attacker, &[Some(Rc::clone(&model.defender))]); for cb in model.callbacks.iter() { cb.before_attack(&cb_def_targets); } let area_state = GameState::area_state(); let (defender_cbs, attacker_cbs) = { let mgr = GameState::turn_manager(); let mgr = mgr.borrow(); ( model.defender.borrow().callbacks(&mgr), attacker.borrow().callbacks(&mgr), ) }; attacker_cbs .iter() .for_each(|cb| cb.before_attack(&cb_att_targets)); defender_cbs .iter() .for_each(|cb| cb.before_defense(&cb_def_targets)); model.has_attacked = true; let result = weapon_attack(attacker, &model.defender); for entry in result { let (hit_kind, hit_flags, damage) = entry; let feedback = AreaFeedbackText::with_damage( &model.defender.borrow(), &area_state.borrow(), hit_kind, hit_flags, &damage, ); area_state.borrow_mut().add_feedback_text(feedback); for cb in model.callbacks.iter() { cb.after_attack(&cb_def_targets, hit_kind, damage.clone()); } attacker_cbs .iter() .for_each(|cb| cb.after_attack(&cb_att_targets, hit_kind, damage.clone())); defender_cbs .iter() .for_each(|cb| cb.after_defense(&cb_def_targets, hit_kind, damage.clone())); } } } else { model.cur_pos = ( frac * model.vec.0 + model.start_pos.0, frac * model.vec.1 + model.start_pos.1, ); } } pub(in crate::animation) fn cleanup(owner: &Rc<RefCell<EntityState>>) {
} pub(in crate::animation) fn draw( model: &RangedAttackAnimModel, renderer: &mut dyn GraphicsRenderer, offset: Offset, scale: Scale, millis: u32, ) { if let Some(ref projectile) = model.projectile { let rect = Rect { x: model.cur_pos.0 + offset.x, y: model.cur_pos.1 + offset.y, w: projectile.get_width_f32(), h: projectile.get_height_f32(), }; let mut draw_list = DrawList::empty_sprite(); projectile.append_to_draw_list(&mut draw_list, &animation_state::NORMAL, rect, millis); draw_list.set_scale(scale); draw_list.rotate(model.angle); renderer.draw(draw_list); } } pub fn new( attacker: &Rc<RefCell<EntityState>>, defender: &Rc<RefCell<EntityState>>, callbacks: Vec<Box<dyn ScriptCallback>>, duration_millis: u32, ) -> Anim { let mut start_pos = ( (attacker.borrow().location.x + attacker.borrow().size.width / 2) as f32, (attacker.borrow().location.y + attacker.borrow().size.height / 2) as f32, ); let x = (defender.borrow().location.x + defender.borrow().size.width / 2) as f32 - start_pos.0; let y = (defender.borrow().location.y + defender.borrow().size.height / 2) as f32 - start_pos.1; let dist = (x * x + y * y).sqrt(); let projectile = attacker.borrow().actor.stats.get_ranged_projectile(); if let Some(ref projectile) = projectile { start_pos.0 -= projectile.get_width_f32() / 2.0; start_pos.1 -= projectile.get_height_f32() / 2.0; } let angle = y.atan2(x); let model = RangedAttackAnimModel { defender: Rc::clone(defender), angle, vec: (x, y), start_pos, cur_pos: (0.0, 0.0), has_attacked: false, projectile, callbacks, }; let millis = (duration_millis as f32 * dist) as u32; Anim::new_ranged_attack(attacker, millis, model) } pub(in crate::animation) struct RangedAttackAnimModel { defender: Rc<RefCell<EntityState>>, angle: f32, vec: (f32, f32), start_pos: (f32, f32), cur_pos: (f32, f32), pub(in crate::animation) has_attacked: bool, projectile: Option<Rc<dyn Image>>, callbacks: Vec<Box<dyn ScriptCallback>>, }
if !GameState::is_combat_active() { let area_state = GameState::get_area_state(&owner.borrow().location.area_id).unwrap(); let mgr = GameState::turn_manager(); mgr.borrow_mut() .check_ai_activation(owner, &mut area_state.borrow_mut()); }
if_condition
[]
Rust
rs/crypto/internal/crypto_service_provider/src/server/local_csp_server/basic_sig/tests.rs
LaudateCorpus1/ic
e05b8568bcef4147b5999d2c489f875afde2f8bb
use crate::api::CspSigner; use crate::imported_test_utils::ed25519::csp_testvec; use crate::secret_key_store::test_utils::TempSecretKeyStore; use crate::secret_key_store::SecretKeyStore; use crate::server::api::{ BasicSignatureCspServer, CspBasicSignatureError, CspBasicSignatureKeygenError, }; use crate::server::local_csp_server::LocalCspServer; use crate::Csp; use ic_crypto_internal_test_vectors::ed25519::Ed25519TestVector::RFC8032_ED25519_SHA_ABC; use ic_types::crypto::{AlgorithmId, KeyId}; use ic_types::NumberOfNodes; use rand::{thread_rng, Rng, SeedableRng}; use rand_chacha::ChaChaRng; use strum::IntoEnumIterator; #[test] fn should_generate_key_ok() { let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(thread_rng().gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; assert!(csp_server.gen_key_pair(AlgorithmId::Ed25519).is_ok()); } #[test] fn should_fail_to_generate_key_for_wrong_algorithm_id() { let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(thread_rng().gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; for algorithm_id in AlgorithmId::iter() { if algorithm_id != AlgorithmId::Ed25519 { assert_eq!( csp_server.gen_key_pair(algorithm_id).unwrap_err(), CspBasicSignatureKeygenError::UnsupportedAlgorithm { algorithm: algorithm_id, } ); } } } #[test] fn should_correctly_sign_compared_to_testvec() { let mut rng = thread_rng(); let key_id = rng.gen::<[u8; 32]>(); let (sk, _pk, msg, sig) = csp_testvec(RFC8032_ED25519_SHA_ABC); let csp_server = { let mut key_store = TempSecretKeyStore::new(); key_store .insert(KeyId::from(key_id), sk, None) .expect("failed to insert key into SKS"); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; assert_eq!( csp_server .sign(AlgorithmId::Ed25519, &msg, KeyId::from(key_id)) .expect("failed to create signature"), sig ); } #[test] fn 
should_sign_ok_with_generated_key() { let mut rng = thread_rng(); let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; let (key_id, _csp_pub_key) = csp_server .gen_key_pair(AlgorithmId::Ed25519) .expect("failed to generate keys"); let msg_len: usize = rng.gen_range(0, 1024); let msg: Vec<u8> = (0..msg_len).map(|_| rng.gen::<u8>()).collect(); assert!(csp_server.sign(AlgorithmId::Ed25519, &msg, key_id).is_ok()); } #[test] fn should_sign_verifiably_with_generated_key() { let mut rng = thread_rng(); let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; let (key_id, csp_pub_key) = csp_server .gen_key_pair(AlgorithmId::Ed25519) .expect("failed to generate keys"); let msg_len: usize = rng.gen_range(0, 1024); let msg: Vec<u8> = (0..msg_len).map(|_| rng.gen::<u8>()).collect(); let sig = csp_server .sign(AlgorithmId::Ed25519, &msg, key_id) .expect("failed to generate signature"); let verifier = { let dummy_key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); Csp::of(csprng, dummy_key_store) }; assert!(verifier .verify(&sig, &msg, AlgorithmId::Ed25519, csp_pub_key) .is_ok()); } #[test] fn should_fail_to_sign_with_unsupported_algorithm_id() { let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(thread_rng().gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; let (key_id, _csp_pub_key) = csp_server .gen_key_pair(AlgorithmId::Ed25519) .expect("failed to generate keys"); let msg = [31; 41]; for algorithm_id in AlgorithmId::iter() { if algorithm_id != AlgorithmId::Ed25519 { assert_eq!( csp_server.sign(algorithm_id, &msg, key_id).unwrap_err(), CspBasicSignatureError::UnsupportedAlgorithm { algorithm: algorithm_id, } ); } } } #[test] fn 
should_fail_to_sign_if_secret_key_in_store_has_wrong_type() { use crate::server::api::ThresholdSignatureCspServer; let mut rng = thread_rng(); let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; let threshold = NumberOfNodes::from(1); let (_pub_coeffs, key_ids) = csp_server .threshold_keygen_for_test(AlgorithmId::ThresBls12_381, threshold, &[true]) .expect("failed to generate threshold sig keys"); let key_id = key_ids[0].expect("threshold sig key not generated"); let msg_len: usize = rng.gen_range(0, 1024); let msg: Vec<u8> = (0..msg_len).map(|_| rng.gen::<u8>()).collect(); let result = csp_server.sign(AlgorithmId::Ed25519, &msg, key_id); assert_eq!( result.unwrap_err(), CspBasicSignatureError::WrongSecretKeyType { algorithm: AlgorithmId::ThresBls12_381 } ); }
use crate::api::CspSigner; use crate::imported_test_utils::ed25519::csp_testvec; use crate::secret_key_store::test_utils::TempSecretKeyStore; use crate::secret_key_store::SecretKeyStore; use crate::server::api::{ BasicSignatureCspServer, CspBasicSignatureError, CspBasicSignatureKeygenError, }; use crate::server::local_csp_server::LocalCspServer; use crate::Csp; use ic_crypto_internal_test_vectors::ed25519::Ed25519TestVector::RFC8032_ED25519_SHA_ABC; use ic_types::crypto::{AlgorithmId, KeyId}; use ic_types::NumberOfNodes; use rand::{thread_rng, Rng, SeedableRng}; use rand_chacha::ChaChaRng; use strum::IntoEnumIterator; #[test]
#[test] fn should_fail_to_generate_key_for_wrong_algorithm_id() { let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(thread_rng().gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; for algorithm_id in AlgorithmId::iter() { if algorithm_id != AlgorithmId::Ed25519 { assert_eq!( csp_server.gen_key_pair(algorithm_id).unwrap_err(), CspBasicSignatureKeygenError::UnsupportedAlgorithm { algorithm: algorithm_id, } ); } } } #[test] fn should_correctly_sign_compared_to_testvec() { let mut rng = thread_rng(); let key_id = rng.gen::<[u8; 32]>(); let (sk, _pk, msg, sig) = csp_testvec(RFC8032_ED25519_SHA_ABC); let csp_server = { let mut key_store = TempSecretKeyStore::new(); key_store .insert(KeyId::from(key_id), sk, None) .expect("failed to insert key into SKS"); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; assert_eq!( csp_server .sign(AlgorithmId::Ed25519, &msg, KeyId::from(key_id)) .expect("failed to create signature"), sig ); } #[test] fn should_sign_ok_with_generated_key() { let mut rng = thread_rng(); let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; let (key_id, _csp_pub_key) = csp_server .gen_key_pair(AlgorithmId::Ed25519) .expect("failed to generate keys"); let msg_len: usize = rng.gen_range(0, 1024); let msg: Vec<u8> = (0..msg_len).map(|_| rng.gen::<u8>()).collect(); assert!(csp_server.sign(AlgorithmId::Ed25519, &msg, key_id).is_ok()); } #[test] fn should_sign_verifiably_with_generated_key() { let mut rng = thread_rng(); let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; let (key_id, csp_pub_key) = csp_server .gen_key_pair(AlgorithmId::Ed25519) .expect("failed to generate keys"); let msg_len: usize = 
rng.gen_range(0, 1024); let msg: Vec<u8> = (0..msg_len).map(|_| rng.gen::<u8>()).collect(); let sig = csp_server .sign(AlgorithmId::Ed25519, &msg, key_id) .expect("failed to generate signature"); let verifier = { let dummy_key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); Csp::of(csprng, dummy_key_store) }; assert!(verifier .verify(&sig, &msg, AlgorithmId::Ed25519, csp_pub_key) .is_ok()); } #[test] fn should_fail_to_sign_with_unsupported_algorithm_id() { let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(thread_rng().gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; let (key_id, _csp_pub_key) = csp_server .gen_key_pair(AlgorithmId::Ed25519) .expect("failed to generate keys"); let msg = [31; 41]; for algorithm_id in AlgorithmId::iter() { if algorithm_id != AlgorithmId::Ed25519 { assert_eq!( csp_server.sign(algorithm_id, &msg, key_id).unwrap_err(), CspBasicSignatureError::UnsupportedAlgorithm { algorithm: algorithm_id, } ); } } } #[test] fn should_fail_to_sign_if_secret_key_in_store_has_wrong_type() { use crate::server::api::ThresholdSignatureCspServer; let mut rng = thread_rng(); let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(rng.gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; let threshold = NumberOfNodes::from(1); let (_pub_coeffs, key_ids) = csp_server .threshold_keygen_for_test(AlgorithmId::ThresBls12_381, threshold, &[true]) .expect("failed to generate threshold sig keys"); let key_id = key_ids[0].expect("threshold sig key not generated"); let msg_len: usize = rng.gen_range(0, 1024); let msg: Vec<u8> = (0..msg_len).map(|_| rng.gen::<u8>()).collect(); let result = csp_server.sign(AlgorithmId::Ed25519, &msg, key_id); assert_eq!( result.unwrap_err(), CspBasicSignatureError::WrongSecretKeyType { algorithm: AlgorithmId::ThresBls12_381 } ); }
fn should_generate_key_ok() { let csp_server = { let key_store = TempSecretKeyStore::new(); let csprng = ChaChaRng::from_seed(thread_rng().gen::<[u8; 32]>()); LocalCspServer::new_for_test(csprng, key_store) }; assert!(csp_server.gen_key_pair(AlgorithmId::Ed25519).is_ok()); }
function_block-full_function
[ { "content": "/// Generate a random `IDkgId`.\n\n///\n\n/// Note: There is a proptest strategy for `IDkgId` which is useful in many\n\n/// circumstances but cumbersome in others. Please use the appropriate method\n\n/// for each circumstance.\n\npub fn random_dkg_id<R: Rng>(rng: &mut R) -> IDkgId {\n\n let instance_id = Height::from(rng.gen::<u64>());\n\n let subnet_id = SubnetId::from(PrincipalId::new_subnet_test_id(rng.gen::<u64>()));\n\n IDkgId {\n\n instance_id,\n\n subnet_id,\n\n }\n\n}\n", "file_path": "rs/crypto/test_utils/src/dkg.rs", "rank": 0, "score": 143465.71945288093 }, { "content": "fn csprng() -> impl CryptoRng + Rng + Clone {\n\n ChaCha20Rng::seed_from_u64(42)\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/signer/tests.rs", "rank": 1, "score": 139604.79409257593 }, { "content": "pub fn state_manager_restart_test<Test>(test: Test)\n\nwhere\n\n Test: FnOnce(StateManagerImpl, Box<dyn Fn(StateManagerImpl) -> StateManagerImpl>),\n\n{\n\n let tmp = Builder::new().prefix(\"test\").tempdir().unwrap();\n\n let config = Config::new(tmp.path().into());\n\n let own_subnet = subnet_test_id(42);\n\n let verifier: Arc<dyn Verifier> = Arc::new(FakeVerifier::new());\n\n\n\n with_test_replica_logger(|log| {\n\n let make_state_manager = move || {\n\n let metrics_registry = MetricsRegistry::new();\n\n\n\n StateManagerImpl::new(\n\n Arc::clone(&verifier),\n\n own_subnet,\n\n SubnetType::Application,\n\n log.clone(),\n\n &metrics_registry,\n\n &config,\n", "file_path": "rs/state_manager/tests/common/mod.rs", "rank": 2, "score": 135565.78487463627 }, { "content": "#[allow(clippy::needless_range_loop)]\n\nfn random_graph<T: Rng>(num_nodes: usize, degree: usize, rng: &mut T) -> Vec<Vec<usize>> {\n\n let mut distances = vec![vec![num_nodes; num_nodes]; num_nodes];\n\n for i in 0..num_nodes {\n\n distances[i][i] = 0;\n\n let mut indices: Vec<_> = (0..num_nodes).collect();\n\n indices.remove(i);\n\n indices.shuffle(rng);\n\n for j in 0..degree 
{\n\n distances[i][indices[j]] = 1;\n\n }\n\n }\n\n distances\n\n}\n\n\n", "file_path": "rs/consensus/tests/framework/delivery.rs", "rank": 3, "score": 130008.46186634233 }, { "content": "fn state_manager_crash_test<Fixture, Test>(fixture: Fixture, test: Test)\n\nwhere\n\n Fixture: FnOnce(StateManagerImpl) + std::panic::UnwindSafe,\n\n Test: FnOnce(&MetricsRegistry, StateManagerImpl),\n\n{\n\n let tmp = Builder::new().prefix(\"test\").tempdir().unwrap();\n\n let config = Config::new(tmp.path().into());\n\n with_test_replica_logger(|log| {\n\n std::panic::catch_unwind(|| {\n\n fixture(StateManagerImpl::new(\n\n Arc::new(FakeVerifier::new()),\n\n subnet_test_id(42),\n\n SubnetType::Application,\n\n log.clone(),\n\n &MetricsRegistry::new(),\n\n &config,\n\n ic_types::malicious_flags::MaliciousFlags::default(),\n\n ));\n\n })\n\n .expect_err(\"Crash test fixture did not crash\");\n", "file_path": "rs/state_manager/tests/state_manager.rs", "rank": 4, "score": 129980.97572090101 }, { "content": "pub fn dummy_csprng() -> impl CryptoRng + Rng + Clone {\n\n ChaChaRng::seed_from_u64(42)\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/tls_stub/test_utils.rs", "rank": 5, "score": 129467.80052861107 }, { "content": "fn csprng_seeded_with(seed: u64) -> impl CryptoRng + Rng + Clone {\n\n ChaCha20Rng::seed_from_u64(seed)\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/keygen/tests.rs", "rank": 6, "score": 129078.82299224206 }, { "content": "// Generate random data structures:\n\n// Alternatively we could implement Distribution for all of these types.\n\n// Deriving Rand may be enough for many. 
See: https://stackoverflow.com/questions/48490049/how-do-i-choose-a-random-value-from-an-enum\n\npub fn random_height(rng: &mut ChaCha20Rng) -> Height {\n\n Height::from(rng.gen::<u64>())\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/threshold/ni_dkg/tests/fixtures.rs", "rank": 7, "score": 126072.70284029216 }, { "content": "/// Creates a public key and PoP for a given secret key\n\npub fn create_ephemeral_public_key<R: Rng + CryptoRng>(\n\n mut rng: &mut R,\n\n dkg_id: IDkgId,\n\n secret_key_bytes: &EphemeralSecretKeyBytes,\n\n sender: &[u8],\n\n) -> Result<(EphemeralPublicKeyBytes, EphemeralPopBytes), DkgCreateEphemeralError> {\n\n let secret_key = EphemeralSecretKey::try_from(secret_key_bytes)\n\n .map_err(DkgCreateEphemeralError::MalformedSecretKeyError)?;\n\n let PopData {\n\n public_key_bytes,\n\n pop,\n\n ..\n\n } = create_pop_data(&mut rng, dkg_id, &secret_key, sender);\n\n Ok((public_key_bytes, EphemeralPopBytes::from(pop)))\n\n}\n\n\n\nmod test {\n\n use super::*;\n\n\n\n /// By design, it is hard to determine why a PoP verification has failed.\n", "file_path": "rs/crypto/internal/crypto_lib/threshold_sig/bls12_381/src/dkg/secp256k1/ephemeral_key/tests.rs", "rank": 8, "score": 125418.6999911051 }, { "content": "pub fn random_subnet_id(rng: &mut ChaCha20Rng) -> SubnetId {\n\n subnet_test_id(rng.gen::<u64>())\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/threshold/ni_dkg/tests/fixtures.rs", "rank": 9, "score": 122918.6136099535 }, { "content": "pub fn random_algorithm_id(rng: &mut ChaCha20Rng) -> AlgorithmId {\n\n AlgorithmId::iter()\n\n .choose(rng)\n\n .expect(\"Could not choose an AlgorithmId\")\n\n}\n\n\n\n/// A single node with its CSP\n\npub struct MockNode {\n\n pub node_id: NodeId,\n\n pub csp: Csp<ChaCha20Rng, VolatileSecretKeyStore, VolatileSecretKeyStore>,\n\n}\n\nimpl MockNode {\n\n pub fn random(rng: &mut ChaCha20Rng) -> Self {\n\n let node_id = node_test_id(rng.gen::<u64>());\n\n 
Self::from_node_id(rng, node_id)\n\n }\n\n pub fn from_node_id(rng: &mut ChaCha20Rng, node_id: NodeId) -> Self {\n\n let csprng = ChaCha20Rng::from_seed(rng.gen::<[u8; 32]>());\n\n let csp = Csp::of(csprng, VolatileSecretKeyStore::new());\n\n Self { node_id, csp }\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/threshold/ni_dkg/tests/fixtures.rs", "rank": 10, "score": 122918.6136099535 }, { "content": "// Generate random data structures:\n\n// Alternatively we could implement Distribution for all of these types.\n\n// Deriving Rand may be enough for many. See: https://stackoverflow.com/questions/48490049/how-do-i-choose-a-random-value-from-an-enum\n\npub fn random_height(rng: &mut ChaCha20Rng) -> Height {\n\n Height::from(rng.gen::<u64>())\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/server/local_csp_server/ni_dkg/tests/fixtures.rs", "rank": 11, "score": 121423.87829045141 }, { "content": "pub fn random_ni_dkg_tag(rng: &mut ChaCha20Rng) -> NiDkgTag {\n\n NiDkgTag::iter()\n\n .choose(rng)\n\n .expect(\"Could not choose a NiDkgTag\")\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/threshold/ni_dkg/tests/fixtures.rs", "rank": 12, "score": 119980.175258886 }, { "content": "pub fn random_ni_dkg_id(rng: &mut ChaCha20Rng) -> NiDkgId {\n\n NiDkgId {\n\n start_block_height: random_height(rng),\n\n dealer_subnet: random_subnet_id(rng),\n\n target_subnet: NiDkgTargetSubnet::Local,\n\n dkg_tag: random_ni_dkg_tag(rng),\n\n }\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/threshold/ni_dkg/tests/fixtures.rs", "rank": 13, "score": 119980.175258886 }, { "content": "pub fn random_algorithm_id(rng: &mut ChaCha20Rng) -> AlgorithmId {\n\n AlgorithmId::iter()\n\n .choose(rng)\n\n .expect(\"Could not choose an AlgorithmId\")\n\n}\n\n\n\n/// A single node with its CSP\n\npub struct MockNode {\n\n pub node_id: NodeId,\n\n pub fs_key_id: KeyId,\n\n pub csp_server: LocalCspServer<ChaCha20Rng, 
VolatileSecretKeyStore, VolatileSecretKeyStore>,\n\n}\n\nimpl MockNode {\n\n pub fn random(rng: &mut ChaCha20Rng) -> Self {\n\n let node_id = node_test_id(rng.gen::<u64>());\n\n Self::from_node_id(rng, node_id)\n\n }\n\n pub fn from_node_id(rng: &mut ChaCha20Rng, node_id: NodeId) -> Self {\n\n let csprng = ChaCha20Rng::from_seed(rng.gen::<[u8; 32]>());\n\n let csp_server = LocalCspServer::new_for_test(csprng, VolatileSecretKeyStore::new());\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/server/local_csp_server/ni_dkg/tests/fixtures.rs", "rank": 14, "score": 118584.8770164942 }, { "content": "pub fn random_subnet_id(rng: &mut ChaCha20Rng) -> SubnetId {\n\n subnet_test_id(rng.gen::<u64>())\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/server/local_csp_server/ni_dkg/tests/fixtures.rs", "rank": 15, "score": 118584.8770164942 }, { "content": "pub fn random_ni_dkg_id(rng: &mut ChaCha20Rng) -> NiDkgId {\n\n NiDkgId {\n\n start_block_height: random_height(rng),\n\n dealer_subnet: random_subnet_id(rng),\n\n target_subnet: NiDkgTargetSubnet::Local,\n\n dkg_tag: random_ni_dkg_tag(rng),\n\n }\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/server/local_csp_server/ni_dkg/tests/fixtures.rs", "rank": 16, "score": 115929.87431263055 }, { "content": "pub fn random_ni_dkg_tag(rng: &mut ChaCha20Rng) -> NiDkgTag {\n\n NiDkgTag::iter()\n\n .choose(rng)\n\n .expect(\"Could not choose a NiDkgTag\")\n\n}\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/server/local_csp_server/ni_dkg/tests/fixtures.rs", "rank": 17, "score": 115929.87431263055 }, { "content": "pub fn keypair_from_rng<R: Rng + CryptoRng>(rng: &mut R) -> (SecretKey, PublicKey) {\n\n // random_bls12_381_scalar uses rejection sampling to ensure a uniform\n\n // distribution.\n\n let secret_key = random_bls12_381_scalar(rng);\n\n let public_key = G2Projective::generator() * secret_key;\n\n (secret_key, public_key)\n\n}\n\n\n", "file_path": 
"rs/crypto/internal/crypto_lib/multi_sig/bls12_381/src/crypto.rs", "rank": 18, "score": 109786.16289054371 }, { "content": "/// Generates a keypair using the given `rng`.\n\npub fn keypair_from_rng<R: Rng + CryptoRng>(rng: &mut R) -> (SecretKeyBytes, PublicKeyBytes) {\n\n let (secret_key, public_key) = crypto::keypair_from_rng(rng);\n\n (secret_key.into(), public_key.into())\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_lib/multi_sig/bls12_381/src/api.rs", "rank": 19, "score": 108506.50769579531 }, { "content": "fn setup_chunking_instance_and_witness(rng: &mut impl RAND) -> (ChunkingInstance, ChunkingWitness) {\n\n let g1 = ECP::generator();\n\n let spec_p = BIG::new_ints(&rom::CURVE_ORDER);\n\n let n = 28;\n\n let spec_m = 16;\n\n let mut y = Vec::new();\n\n for _i in 1..n + 1 {\n\n y.push(g1.mul(&BIG::randomnum(&spec_p, rng)));\n\n }\n\n let mut r = Vec::new();\n\n let mut rr = Vec::new();\n\n for _i in 0..spec_m {\n\n let r_i = BIG::randomnum(&spec_p, rng);\n\n rr.push(g1.mul(&r_i));\n\n r.push(r_i);\n\n }\n\n let bb = BIG::new_int(CHUNK_SIZE);\n\n let mut s = Vec::new();\n\n let mut chunk = Vec::new();\n\n for y_i in &y {\n", "file_path": "rs/crypto/internal/crypto_lib/fs_ni_dkg/tests/nizk.rs", "rank": 20, "score": 105549.91074674507 }, { "content": "/// Generates an Ed25519 keypair.\n\npub fn keypair_from_rng<R: Rng + CryptoRng>(\n\n csprng: &mut R,\n\n) -> (types::SecretKeyBytes, types::PublicKeyBytes) {\n\n let keypair = ed25519_dalek::Keypair::generate(csprng);\n\n let sk = types::SecretKeyBytes(SecretArray::new_and_dont_zeroize_argument(\n\n keypair.secret.as_bytes(),\n\n ));\n\n let pk = types::PublicKeyBytes(keypair.public.to_bytes());\n\n (sk, pk)\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_lib/basic_sig/ed25519/src/api.rs", "rank": 21, "score": 104828.33193099083 }, { "content": "fn random_bytes<R: Rng>(n: u128, rng: &mut R) -> Vec<u8> {\n\n (0..n).map(|_| rng.gen::<u8>()).collect()\n\n}\n\n\n", "file_path": "rs/crypto/benches/hash.rs", 
"rank": 22, "score": 103009.82407228698 }, { "content": "// Helper to run the persistence tests below.\n\n// It creates the config and logger that is passed to the instances and then\n\n// makes sure that the the databases are destroyed before the test fails.\n\nfn run_test<T>(_test_name: &str, test: T)\n\nwhere\n\n T: FnOnce(&mut ConsensusPoolImpl),\n\n{\n\n ic_test_utilities::artifact_pool_config::with_test_pool_config(|pool_config| {\n\n let mut consensus_pool = ConsensusPoolImpl::new_from_cup_without_bytes(\n\n subnet_test_id(0),\n\n make_genesis(ic_types::consensus::dkg::Summary::fake()),\n\n pool_config,\n\n ic_metrics::MetricsRegistry::new(),\n\n no_op_logger(),\n\n );\n\n test(&mut consensus_pool);\n\n })\n\n}\n\n\n", "file_path": "rs/artifact_pool/benches/load_blocks.rs", "rank": 23, "score": 102085.79469521737 }, { "content": "/// Helper to run a single test with dependency setup.\n\nfn run_test<T>(_test_name: &str, test: T)\n\nwhere\n\n T: FnOnce(Arc<FastForwardTimeSource>, &mut IngressPoolImpl, &mut IngressManager),\n\n{\n\n let mut ingress_hist_reader = Box::new(MockIngressHistory::new());\n\n ingress_hist_reader\n\n .expect_get_status_at_height()\n\n .returning(|_| Ok(Box::new(|_| IngressStatus::Unknown)));\n\n let subnet_id = subnet_test_id(0);\n\n let runtime = tokio::runtime::Runtime::new().unwrap();\n\n let registry = setup_registry(subnet_id, runtime.handle().clone());\n\n let consensus_pool_cache = Arc::new(MockConsensusCache::new());\n\n let mut state_manager = MockStateManager::new();\n\n state_manager.expect_get_state_at().return_const(Ok(\n\n ic_interfaces::state_manager::Labeled::new(\n\n Height::new(0),\n\n Arc::new(ReplicatedStateBuilder::default().build()),\n\n ),\n\n ));\n\n\n", "file_path": "rs/ingress_manager/benches/build_payload.rs", "rank": 24, "score": 102085.12765152083 }, { "content": "/// Helper to run a single test with dependency setup.\n\nfn run_test<T>(_test_name: &str, test: T)\n\nwhere\n\n T: FnOnce(\n\n 
Arc<FastForwardTimeSource>,\n\n ArtifactPoolConfig,\n\n ReplicaLogger,\n\n &SimulatedIngressHistory,\n\n &mut IngressManager,\n\n ),\n\n{\n\n ic_test_utilities::with_test_replica_logger(|log| {\n\n ic_test_utilities::artifact_pool_config::with_test_pool_config(|pool_config| {\n\n let time_source = FastForwardTimeSource::new();\n\n // Set initial time to non-zero\n\n time_source\n\n .set_time(mock_time() + Duration::from_secs(1))\n\n .unwrap();\n\n let (history, ingress_hist_reader) = SimulatedIngressHistory::new(time_source.clone());\n\n let history = Arc::new(history);\n\n let history_cl = history.clone();\n", "file_path": "rs/ingress_manager/benches/handle_ingress.rs", "rank": 25, "score": 102085.12765152083 }, { "content": "// Returns a random element of FP12 of order CURVE_ORDER (i.e. call fexp()\n\n// before returning).\n\n// Our tests call FP12::pow(), which only works on elements of order\n\n// CURVE_ORDER.\n\nfn fp12_rand(rng: &mut impl RAND) -> miracl_core::bls12381::fp12::FP12 {\n\n use miracl_core::bls12381::fp12::FP12;\n\n use miracl_core::bls12381::fp4::FP4;\n\n use miracl_core::bls12381::pair;\n\n pair::fexp(&FP12::new_fp4s(\n\n &FP4::new_rand(rng),\n\n &FP4::new_rand(rng),\n\n &FP4::new_rand(rng),\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_lib/fs_ni_dkg/tests/forward_secure.rs", "rank": 26, "score": 101915.79984155115 }, { "content": "fn setup_pop_instance_and_witness(rng: &mut impl RAND) -> (EncryptionKeyInstance, BIG) {\n\n let g1 = ECP::generator();\n\n let witness = BIG::randomnum(&curve_order(), rng);\n\n let public_key = g1.mul(&witness);\n\n let associated_data = {\n\n let mut vec = vec![];\n\n for _i in 0..10 {\n\n vec.push(rng.getbyte());\n\n }\n\n vec\n\n };\n\n\n\n let instance = EncryptionKeyInstance {\n\n g1_gen: g1,\n\n public_key,\n\n associated_data,\n\n };\n\n\n\n (instance, witness)\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_lib/fs_ni_dkg/src/encryption_key_pop/tests.rs", "rank": 27, "score": 
101212.36423821477 }, { "content": "#[test]\n\n#[should_panic]\n\nfn should_panic_when_panicking_registry_is_used() {\n\n let registry = registry_panicking_on_usage();\n\n let key = make_crypto_node_key(NODE_1, KeyPurpose::QueryResponseSigning);\n\n let _ = registry.get_value(&key, REG_V1);\n\n}\n\n\n", "file_path": "rs/crypto/src/sign/tests.rs", "rank": 28, "score": 100477.0109998335 }, { "content": "#[test]\n\nfn should_offer_methods_of_rng_trait() {\n\n use rand::Rng;\n\n\n\n let mut rng = Csprng::from_seed([42; 32]);\n\n\n\n assert_eq!(rng.gen::<u32>(), 1_176_443_288);\n\n}\n\n\n", "file_path": "rs/crypto/src/prng/tests.rs", "rank": 29, "score": 100472.73817158924 }, { "content": "#[test]\n\nfn should_verify_valid_signature_using_crypto_for_verification() {\n\n let message = MessageId::from([42; 32]);\n\n let dummy_registry = FakeRegistryClient::new(Arc::new(ProtoRegistryDataProvider::new()));\n\n let (signature, public_key) = ed25519_signature_and_public_key(&message);\n\n let crypto = CryptoComponent::new_for_verification_only(Arc::new(dummy_registry));\n\n\n\n assert!(crypto\n\n .verify_basic_sig_by_public_key(&signature, &message, &public_key)\n\n .is_ok());\n\n}\n\n\n\n/// This is a smoke test ensuring that `CryptoComponentForVerificationOnly`\n\n/// actually checks signatures and does not simply return `Ok`.\n", "file_path": "rs/crypto/tests/crypto_for_verification_only.rs", "rank": 30, "score": 94963.9417448363 }, { "content": "/// Run an artifact manager test, which is a function that takes an\n\n/// ArtifactManager object as input, which is already setup with\n\n/// ingress pool, consensus pool and consensus client (using MockConsensus).\n\npub fn run_test<F: Fn(Arc<dyn ArtifactManager>)>(test: F) {\n\n ic_test_utilities::artifact_pool_config::with_test_pool_config(|pool_config| {\n\n let manager = setup_manager(pool_config);\n\n test(manager)\n\n })\n\n}\n", "file_path": "rs/artifact_manager/tests/setup.rs", "rank": 31, "score": 94525.55869396139 }, { 
"content": "fn random_bytes_chunked<R: Rng>(n: u128, chunk_size: u128, rng: &mut R) -> Vec<Vec<u8>> {\n\n assert_eq!(n % chunk_size, 0, \"partial chunks currently not supported\");\n\n let mut chunks: Vec<Vec<u8>> = vec![];\n\n for _ in 0..(n / chunk_size) {\n\n let chunk: Vec<u8> = random_bytes(chunk_size, rng);\n\n chunks.push(chunk);\n\n }\n\n chunks\n\n}\n\n\n", "file_path": "rs/crypto/benches/hash.rs", "rank": 32, "score": 93642.41799436313 }, { "content": "/// Generates an ephemeral key pair, with pop.\n\n///\n\n/// # Arguments\n\n/// * `rng` - a cryptographically secure random number generator.\n\n/// * `dkg_id` - the DKG this ephemeral key is to be used for.\n\n/// * `sender` - a name to identify the current node.\n\n/// # Returns\n\n/// * `EphemeralSecretKeyBytes` - a key that needs to be kept secret by the\n\n/// current node and used for the DKG protocol. It is not needed once the\n\n/// threshold keys have been derived.\n\n/// * `EphemeralPublicKeyBytes and EphemeralPopBytes` - the corresponding public\n\n/// key and proof of possession.\n\n/// # Panics\n\n/// This method is not expected to panic.\n\npub fn create_ephemeral<R: Rng + CryptoRng>(\n\n mut rng: &mut R,\n\n dkg_id: IDkgId,\n\n sender: &[u8],\n\n) -> (\n\n EphemeralSecretKeyBytes,\n\n EphemeralPublicKeyBytes,\n\n EphemeralPopBytes,\n\n) {\n\n let secret_key = EphemeralSecretKey::random(rng);\n\n let PopData {\n\n public_key_bytes,\n\n pop,\n\n ..\n\n } = create_pop_data(&mut rng, dkg_id, &secret_key, sender);\n\n (\n\n EphemeralSecretKeyBytes::from(secret_key),\n\n public_key_bytes,\n\n EphemeralPopBytes::from(pop),\n\n )\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_lib/threshold_sig/bls12_381/src/dkg/secp256k1/ephemeral_key.rs", "rank": 33, "score": 92991.58248289183 }, { "content": "/// Creates an ephemeral PoP and significant intermediate values.\n\nfn create_pop_data<'a, R: Rng + CryptoRng>(\n\n rng: &mut R,\n\n dkg_id: IDkgId,\n\n secret_key: &EphemeralSecretKey,\n\n sender: &'a 
[u8],\n\n) -> PopData<'a> {\n\n let public_key = EphemeralPublicKey::from(secret_key);\n\n let public_key_bytes = EphemeralPublicKeyBytes::from(&public_key);\n\n\n\n // The plain Schnorr pop would require rewinding in the security proof to\n\n // ensure we can properly simulate dealings with dishonest receivers. The\n\n // following element allows to implement a \"strong DH\"-oracle-type\n\n // computation in the reduction to CDH.\n\n\n\n // spec_h == spec: H is a hash over the inputs, so effectively it captures what\n\n // the PoP is making a proof about.\n\n let h_digest = HDigest::<'a> {\n\n dkg_id,\n\n public_key_bytes,\n\n sender,\n", "file_path": "rs/crypto/internal/crypto_lib/threshold_sig/bls12_381/src/dkg/secp256k1/ephemeral_key.rs", "rank": 34, "score": 92981.24335082815 }, { "content": "#[test]\n\nfn should_produce_same_sha256_digest_as_if_openssl_sha256_was_used_directly() {\n\n let context = TestContext::new(b\"context\");\n\n\n\n let mut lib_state = Sha256::new_with_context(&context);\n\n lib_state.write(b\"some data!\");\n\n let lib_digest = lib_state.finish();\n\n\n\n let mut openssl_state = openssl::sha::Sha256::new();\n\n openssl_state.update(context.as_bytes());\n\n openssl_state.update(b\"some data!\");\n\n let openssl_digest = openssl_state.finish();\n\n\n\n assert_eq!(lib_digest, openssl_digest);\n\n}\n\n\n", "file_path": "rs/crypto/sha/tests/sha256.rs", "rank": 35, "score": 92428.20088570274 }, { "content": "#[test]\n\nfn should_use_unique_separator_byte_per_randomness_purpose() {\n\n let mut set = BTreeSet::new();\n\n\n\n // ensure separator bytes are unique\n\n assert!(set.insert(COMMITTEE_SAMPLING_SEPARATOR_BYTE));\n\n assert!(set.insert(BLOCKMAKER_RANKING_SEPARATOR_BYTE));\n\n assert!(set.insert(DKG_COMMITTEE_SAMPLING_SEPARATOR_BYTE));\n\n assert!(set.insert(EXECUTION_THREAD_SEPARATOR_BYTE));\n\n\n\n // ensure there is a separator for each purpose\n\n assert_eq!(set.len(), RandomnessPurpose::count());\n\n}\n\n\n", "file_path": 
"rs/crypto/src/prng/tests.rs", "rank": 36, "score": 92428.20088570274 }, { "content": "#[test]\n\nfn should_fail_verification_on_invalid_signature_using_crypto_for_verification() {\n\n let message = MessageId::from([42; 32]);\n\n let dummy_registry = FakeRegistryClient::new(Arc::new(ProtoRegistryDataProvider::new()));\n\n let (signature, public_key) = ed25519_signature_and_public_key(&message);\n\n let crypto = CryptoComponent::new_for_verification_only(Arc::new(dummy_registry));\n\n\n\n let different_message = MessageId::from([1; 32]);\n\n assert_ne!(message, different_message);\n\n assert!(crypto\n\n .verify_basic_sig_by_public_key(&signature, &different_message, &public_key)\n\n .unwrap_err()\n\n .is_signature_verification_error());\n\n}\n", "file_path": "rs/crypto/tests/crypto_for_verification_only.rs", "rank": 37, "score": 92428.20088570274 }, { "content": "#[test]\n\n#[should_panic]\n\npub fn should_panic_when_panicking_secret_key_store_is_used() {\n\n let sks = secret_key_store_panicking_on_usage();\n\n let _ = sks.get(&KeyId::from(KEY_ID));\n\n}\n\n\n", "file_path": "rs/crypto/src/sign/tests.rs", "rank": 38, "score": 92428.11626967434 }, { "content": "#[test]\n\nfn can_use_buffer_to_modify_page_map() {\n\n let page_1 = [1u8; PAGE_SIZE];\n\n let page_3 = [3u8; PAGE_SIZE];\n\n let pages = &[(PageIndex::new(1), &page_1), (PageIndex::new(3), &page_3)];\n\n let mut page_map = PageMap::default();\n\n page_map.update(pages);\n\n\n\n let n = 4 * PAGE_SIZE;\n\n let mut vec_buf = vec![0u8; n];\n\n vec_buf[PAGE_SIZE..2 * PAGE_SIZE].copy_from_slice(&page_1);\n\n vec_buf[3 * PAGE_SIZE..4 * PAGE_SIZE].copy_from_slice(&page_3);\n\n\n\n let mut buf = Buffer::new(page_map);\n\n\n\n let mut read_buf = vec![0u8; n];\n\n\n\n buf.read(&mut read_buf[..], 0);\n\n assert_eq!(read_buf, vec_buf);\n\n\n\n for offset in 0..n {\n", "file_path": "rs/replicated_state/src/page_map/tests.rs", "rank": 39, "score": 90024.36713472052 }, { "content": "#[test]\n\nfn 
uses_latest_certified_state_to_decode_certified_streams() {\n\n state_manager_test(|_metrics, state_manager| {\n\n let (_height, state) = state_manager.take_tip();\n\n state_manager.commit_and_certify(state, height(1), CertificationScope::Metadata);\n\n\n\n let subnet = subnet_test_id(42);\n\n\n\n // no streams yet\n\n assert_eq!(\n\n state_manager.encode_certified_stream_slice(subnet, None, None, None, None),\n\n Err(EncodeStreamError::NoStreamForSubnet(subnet))\n\n );\n\n\n\n certify_height(&state_manager, height(1));\n\n\n\n let (_height, mut state) = state_manager.take_tip();\n\n state.modify_streams(|streams| {\n\n streams.insert(subnet, Stream::default());\n\n });\n\n\n", "file_path": "rs/state_manager/tests/state_manager.rs", "rank": 40, "score": 90024.36713472052 }, { "content": "pub fn mock_random_number_generator() -> Box<dyn RngCore> {\n\n Box::new(StdRng::from_seed([0u8; 32]))\n\n}\n", "file_path": "rs/test_utilities/src/crypto.rs", "rank": 41, "score": 87731.49442114713 }, { "content": "pub fn random_bls12_381_scalar<R: RngCore>(rng: &mut R) -> Scalar {\n\n loop {\n\n let mut repr = [0u64; 4];\n\n for r in repr.iter_mut() {\n\n *r = rng.next_u64();\n\n }\n\n\n\n /*\n\n Since the modulus is 255 bits, we clear out the most significant bit to\n\n reduce number of repetitions for the rejection sampling.\n\n\n\n (This also matches the logic used in the old version of zcrypto/pairing,\n\n which we are attempting to maintain bit-for-bit compatability with)\n\n */\n\n repr[3] &= 0xffffffffffffffff >> 1;\n\n\n\n let mut repr8 = [0u8; 32];\n\n repr8[..8].copy_from_slice(&repr[0].to_le_bytes());\n\n repr8[8..16].copy_from_slice(&repr[1].to_le_bytes());\n\n repr8[16..24].copy_from_slice(&repr[2].to_le_bytes());\n", "file_path": "rs/crypto/internal/crypto_lib/bls12_381/common/src/hash.rs", "rank": 42, "score": 86367.63602790784 }, { "content": "#[test]\n\nfn should_produce_same_sha256_digest_as_if_openssl_sha256_was_used_directly() {\n\n let context = 
TestContext::new(b\"context\");\n\n\n\n let mut lib_state = Sha256::new_with_context(&context);\n\n lib_state.write(b\"some data!\");\n\n let lib_digest = lib_state.finish();\n\n\n\n let mut openssl_state = openssl::sha::Sha256::new();\n\n openssl_state.update(context.as_bytes());\n\n openssl_state.update(b\"some data!\");\n\n let openssl_digest = openssl_state.finish();\n\n\n\n assert_eq!(lib_digest, openssl_digest);\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_lib/sha2/tests/sha256.rs", "rank": 43, "score": 85573.28651654729 }, { "content": "#[test]\n\n#[should_panic]\n\nfn should_panic_when_panicking_secret_key_store_is_used() {\n\n let sks = secret_key_store_panicking_on_usage();\n\n let _ = sks.get(&KeyId::from(KEY_ID));\n\n}\n\n\n\nmod multi {\n\n use super::*;\n\n use crate::api::CspKeyGenerator;\n\n use crate::secret_key_store::volatile_store::VolatileSecretKeyStore;\n\n\n\n #[test]\n\n fn pop_verifies() {\n\n let csp = Csp::of(\n\n ChaCha20Rng::seed_from_u64(42),\n\n VolatileSecretKeyStore::new(),\n\n );\n\n let (_key_id, public_key, pop) = csp\n\n .gen_key_pair_with_pop(AlgorithmId::MultiBls12_381)\n\n .expect(\"Failed to generate key pair with PoP\");\n\n assert!(csp\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/signer/tests.rs", "rank": 44, "score": 85573.20190051889 }, { "content": "fn hard_coded_key_id_for_instance_1_and_subnet_2() -> KeyId {\n\n KeyId::from(hex_to_32_bytes(\n\n \"14ab5dba1e76a93f0014097319608cf76a88c16262d4f697704036a285f6f36d\",\n\n ))\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/types/conversions/dkg_id_to_key_id/tests.rs", "rank": 45, "score": 83250.37954016298 }, { "content": "fn generate_rsa_key_and_sig(rng: &mut OsRng, bytes_to_sign: &[u8]) -> (Vec<u8>, Vec<u8>) {\n\n use ic_crypto_internal_basic_sig_rsa_pkcs1 as basic_sig_rsa;\n\n use ic_crypto_sha::Sha256;\n\n use rsa::{Hash, PaddingScheme, PublicKeyParts, RSAPrivateKey};\n\n\n\n let bitlength = 2048; // minimum 
allowed\n\n\n\n let priv_key = RSAPrivateKey::new(rng, bitlength).expect(\"failed to generate RSA key\");\n\n\n\n let pub_key_bytes = basic_sig_rsa::RsaPublicKey::from_components(\n\n &priv_key.to_public_key().e().to_bytes_be(),\n\n &priv_key.to_public_key().n().to_bytes_be(),\n\n )\n\n .expect(\"failed to convert RSA key to internal type for serialization\")\n\n .as_der()\n\n .to_vec();\n\n\n\n let signature = priv_key\n\n .sign(\n\n PaddingScheme::PKCS1v15Sign {\n\n hash: Some(Hash::SHA2_256),\n\n },\n\n &Sha256::hash(bytes_to_sign),\n\n )\n\n .expect(\"failed signing with RSA key\");\n\n\n\n (signature, pub_key_bytes)\n\n}\n", "file_path": "rs/crypto/benches/basic_sig.rs", "rank": 46, "score": 83123.86578397913 }, { "content": "fn keygen(num_signers: usize, mut rng: &mut StdRng) -> Vec<(SecretKeyBytes, PublicKeyBytes)> {\n\n (0..num_signers)\n\n .map(|_| multi::keypair_from_rng(&mut rng))\n\n .collect()\n\n}\n", "file_path": "rs/crypto/src/cli/clib/multi_sig/bench.rs", "rank": 47, "score": 79823.24895351619 }, { "content": "fn make_key_id(seed: u64) -> KeyId {\n\n KeyId::from(ChaCha20Rng::seed_from_u64(seed).gen::<[u8; 32]>())\n\n}\n\n\n", "file_path": "rs/crypto/internal/crypto_service_provider/src/secret_key_store/test_utils.rs", "rank": 48, "score": 78295.2401036312 }, { "content": "#[test]\n\nfn test_statesync_test_canisters() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n println!(\"Start installing statesync test canister\");\n\n let canister = proj\n\n .cargo_bin(\"statesync-test-canister\")\n\n .install(&r)\n\n .with_memory_allocation(8 * 1024 * 1024 * 1024) // 8GiB\n\n .bytes(Vec::new())\n\n .await?;\n\n println!(\"Installed statesync test canister\");\n\n\n\n let mut res: Result<u8, String> = canister\n\n .query_(\"read_state\", dfn_json::json, 0_usize)\n\n .await\n\n .unwrap();\n\n assert_eq!(\n\n res,\n\n Ok(0),\n", "file_path": "rs/rust_canisters/statesync_test/test/test.rs", "rank": 49, 
"score": 77969.82367280977 }, { "content": "/// Uses a crypto component to generate a TLS certificate\n\nfn generate_cert_using_temp_crypto(node_id: NodeId) -> X509PublicKeyCert {\n\n let unused_dummy_registry = Arc::new(FakeRegistryClient::new(Arc::clone(&Arc::new(\n\n ProtoRegistryDataProvider::new(),\n\n )) as Arc<_>));\n\n let (_crypto, cert) = temp_crypto_component_with_tls_keys(unused_dummy_registry, node_id);\n\n cert.to_proto()\n\n}\n\n\n", "file_path": "rs/crypto/tests/tls_handshake_stub.rs", "rank": 50, "score": 77872.54793155 }, { "content": "/// Uses a crypto component to generate a TLS certificate\n\nfn generate_cert_using_temp_crypto(node_id: NodeId) -> X509PublicKeyCert {\n\n let unused_dummy_registry = Arc::new(FakeRegistryClient::new(Arc::clone(&Arc::new(\n\n ProtoRegistryDataProvider::new(),\n\n )) as Arc<_>));\n\n let (_crypto, cert) = temp_crypto_component_with_tls_keys(unused_dummy_registry, node_id);\n\n cert.to_proto()\n\n}\n\n\n", "file_path": "rs/crypto/tests/tls_handshake_openssl.rs", "rank": 51, "score": 77872.54793155 }, { "content": "fn with_memory_accessor<F>(wasm_pages: i32, test: F)\n\nwhere\n\n F: FnOnce(MemoryAccessor),\n\n{\n\n with_hypervisor(|hypervisor, tmp_path| {\n\n let memory_accessor = MemoryAccessor::new(wasm_pages, hypervisor, tmp_path);\n\n test(memory_accessor);\n\n });\n\n}\n\n\n", "file_path": "rs/execution_environment/testgrid/common/hypervisor.rs", "rank": 52, "score": 77739.38661950541 }, { "content": "use canister_test::*;\n\n\n\n#[test]\n", "file_path": "rs/rust_canisters/statesync_test/test/test.rs", "rank": 53, "score": 74149.60705985157 }, { "content": " \"Queried first element of state vector, should have been 0, was {:?}\",\n\n res\n\n );\n\n\n\n res = canister\n\n .update_(\"change_state\", dfn_json::json, 33_u32)\n\n .await\n\n .unwrap();\n\n assert_eq!(\n\n res,\n\n Ok(1),\n\n \"Changed state for the first time, result should have been 1, was {:?}\",\n\n res\n\n );\n\n\n\n res = canister\n\n 
.query_(\"read_state\", dfn_json::json, 0_usize)\n\n .await\n\n .unwrap();\n\n assert_eq!(\n\n res,\n\n Ok(20),\n\n \"Queried 0th element of state vector, should be 20 for seed 33, was {:?}\",\n\n res\n\n );\n\n Ok(())\n\n })\n\n}\n", "file_path": "rs/rust_canisters/statesync_test/test/test.rs", "rank": 54, "score": 74134.71276005472 }, { "content": "// From the on_message() handler\n\nstruct TestMessage {\n\n flow_id: FlowId,\n\n payload: TransportPayload,\n\n}\n\n\n", "file_path": "rs/transport/src/tests/test_client.rs", "rank": 55, "score": 73068.33700104151 }, { "content": "struct TestClient {\n\n transport: Arc<dyn Transport>,\n\n client_type: TransportClientType,\n\n _event_handler: Arc<TestClientEventHandler>,\n\n prev: NodeId,\n\n next: NodeId,\n\n prev_node_record: NodeRecord,\n\n next_node_record: NodeRecord,\n\n receiver: MpscReceiver,\n\n active_flows: Arc<Mutex<HashSet<TransportFlowInfo>>>,\n\n registry_version: RegistryVersion,\n\n log: ReplicaLogger,\n\n active: Arc<AtomicBool>,\n\n}\n\n\n\nimpl TestClient {\n\n fn new(\n\n transport: Arc<dyn Transport>,\n\n registry_node_list: &[(NodeId, NodeRecord)],\n\n prev: &NodeId,\n", "file_path": "rs/transport/src/tests/test_client.rs", "rank": 56, "score": 73068.33700104151 }, { "content": "#[test]\n\nfn transaction_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let minting_account = create_sender(0);\n\n let acc1 = create_sender(1);\n\n let acc2 = create_sender(2);\n\n\n\n // Amount is the send amount + the fee + the amount we burn\n\n let acc1_start_amount = 500 + TRANSACTION_FEE.get_e8s();\n\n let acc2_start_amount = MIN_BURN_AMOUNT.get_e8s();\n\n\n\n let mut accounts = HashMap::new();\n\n accounts.insert(\n\n acc1.get_principal_id().into(),\n\n ICPTs::from_e8s(acc1_start_amount),\n\n );\n\n accounts.insert(\n\n acc2.get_principal_id().into(),\n\n ICPTs::from_e8s(acc2_start_amount),\n", "file_path": 
"rs/rosetta-api/ledger_canister/test/test.rs", "rank": 57, "score": 72039.31365901252 }, { "content": "#[test]\n\nfn test_get_status() {\n\n local_test_on_nns_subnet(|runtime| async move {\n\n let lifeline =\n\n set_up_lifeline_canister(&runtime, LifelineCanisterInitPayloadBuilder::new().build())\n\n .await;\n\n\n\n // Create some NNS canister to be owned by the lifeline\n\n let universal = set_up_universal_canister(&runtime).await;\n\n universal\n\n .set_controller(lifeline.canister_id().get())\n\n .await\n\n .unwrap();\n\n\n\n // Get the status of an NNS canister\n\n let status: CanisterStatusResult = lifeline\n\n .update_(\n\n \"canister_status\",\n\n candid,\n\n (CanisterIdRecord::from(universal.canister_id()),),\n\n )\n\n .await\n\n .unwrap();\n\n assert_eq!(status.controller(), lifeline.canister_id().get());\n\n\n\n Ok(())\n\n });\n\n}\n", "file_path": "rs/nns/handlers/lifeline/tests/test.rs", "rank": 58, "score": 72039.31365901252 }, { "content": "#[test]\n\nfn test_get_status() {\n\n local_test_on_nns_subnet(|runtime| async move {\n\n let root =\n\n set_up_root_canister(&runtime, RootCanisterInitPayloadBuilder::new().build()).await;\n\n\n\n // Create some NNS canister to be own by the root\n\n let universal = set_up_universal_canister(&runtime).await;\n\n universal\n\n .set_controller(root.canister_id().get())\n\n .await\n\n .unwrap();\n\n\n\n // Get the status of an NNS canister\n\n let status: CanisterStatusResult = root\n\n .update_(\n\n \"canister_status\",\n\n candid,\n\n (CanisterIdRecord::from(universal.canister_id()),),\n\n )\n\n .await\n\n .unwrap();\n\n assert_eq!(status.controller(), root.canister_id().get());\n\n\n\n Ok(())\n\n });\n\n}\n\n\n", "file_path": "rs/nns/handlers/root/tests/test.rs", "rank": 59, "score": 72039.31365901252 }, { "content": "#[test]\n\nfn upgrade_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let accounts = make_accounts(5, 4);\n\n\n\n let mut ledger = 
proj\n\n .cargo_bin(\"ledger-canister\")\n\n .install_(\n\n &r,\n\n CandidOne(LedgerCanisterInitPayload::new(\n\n CanisterId::from_u64(0).into(),\n\n accounts,\n\n None,\n\n None,\n\n None,\n\n HashSet::new(),\n\n )),\n\n )\n\n .await?;\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 60, "score": 72039.31365901252 }, { "content": "#[test]\n\nfn reverse_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let canister = proj.cargo_bin(\"wasm\").install_(&r, Vec::new()).await?;\n\n\n\n let res = canister.query_(\"reverse\", bytes, vec![0, 1, 2, 3]).await?;\n\n\n\n assert_eq!(res, vec![3, 2, 1, 0]);\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "rs/rust_canisters/dfn_core/test/test.rs", "rank": 61, "score": 72039.31365901252 }, { "content": "#[test]\n\nfn notify_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n let mut accounts = HashMap::new();\n\n let sender = create_sender(100);\n\n accounts.insert(\n\n sender.get_principal_id().into(),\n\n ICPTs::from_icpts(100).unwrap(),\n\n );\n\n\n\n let test_canister = proj\n\n .cargo_bin(\"test-notified\")\n\n .install_(&r, Vec::new())\n\n .await?;\n\n\n\n let test_canister_2 = proj\n\n .cargo_bin(\"test-notified\")\n\n .install_(&r, Vec::new())\n\n .await?;\n\n\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 62, "score": 72039.31365901252 }, { "content": "#[ignore]\n\n#[test]\n\nfn candid_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let canister = proj\n\n .cargo_bin(\"wasm\")\n\n .install_(&r, BytesS(Vec::new()))\n\n .await?;\n\n\n\n let inp: (&str, u16) = (\"David\", 28);\n\n let res: String = canister.query_(\"greeting\", candid, inp).await?;\n\n\n\n assert_eq!(\"Hello David, you are 28 years old\", &res);\n\n\n\n let inp: (u16, u16, u16, u16) = (1, 2, 3, 4);\n\n let res: (u16, u16) = 
canister.query_(\"sum\", candid, inp).await?;\n\n\n\n assert_eq!((3, 7), res);\n\n Ok(())\n\n });\n\n}\n", "file_path": "rs/rust_canisters/dfn_candid/test/test.rs", "rank": 63, "score": 72039.22904298412 }, { "content": "#[test]\n\nfn test_instrument_module_with_exported_global() {\n\n let output = instrument(\n\n &BinaryEncodedWasm::new(\n\n wabt::wat2wasm(\n\n r#\"\n\n (module\n\n (func $run (export \"run\") (result i32)\n\n (global.get $counter)\n\n )\n\n (global $counter\n\n (export \"my_global_counter\")\n\n (mut i32) (i32.const 123)\n\n )\n\n )\"#,\n\n )\n\n .unwrap(),\n\n ),\n\n &InstructionCostTable::new(),\n\n )\n\n .unwrap();\n\n\n\n let result = wasmtime_simple::wasmtime_instantiate_and_call_run(&output.binary);\n\n assert_eq!(result[0].i32().unwrap(), 123);\n\n}\n", "file_path": "rs/embedders/tests/misc_tests.rs", "rank": 64, "score": 71032.15553054321 }, { "content": "#[test]\n\nfn sub_account_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let mut initial_values = HashMap::new();\n\n\n\n let sub_account = |x| Some(Subaccount([x; 32]));\n\n\n\n let sender = create_sender(100);\n\n\n\n initial_values.insert(\n\n AccountIdentifier::new(sender.get_principal_id(), sub_account(1)),\n\n ICPTs::from_icpts(10).unwrap(),\n\n );\n\n let from_subaccount = sub_account(1);\n\n let mut send_whitelist = HashSet::new();\n\n send_whitelist.insert(CanisterId::new(sender.get_principal_id()).unwrap());\n\n let ledger_canister = proj\n\n .cargo_bin(\"ledger-canister\")\n\n .install_(\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 65, "score": 71032.15553054321 }, { "content": "#[test]\n\nfn test_transfer_candid() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let minting_account = create_sender(0);\n\n let acc1 = create_sender(1);\n\n let acc2 = create_sender(2);\n\n let acc3 = create_sender(3);\n\n\n\n let acc1_address: 
AccountIdentifier = acc1.get_principal_id().into();\n\n let acc2_address: AccountIdentifier = acc2.get_principal_id().into();\n\n let acc3_address: AccountIdentifier = acc3.get_principal_id().into();\n\n\n\n let mut accounts = HashMap::new();\n\n accounts.insert(acc1_address, ICPTs::from_e8s(1_000_000_000));\n\n accounts.insert(acc2_address, ICPTs::from_e8s(1_000_000_000));\n\n\n\n let ledger = proj\n\n .cargo_bin(\"ledger-canister\")\n\n .install_(\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 66, "score": 71032.15553054321 }, { "content": "#[test]\n\nfn get_block_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n // For printing and comparing blocks since they're now hidden behind a\n\n // trait\n\n let blk = |b: &ledger_canister::Block| (b.transaction().into_owned(), b.timestamp());\n\n\n\n let minting_account = create_sender(0);\n\n\n\n // This is how many blocks we want to generate for this test.\n\n // Generating blocks is done by proxy, that is, by creating multiple\n\n // accounts (since each account will generate a Mint transaction).\n\n let num_blocks = 32u64;\n\n\n\n // Generate initial blocks just below the archive threshold\n\n let accounts = make_accounts(num_blocks - 1, 1);\n\n\n\n // With a target of 32 accounts and 8 blocks per archive we should\n\n // generate multiple archive nodes\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 67, "score": 71032.15553054321 }, { "content": "#[test]\n\nfn certification_api_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let canister = proj.cargo_bin(\"wasm\").install_(&r, Vec::new()).await?;\n\n\n\n let _ = canister\n\n .update_(\"set_certified_data\", bytes, vec![0u8; 32])\n\n .await?;\n\n\n\n let _ = canister.query_(\"get_certificate\", bytes, vec![]).await?;\n\n\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": 
"rs/rust_canisters/dfn_core/test/test.rs", "rank": 68, "score": 71032.15553054321 }, { "content": "#[test]\n\nfn notify_timeout_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n let mut accounts = HashMap::new();\n\n let sender = create_sender(100);\n\n accounts.insert(\n\n sender.get_principal_id().into(),\n\n ICPTs::from_icpts(100).unwrap(),\n\n );\n\n\n\n let test_canister = proj\n\n .cargo_bin(\"test-notified\")\n\n .install_(&r, Vec::new())\n\n .await?;\n\n\n\n let minting_account = create_sender(0);\n\n\n\n let mut send_whitelist = HashSet::new();\n\n send_whitelist.insert(test_canister.canister_id());\n\n\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 69, "score": 71032.15553054321 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq, Hash)]\n\nenum TestClientErrorCode {\n\n TransportError(TransportErrorCode),\n\n MessageMismatch,\n\n NotAllFlowsUp,\n\n Timeout,\n\n UnknownFailure,\n\n}\n\n\n", "file_path": "rs/transport/src/tests/test_client.rs", "rank": 70, "score": 71025.04621992368 }, { "content": "struct TestClientEventHandler {\n\n sender: MpscSender,\n\n active_flows: Arc<Mutex<HashSet<TransportFlowInfo>>>,\n\n log: ReplicaLogger,\n\n}\n\n\n\nimpl TestClientEventHandler {\n\n fn on_message(&self, flow_id: FlowId, message: TransportPayload) -> Option<TransportPayload> {\n\n tokio::task::block_in_place(move || {\n\n self.sender\n\n .send(TestMessage {\n\n flow_id,\n\n payload: message,\n\n })\n\n .expect(\"on_message(): failed to send\")\n\n });\n\n\n\n None\n\n }\n\n\n", "file_path": "rs/transport/src/tests/test_client.rs", "rank": 71, "score": 71025.04621992368 }, { "content": "pub fn test_vec(id: TestVectorId) -> TestVector {\n\n match id {\n\n TestVectorId::STABILITY_1 => {\n\n // The test data for this vector comes from a real idp_service canister.\n\n TestVector {\n\n signature: 
b\"\\xd9\\xd9\\xf7\\xa2kcertificateY\\x01\\x8b\\xd9\\xd9\\xf7\\xa2dtree\\x83\\x01\\x83\\x01\\x83\\x01\\x83\\x02Hcanister\\x83\\x01\\x82\\x04X \\xcb\\xd2\\x96\\x8a\\xb7\\xb38?\\x8e\\x1c\\x81\\xe6\\xec(\\x11\\xb0\\x87?!\\xea;z\\xd5\\xa7i\\xeb\\x900\\xce\\x028n\\x83\\x02J\\x00\\x00\\x00\\x00\\x00\\x00\\x00\\x08\\x01\\x01\\x83\\x01\\x83\\x01\\x83\\x02Ncertified_data\\x82\\x03X \\xf3\\xe9\\x0c[F\\xe5\\xed?\\xca\\x88H\\xf2\\xe7\\x16Q\\xd4C\\x9aI\\xa2R\\xb8;A}\\x06G\\xf3 \\x5c\\x07s\\x82\\x04X \\xd0\\x8a\\xa5\\x8br\\x01}\\xd5\\xe5\\x14\\x9a\\xc2.F\\xa7\\x86\\xb6RN\\xc1\\xd8uj\\xcf\\x88\\xb9\\xe3|\\x14\\xef\\x8f\\xe2\\x82\\x04X \\xa2\\xa8r\\x0a\\xbb\\xe0\\xbde\\x0f\\x04\\xa6o{\\xfa\\xde\\xf7F\\xb4t\\xef)\\xd4\\x1d\\x12k\\xd1\\xea#6\\x15\\xad\\x18\\x82\\x04X \\x99\\xe5\\x9e\\x17\\xdc\\xe9\\xa1?\\xef\\xc9\\x22\\xd2\\x0a\\x90\\xdb\\x03ow5'\\x88\\xe9\\x9di\\x8b6@\\xd4\\xfa\\x88w$\\x82\\x04X E%X\\xea\\x92\\xc9Q\\xba\\xf2\\xbf5\\xb2\\x94\\x00^\\xde\\xb8\\xc3\\xbf\\x94\\xb6\\xaf\\x9bk\\x92\\x83\\x9d\\xd2h\\xa7\\xcaZ\\x83\\x01\\x82\\x04X \\xb4\\xac\\xde@n\\xf5\\x95\\xb7\\xb3\\xc3\\xf1be\\xb9\\xb3e\\xbe\\xe3\\x82\\x94\\xa1\\xb7\\xa5\\x9a\\x9dQ\\xd6B*\\xb9\\x98y\\x83\\x02Dtime\\x82\\x03I\\xd8\\xfb\\x9a\\xaf\\xea\\x8c\\x96\\xbb\\x16isignatureX0\\xb1Y\\xa3\\xd6_\\x08\\x22y\\xff?Q]\\x0f\\xe7\\xe8XC\\x02\\xb3k\\xcc\\x9ci8xH=O\\x1d\\x07\\xb3\\x5ci\\x1a\\xc5\\xdf\\x09\\xbf\\x96C\\xca\\xfb\\x22\\xca\\xbb0\\x07ndtree\\x83\\x02Csig\\x83\\x02X 9\\xe5\\xb4\\x83\\x0dM\\x9c\\x14\\xdbsh\\xa9[e\\xd5F>\\xa3\\xd0\\x95 77#C\\x0c\\x03\\xa5\\xa4S\\xb5\\xdf\\x83\\x02X \\x8f\\x7f\\x1d\\x02\\xee_\\xf0\\xcd?;]\\xd1\\xd8r\\xd0\\x04\\xdd\\xe7\\xf9\\x18{\\xc19\\xd2\\x07\\xab\\x09\\x1d\\xbdU\\xe2t\\x82\\x03@\".to_vec(),\n\n canister_id: \"qoctq-giaaa-aaaaa-aaaea-cai\".to_string(),\n\n seed: b\"10000\".to_vec(),\n\n delegation_pubkey: b\"MY PUBLIC KEY\".to_vec(),\n\n delegation_exp: 1_650_114_196_974_266_000,\n\n root_pubkey_der: 
base64::decode(&\"MIGCMB0GDSsGAQQBgtx8BQMBAgEGDCsGAQQBgtx8BQMCAQNhAJN9lndC9PmwG44m08nlPFolGoNYavxz8FS6wa7WDBsR56ZnfsCyYIXNwdOa1MjctQLtFPVK9EDR2CkHWx6fbnLeV+uyOEphXQs+Lzpq9FFlMt5xOipXRXpmtosKfTT4Tg==\".to_string()).unwrap(),\n\n }\n\n }\n\n }\n\n}\n", "file_path": "rs/crypto/internal/test_vectors/src/iccsa.rs", "rank": 72, "score": 70879.09944318797 }, { "content": "#[test]\n\nfn archive_blocks_large_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n // 4096 blocks\n\n let accounts = make_accounts(64, 64);\n\n\n\n let blocks_per_archive_node: usize = 32768;\n\n\n\n // 1 MiB\n\n let max_message_size_bytes: usize = 1024 * 1024;\n\n let node_max_memory_size_bytes: usize =\n\n example_block().encode().unwrap().size_bytes() * blocks_per_archive_node;\n\n let archive_options = Some(ArchiveOptions {\n\n trigger_threshold: 64 * 64,\n\n num_blocks_to_archive: 64 * 64,\n\n node_max_memory_size_bytes: Some(node_max_memory_size_bytes),\n\n max_message_size_bytes: Some(max_message_size_bytes),\n\n controller_id: CanisterId::from_u64(876),\n\n });\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 73, "score": 70052.77331526217 }, { "content": "#[test]\n\nfn archive_blocks_small_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n // 12 blocks\n\n let accounts = make_accounts(4, 3);\n\n println!(\"[test] accounts: {:?}\", accounts);\n\n\n\n // For this test we will use a tiny node size. 
This is because\n\n // we want multiple archive nodes to be created\n\n let blocks_per_archive_node = 2;\n\n println!(\n\n \"[test] blocks per archive node: {}\",\n\n blocks_per_archive_node\n\n );\n\n // The tiny maximum message size will force archiving one block at a\n\n // time\n\n let max_message_size_bytes = 192;\n\n let node_max_memory_size_bytes =\n\n example_block().encode().unwrap().size_bytes() * blocks_per_archive_node;\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 74, "score": 70052.77331526217 }, { "content": "#[test]\n\nfn get_multiple_blocks_test() {\n\n local_test_e(|r| async move {\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n\n\n let minting_account = create_sender(0);\n\n\n\n // This is how many blocks we want to generate for this test.\n\n // Generating blocks is done by proxy, that is, by creating multiple\n\n // accounts (since each account will generate a Mint transaction).\n\n let num_blocks = 14u64;\n\n\n\n let accounts = make_accounts(num_blocks - 1, 1);\n\n\n\n // For this test we only need two archive nodes to check the range\n\n // queries. We will start with 14 blocks, so the first archive node\n\n // will be filled and then some space will be left in the second. Note\n\n // that the number here is **approximate**\n\n let blocks_per_archive_node: usize = 8;\n\n\n\n let max_message_size_bytes: usize = 1024 * 1024;\n", "file_path": "rs/rosetta-api/ledger_canister/test/test.rs", "rank": 75, "score": 70052.77331526217 }, { "content": "// Memory and table need to be exported as \"memory\" and \"table\". 
This test\n\n// checks that we export them if they are not.\n\nfn test_instrument_module_export_memory_table() {\n\n let output = instrument(\n\n &BinaryEncodedWasm::new(\n\n wabt::wat2wasm(\n\n r#\"\n\n (module\n\n (memory 1 2)\n\n (table 2 2 anyfunc)\n\n (func $run (export \"run\") (result i32)\n\n (i32.const 123)\n\n )\n\n )\n\n \"#,\n\n )\n\n .unwrap(),\n\n ),\n\n &InstructionCostTable::new(),\n\n )\n\n .unwrap();\n\n\n\n let module =\n\n parity_wasm::elements::deserialize_buffer::<Module>(output.binary.as_slice()).unwrap();\n\n assert_memory_and_table_exports(&module);\n\n // check that instrumented module instantiates correctly\n\n let result = wasmtime_simple::wasmtime_instantiate_and_call_run(&output.binary);\n\n assert_eq!(result[0].i32().unwrap(), 123);\n\n}\n\n\n", "file_path": "rs/embedders/tests/misc_tests.rs", "rank": 76, "score": 70052.14844786114 }, { "content": "// Memory and table need to be exported as \"memory\" and \"table\". This tests\n\n// checks that we rename \"mem\" to \"memory\" and \"tab\" to \"table\" during\n\n// instrumentation.\n\nfn test_instrument_module_rename_memory_table() {\n\n let output = instrument(\n\n &BinaryEncodedWasm::new(\n\n wabt::wat2wasm(\n\n r#\"\n\n (module\n\n (memory (export \"mem\") 1 2)\n\n (table (export \"tab\") 2 2 anyfunc)\n\n (func $run (export \"run\") (result i32)\n\n (i32.const 123)\n\n )\n\n )\n\n \"#,\n\n )\n\n .unwrap(),\n\n ),\n\n &InstructionCostTable::new(),\n\n )\n\n .unwrap();\n\n\n\n let module =\n\n parity_wasm::elements::deserialize_buffer::<Module>(output.binary.as_slice()).unwrap();\n\n assert_memory_and_table_exports(&module);\n\n // check that instrumented module instantiates correctly\n\n let result = wasmtime_simple::wasmtime_instantiate_and_call_run(&output.binary);\n\n assert_eq!(result[0].i32().unwrap(), 123);\n\n}\n\n\n\n#[test]\n", "file_path": "rs/embedders/tests/misc_tests.rs", "rank": 77, "score": 70051.81078146631 }, { "content": "type MpscSender = Sender<TestMessage>;\n", 
"file_path": "rs/transport/src/tests/test_client.rs", "rank": 78, "score": 70045.66400464263 }, { "content": "type MpscReceiver = Receiver<TestMessage>;\n\n\n\nconst ARG_NODE_ID: &str = \"node\";\n\nconst ARG_MSG_COUNT: &str = \"count\";\n\n\n\nconst REG_V1: RegistryVersion = RegistryVersion::new(1);\n\nconst SUBNET_ID: u8 = 100;\n\nconst FLOW_TAG_1: u32 = 1234;\n\nconst FLOW_TAG_2: u32 = 5678;\n\n\n\nconst TEST_MESSAGE_LEN: usize = 1_000_000;\n\n\n\nconst RECV_TIMEOUT_MS: u64 = 40000;\n\n\n", "file_path": "rs/transport/src/tests/test_client.rs", "rank": 79, "score": 70045.66400464263 }, { "content": "mod test {\n\n use assert_matches::assert_matches;\n\n use candid::Encode;\n\n use canister_test::{Canister, Project, Runtime};\n\n use ic_crypto_tree_hash::{flatmap, lookup_path, Label, LabeledTree, MixedHashTree};\n\n use ic_interfaces::registry::RegistryTransportRecord;\n\n use ic_nns_common::registry::encode_or_panic;\n\n use ic_nns_constants::GOVERNANCE_CANISTER_ID;\n\n use ic_nns_test_utils::itest_helpers::{\n\n forward_call_via_universal_canister, set_up_universal_canister,\n\n };\n\n use ic_nns_test_utils::{\n\n itest_helpers::{local_test_on_nns_subnet, maybe_upgrade_to_self, UpgradeTestingScenario},\n\n registry::invariant_compliant_mutation_as_atomic_req,\n\n };\n\n use ic_nns_test_utils_macros::parameterized_upgrades;\n\n use ic_registry_common::certification::decode_hash_tree;\n\n use ic_registry_transport::{\n\n insert,\n\n pb::v1::{\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 80, "score": 69863.09636532306 }, { "content": "use canister_test::{Canister, Runtime};\n\nuse dfn_candid::candid;\n\nuse ic_nns_gtc::init::GenesisTokenCanisterInitPayloadBuilder;\n\nuse ic_nns_gtc::pb::v1::Gtc;\n\nuse ic_nns_gtc_accounts::{ECT_ACCOUNTS, SEED_ROUND_ACCOUNTS};\n\nuse ic_nns_test_utils::itest_helpers::{\n\n maybe_upgrade_to_self, set_up_genesis_token_canister, UpgradeTestingScenario,\n\n};\n\nuse 
ic_nns_test_utils_macros::parameterized_upgrades;\n\n\n\n// This tests examples shown in README\n\nasync fn test_gtc(gtc: &Canister<'_>) {\n\n let total: u32 = gtc.query_(\"total\", candid, ()).await.unwrap();\n\n assert_eq!(total, 160561922);\n\n\n\n let len: u16 = gtc.query_(\"len\", candid, ()).await.unwrap();\n\n assert_eq!(len, 375);\n\n\n\n let balance: u32 = gtc\n\n .query_(\n", "file_path": "rs/nns/gtc/tests/test.rs", "rank": 81, "score": 69862.46692877132 }, { "content": "#![allow(clippy::unwrap_used)]\n\nuse crate::keygen_utils::TestKeygenCrypto;\n\nuse ic_config::crypto::CryptoConfig;\n\nuse ic_crypto::utils::{\n\n get_node_keys_or_generate_if_missing, NodeKeysToGenerate, TempCryptoComponent,\n\n};\n\nuse ic_crypto::CryptoComponent;\n\nuse ic_crypto_test_utils::tls::x509_certificates::generate_ed25519_cert;\n\nuse ic_interfaces::crypto::KeyManager;\n\nuse ic_logger::replica_logger::no_op_logger;\n\nuse ic_protobuf::crypto::v1::NodePublicKeys;\n\nuse ic_protobuf::registry::crypto::v1::PublicKey;\n\nuse ic_protobuf::registry::crypto::v1::X509PublicKeyCert;\n\nuse ic_registry_client::fake::FakeRegistryClient;\n\nuse ic_registry_common::proto_registry_data_provider::ProtoRegistryDataProvider;\n\nuse ic_test_utilities::types::ids::node_test_id;\n\nuse ic_types::crypto::{AlgorithmId, CryptoError, KeyPurpose};\n\nuse ic_types::RegistryVersion;\n\nuse std::sync::Arc;\n\n\n\nmod keygen_utils;\n\n\n\nconst REG_V1: RegistryVersion = RegistryVersion::new(1);\n\nconst NODE_ID: u64 = 42;\n\n\n\n#[test]\n", "file_path": "rs/crypto/tests/integration_test.rs", "rank": 82, "score": 69862.11971630176 }, { "content": "mod wasmtime_simple;\n\n\n\nuse ic_embedders::wasm_utils::instrumentation::{instrument, InstructionCostTable};\n\nuse ic_wasm_types::BinaryEncodedWasm;\n\nuse parity_wasm::elements::Module;\n\n\n", "file_path": "rs/embedders/tests/misc_tests.rs", "rank": 83, "score": 69855.49132400336 }, { "content": "/// on both an initialized GTC and an upgraded 
GTC\n\n#[parameterized_upgrades]\n\nasync fn test_gtc_before_and_after_upgrade(\n\n runtime: &Runtime,\n\n upgrade_scenario: UpgradeTestingScenario,\n\n) {\n\n let mut gtc = set_up_genesis_token_canister(runtime, get_gtc_init_payload()).await;\n\n test_gtc(&gtc).await;\n\n maybe_upgrade_to_self(&mut gtc, upgrade_scenario).await;\n\n test_gtc(&gtc).await;\n\n}\n\n\n", "file_path": "rs/nns/gtc/tests/test.rs", "rank": 84, "score": 69855.2337366694 }, { "content": " .unwrap(),\n\n );\n\n Ok(())\n\n });\n\n }\n\n\n\n /// Tests that the canister init traps if any initial mutation fails, even\n\n /// if previous ones have succeeded\n\n #[test]\n\n fn test_that_init_traps_if_any_init_mutation_fails() {\n\n local_test_on_nns_subnet(|runtime| async move {\n\n let init_payload = RegistryCanisterInitPayloadBuilder::new()\n\n .push_init_mutate_request(invariant_compliant_mutation_as_atomic_req())\n\n .push_init_mutate_request(RegistryAtomicMutateRequest {\n\n mutations: vec![\n\n upsert(b\"rock steady\", b\"jamaica\"),\n\n upsert(b\"jazz\", b\"usa\"),\n\n upsert(b\"dub\", b\"uk\"),\n\n ],\n\n preconditions: vec![],\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 85, "score": 69854.76902073779 }, { "content": " });\n\n }\n\n\n\n /// Tests that the state of the registry after initialization includes what\n\n /// was set by the initial mutations, when they all succeed.\n\n #[test]\n\n fn test_initial_mutations_ok() {\n\n local_test_on_nns_subnet(|runtime| async move {\n\n let init_payload = RegistryCanisterInitPayloadBuilder::new()\n\n .push_init_mutate_request(invariant_compliant_mutation_as_atomic_req())\n\n .push_init_mutate_request(RegistryAtomicMutateRequest {\n\n mutations: vec![\n\n upsert(b\"dufourspitze\", b\"4634 m\"),\n\n upsert(b\"dom\", b\"4545 m\"),\n\n ],\n\n preconditions: vec![],\n\n })\n\n .push_init_mutate_request(RegistryAtomicMutateRequest {\n\n mutations: vec![upsert(b\"matterhorn\", b\"4478 m\")],\n\n preconditions: vec![precondition(b\"dom\", 
1)],\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 86, "score": 69854.72776229407 }, { "content": " \"balance\",\n\n candid,\n\n (\"006b572cd1af263c1f6c7c4d74f9260cd308c937\",),\n\n )\n\n .await\n\n .unwrap();\n\n assert_eq!(balance, 756);\n\n\n\n let balance: u32 = gtc\n\n .query_(\n\n \"balance\",\n\n candid,\n\n (\"6d9bd871135894e872df9db1e5a07cb5102297e8\",),\n\n )\n\n .await\n\n .unwrap();\n\n assert_eq!(balance, 81778);\n\n}\n\n\n\n/// Run `test_gtc` before and after an upgrade to confirm that `test_gtc` passes\n", "file_path": "rs/nns/gtc/tests/test.rs", "rank": 87, "score": 69854.5885414549 }, { "content": " .unwrap();\n\n\n\n let tree = data_part(&certified_response);\n\n assert_eq!(count_deltas(&tree), MAX_VERSIONS_PER_QUERY as usize / 2);\n\n assert!(has_delta(&tree, MAX_VERSIONS_PER_QUERY + 1));\n\n assert!(has_delta(&tree, 3 * MAX_VERSIONS_PER_QUERY / 2));\n\n decode_hash_tree(\n\n MAX_VERSIONS_PER_QUERY,\n\n certified_response.hash_tree.unwrap().try_into().unwrap(),\n\n )\n\n .unwrap();\n\n\n\n Ok(())\n\n });\n\n }\n\n\n\n #[test]\n\n fn test_canister_installation_traps_on_bad_init_payload() {\n\n local_test_on_nns_subnet(|runtime| async move {\n\n assert_matches!(\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 88, "score": 69854.32264424219 }, { "content": " Project::new(env!(\"CARGO_MANIFEST_DIR\"))\n\n .cargo_bin(\"registry-canister\")\n\n .install(&runtime)\n\n .bytes(b\"This is not legal candid\".to_vec())\n\n .await,\n\n Err(msg) if msg.contains(\"must be a Candid-encoded RegistryCanisterInitPayload\"));\n\n Ok(())\n\n });\n\n }\n\n\n\n #[test]\n\n fn test_mutations_are_rejected_from_non_authorized_sources() {\n\n local_test_on_nns_subnet(|runtime| async move {\n\n let mut canister = install_registry_canister(\n\n &runtime,\n\n RegistryCanisterInitPayloadBuilder::new()\n\n .push_init_mutate_request(invariant_compliant_mutation_as_atomic_req())\n\n .build(),\n\n )\n\n .await;\n", "file_path": 
"rs/registry/canister/tests/test.rs", "rank": 89, "score": 69854.22705075936 }, { "content": " registry_error::Code, CertifiedResponse, RegistryAtomicMutateRequest,\n\n RegistryAtomicMutateResponse, RegistryError, RegistryGetChangesSinceRequest,\n\n RegistryGetLatestVersionResponse, RegistryGetValueRequest, RegistryGetValueResponse,\n\n },\n\n precondition, update, upsert,\n\n };\n\n use ic_types::RegistryVersion;\n\n use registry_canister::{\n\n init::{RegistryCanisterInitPayload, RegistryCanisterInitPayloadBuilder},\n\n proto_on_wire::protobuf,\n\n };\n\n use std::convert::TryInto;\n\n\n\n pub async fn install_registry_canister(\n\n runtime: &Runtime,\n\n init_payload: RegistryCanisterInitPayload,\n\n ) -> Canister<'_> {\n\n try_to_install_registry_canister(runtime, init_payload)\n\n .await\n\n .unwrap()\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 90, "score": 69853.95336577146 }, { "content": " }\n\n\n\n /// This is a simple end-to-end test of the Registry canister, in which\n\n /// key/value pairs are first inserted, and in a second time the value\n\n /// for one key is retrieved.\n\n #[parameterized_upgrades]\n\n async fn registry(runtime: &Runtime, upgrade_scenario: UpgradeTestingScenario) {\n\n // Set up: install the registry canister\n\n let mut canister = install_registry_canister(\n\n runtime,\n\n RegistryCanisterInitPayloadBuilder::new()\n\n .push_init_mutate_request(invariant_compliant_mutation_as_atomic_req())\n\n .build(),\n\n )\n\n .await;\n\n\n\n // Sets up a universal canister in lieu of the governance canister so it can\n\n // impersonate it.\n\n let fake_governance_canister = set_up_universal_canister(runtime).await;\n\n assert_eq!(\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 91, "score": 69853.23177403749 }, { "content": " );\n\n\n\n maybe_upgrade_to_self(&mut canister, upgrade_scenario).await;\n\n\n\n let (deltas, version) = query_certified_changes_since(&canister, 1).await;\n\n assert_eq!(version, 
RegistryVersion::from(2));\n\n assert_eq!(deltas.len(), 1);\n\n\n\n let (deltas, version) = query_certified_changes_since(&canister, 2).await;\n\n assert_eq!(version, RegistryVersion::from(2));\n\n assert!(deltas.is_empty());\n\n }\n\n\n\n #[test]\n\n fn test_does_not_return_more_than_1000_certified_deltas() {\n\n fn count_deltas(tree: &LabeledTree<Vec<u8>>) -> usize {\n\n match lookup_path(tree, &[&b\"delta\"[..]]).unwrap() {\n\n LabeledTree::SubTree(children) => children.len(),\n\n _ => panic!(\"unexpected data tree shape: {:?}\", tree),\n\n }\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 92, "score": 69853.18168838539 }, { "content": " get_value_resp_non_existent,\n\n RegistryGetValueResponse {\n\n error: Some(RegistryError {\n\n code: Code::KeyNotPresent as i32,\n\n key: b\"Oh no, that key does not exist!\".to_vec(),\n\n reason: \"\".to_string()\n\n }),\n\n version: 3,\n\n value: vec![]\n\n }\n\n );\n\n }\n\n\n\n #[parameterized_upgrades]\n\n async fn get_latest_version_certified(\n\n runtime: &Runtime,\n\n upgrade_scenario: UpgradeTestingScenario,\n\n ) {\n\n type T = LabeledTree<Vec<u8>>;\n\n\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 93, "score": 69852.34426337302 }, { "content": " );\n\n }\n\n\n\n #[parameterized_upgrades]\n\n async fn get_changes_since_certified(\n\n runtime: &Runtime,\n\n upgrade_scenario: UpgradeTestingScenario,\n\n ) {\n\n let mut canister = install_registry_canister(\n\n runtime,\n\n RegistryCanisterInitPayloadBuilder::new()\n\n .push_init_mutate_request(invariant_compliant_mutation_as_atomic_req())\n\n .build(),\n\n )\n\n .await;\n\n\n\n // Sets up a universal canister in lieu of the governance canister so it can\n\n // impersonate it.\n\n let fake_governance_canister = set_up_universal_canister(runtime).await;\n\n assert_eq!(\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 94, "score": 69852.07717118878 }, { "content": " .query_(\"get_latest_version\", protobuf, vec![])\n\n 
.await\n\n .unwrap();\n\n assert_eq!(\n\n get_latest_version_resp,\n\n RegistryGetLatestVersionResponse { version: 2_u64 }\n\n );\n\n\n\n // Mutate an existing key to be able to test the existence of several values for\n\n // one key.\n\n assert!(\n\n forward_call_via_universal_canister(\n\n &fake_governance_canister,\n\n &canister,\n\n \"atomic_mutate\",\n\n encode_or_panic(&RegistryAtomicMutateRequest {\n\n mutations: vec![update(\"zurich\", \"die Schweiz\")],\n\n preconditions: vec![],\n\n })\n\n )\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 95, "score": 69852.01520132017 }, { "content": " }\n\n fn has_delta(tree: &LabeledTree<Vec<u8>>, version: u64) -> bool {\n\n lookup_path(tree, &[&b\"delta\"[..], &version.to_be_bytes()[..]]).is_some()\n\n }\n\n\n\n local_test_on_nns_subnet(|runtime| async move {\n\n const MAX_VERSIONS_PER_QUERY: u64 = 1000;\n\n\n\n let canister = install_registry_canister(&runtime, {\n\n let mut builder = RegistryCanisterInitPayloadBuilder::new();\n\n builder.push_init_mutate_request(invariant_compliant_mutation_as_atomic_req());\n\n for v in 1..(3 * MAX_VERSIONS_PER_QUERY / 2) {\n\n let mutation_request = RegistryAtomicMutateRequest {\n\n mutations: vec![insert(format!(\"key{}\", v), \"value\")],\n\n preconditions: vec![],\n\n };\n\n builder.push_init_mutate_request(mutation_request);\n\n }\n\n builder.build()\n\n })\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 96, "score": 69851.48123713852 }, { "content": " version,\n\n key: key.as_ref().to_vec(),\n\n }\n\n }\n\n\n\n fn changes_since(version: u64) -> RegistryGetChangesSinceRequest {\n\n RegistryGetChangesSinceRequest { version }\n\n }\n\n\n\n fn data_part(certified_response: &CertifiedResponse) -> LabeledTree<Vec<u8>> {\n\n let tree: MixedHashTree = certified_response\n\n .hash_tree\n\n .clone()\n\n .expect(\"certified response doesn't include a hash tree\")\n\n .try_into()\n\n .expect(\"failed to decode mixed hash tree\");\n\n let data_part: 
LabeledTree<Vec<u8>> = tree\n\n .try_into()\n\n .expect(\"failed to convert mixed hash tree into a labeled tree\");\n\n data_part\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 97, "score": 69847.76969252585 }, { "content": " \"get_certified_changes_since\",\n\n protobuf,\n\n changes_since(version),\n\n )\n\n .await\n\n .expect(\"failed to query certified changes\");\n\n\n\n decode_hash_tree(\n\n version,\n\n certified_response\n\n .hash_tree\n\n .expect(\"no hash tree in a certified response\")\n\n .try_into()\n\n .expect(\"failed to decode hash tree from protobuf\"),\n\n )\n\n .expect(\"failed to decode registry deltas\")\n\n }\n\n\n\n fn get_value_request(key: impl AsRef<[u8]>, version: Option<u64>) -> RegistryGetValueRequest {\n\n RegistryGetValueRequest {\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 98, "score": 69847.76969252585 }, { "content": " }\n\n\n\n async fn try_to_install_registry_canister(\n\n runtime: &Runtime,\n\n init_payload: RegistryCanisterInitPayload,\n\n ) -> Result<Canister<'_>, String> {\n\n let encoded = Encode!(&init_payload).unwrap();\n\n let proj = Project::new(env!(\"CARGO_MANIFEST_DIR\"));\n\n proj.cargo_bin(\"registry-canister\")\n\n .install(runtime)\n\n .bytes(encoded)\n\n .await\n\n }\n\n\n\n async fn query_certified_changes_since(\n\n canister: &Canister<'_>,\n\n version: u64,\n\n ) -> (Vec<RegistryTransportRecord>, RegistryVersion) {\n\n let certified_response: CertifiedResponse = canister\n\n .query_(\n", "file_path": "rs/registry/canister/tests/test.rs", "rank": 99, "score": 69847.76969252585 } ]
Rust
src/sim/render/lighting.rs
FreddyWordingham/arctk
05842d8a5842653179acaaf2a7de843ed9293b82
use crate::{ geom::Ray, math::{rand_circle_point, rand_hemisphere_point, Dir3}, phys::Crossing, sim::render::{Attribute, Input}, }; use rand::{rngs::ThreadRng, Rng}; use std::f64::consts::PI; #[inline] #[must_use] pub fn light(input: &Input, ray: &Ray, norm: &Dir3) -> f64 { let light_dir = Dir3::new_normalize(input.shader.sun_pos() - ray.pos()); let view_dir = Dir3::new_normalize(input.cam.pos() - ray.pos()); let ref_dir = Crossing::calc_ref_dir(ray.dir(), norm); let [ambient, mut diffuse, mut specular] = input.shader.light(); diffuse *= norm.dot(&light_dir); specular *= view_dir .dot(&ref_dir) .max(0.0) .powi(input.shader.spec_pow()); ambient + diffuse + specular } #[inline] #[must_use] pub fn shadow(input: &Input, rng: &mut ThreadRng, ray: &Ray, norm: &Dir3) -> f64 { let bump_dist = input.sett.bump_dist(); let sun_dir = Dir3::new_normalize(input.shader.sun_pos() - ray.pos()); let mut light_ray = Ray::new(*ray.pos(), *norm); light_ray.travel(bump_dist); *light_ray.dir_mut() = sun_dir; let solar = if let Some((samples, rad)) = input.shader.soft_shadow_samples() { let offset = rng.gen_range(0.0..(2.0 * PI)); let mut total = 0.0; for n in 0..samples { let (r, theta) = rand_circle_point(n, samples); let mut soft_ray = light_ray.clone(); soft_ray.rotate(r * rad, theta + offset); total += occlusion(input, soft_ray, input.shader.occ_dist()[1]); } total / f64::from(samples) } else { occlusion(input, light_ray, input.shader.occ_dist()[1]) }; if let Some((samples, power)) = input.shader.ambient_shadow_samples() { let offset = rng.gen_range(0.0..(2.0 * PI)); let mut total = 0.0; let mut norm_ray = Ray::new(*ray.pos(), *norm); norm_ray.travel(bump_dist); for n in 0..samples { let (phi, theta) = rand_hemisphere_point(n, samples); let mut ambient_ray = norm_ray.clone(); ambient_ray.rotate(phi, theta + offset); total += occlusion(input, ambient_ray, input.shader.occ_dist()[1]); } let ambient = (total / f64::from(samples)).powi(power); return 
ambient.mul_add(input.shader.shadow()[0], solar * input.shader.shadow()[1]); }; solar } #[inline] #[must_use] pub fn occlusion(input: &Input, mut ray: Ray, mut dist: f64) -> f64 { debug_assert!(dist > 0.0); let bump_dist = input.sett.bump_dist(); let loop_limit = input.sett.loop_limit(); let min_weight = input.sett.min_weight(); let mut vis = 1.0; let mut num_loops = 0; while let Some(hit) = input.tree.scan(ray.clone(), bump_dist, dist) { if num_loops >= loop_limit { println!("[WARN] : Terminating shadower: loop limit reached."); return 0.0; } num_loops += 1; dist -= hit.dist(); if dist < 0.0 { return vis; } if vis < min_weight { return 0.0; } match *hit.tag() { Attribute::Opaque(..) | Attribute::Switchable(..) => { return vis / dist.mul_add(input.shader.fall_off(), 1.0); } Attribute::Mirror(.., abs_frac) => { ray.travel(dist); vis *= 1.0 - abs_frac; *ray.dir_mut() = Crossing::calc_ref_dir(ray.dir(), hit.side().norm()); ray.travel(bump_dist); } Attribute::Transparent(.., abs_frac) => { ray.travel(dist + bump_dist); vis *= 1.0 - abs_frac; } Attribute::Refractive(.., abs_frac, [_inside, _outside]) => { ray.travel(dist + bump_dist); vis *= 1.0 - abs_frac; } Attribute::Luminous(.., bright_mult) => { return (vis * bright_mult) / dist.mul_add(input.shader.fall_off(), 1.0); } } } vis }
use crate::{ geom::Ray, math::{rand_circle_point, rand_hemisphere_point, Dir3}, phys::Crossing, sim::render::{Attribute, Input}, }; use rand::{rngs::ThreadRng, Rng}; use std::f64::consts::PI; #[inline] #[must_use] pub fn light(input: &Input, ray: &Ray, norm: &Dir3) -> f64 { let light_dir = Dir3::new_normalize(input.shader.sun_pos() - ray.pos()); let view_dir = Dir3::new_normalize(input.cam.pos() - ray.pos()); let ref_dir = Crossing::calc_ref_dir(ray.dir(), norm); let [ambient, mut diffuse, mut specular] = input.shader.light();
#[inline] #[must_use] pub fn shadow(input: &Input, rng: &mut ThreadRng, ray: &Ray, norm: &Dir3) -> f64 { let bump_dist = input.sett.bump_dist(); let sun_dir = Dir3::new_normalize(input.shader.sun_pos() - ray.pos()); let mut light_ray = Ray::new(*ray.pos(), *norm); light_ray.travel(bump_dist); *light_ray.dir_mut() = sun_dir; let solar = if let Some((samples, rad)) = input.shader.soft_shadow_samples() { let offset = rng.gen_range(0.0..(2.0 * PI)); let mut total = 0.0; for n in 0..samples { let (r, theta) = rand_circle_point(n, samples); let mut soft_ray = light_ray.clone(); soft_ray.rotate(r * rad, theta + offset); total += occlusion(input, soft_ray, input.shader.occ_dist()[1]); } total / f64::from(samples) } else { occlusion(input, light_ray, input.shader.occ_dist()[1]) }; if let Some((samples, power)) = input.shader.ambient_shadow_samples() { let offset = rng.gen_range(0.0..(2.0 * PI)); let mut total = 0.0; let mut norm_ray = Ray::new(*ray.pos(), *norm); norm_ray.travel(bump_dist); for n in 0..samples { let (phi, theta) = rand_hemisphere_point(n, samples); let mut ambient_ray = norm_ray.clone(); ambient_ray.rotate(phi, theta + offset); total += occlusion(input, ambient_ray, input.shader.occ_dist()[1]); } let ambient = (total / f64::from(samples)).powi(power); return ambient.mul_add(input.shader.shadow()[0], solar * input.shader.shadow()[1]); }; solar } #[inline] #[must_use] pub fn occlusion(input: &Input, mut ray: Ray, mut dist: f64) -> f64 { debug_assert!(dist > 0.0); let bump_dist = input.sett.bump_dist(); let loop_limit = input.sett.loop_limit(); let min_weight = input.sett.min_weight(); let mut vis = 1.0; let mut num_loops = 0; while let Some(hit) = input.tree.scan(ray.clone(), bump_dist, dist) { if num_loops >= loop_limit { println!("[WARN] : Terminating shadower: loop limit reached."); return 0.0; } num_loops += 1; dist -= hit.dist(); if dist < 0.0 { return vis; } if vis < min_weight { return 0.0; } match *hit.tag() { Attribute::Opaque(..) 
| Attribute::Switchable(..) => { return vis / dist.mul_add(input.shader.fall_off(), 1.0); } Attribute::Mirror(.., abs_frac) => { ray.travel(dist); vis *= 1.0 - abs_frac; *ray.dir_mut() = Crossing::calc_ref_dir(ray.dir(), hit.side().norm()); ray.travel(bump_dist); } Attribute::Transparent(.., abs_frac) => { ray.travel(dist + bump_dist); vis *= 1.0 - abs_frac; } Attribute::Refractive(.., abs_frac, [_inside, _outside]) => { ray.travel(dist + bump_dist); vis *= 1.0 - abs_frac; } Attribute::Luminous(.., bright_mult) => { return (vis * bright_mult) / dist.mul_add(input.shader.fall_off(), 1.0); } } } vis }
diffuse *= norm.dot(&light_dir); specular *= view_dir .dot(&ref_dir) .max(0.0) .powi(input.shader.spec_pow()); ambient + diffuse + specular }
function_block-function_prefix_line
[ { "content": "#[inline]\n\n#[must_use]\n\npub fn sample_normal<R: Rng>(rng: &mut R) -> f64 {\n\n let a = (-2.0 * rng.gen_range(0.0_f64..1.0).ln()).sqrt();\n\n let theta = rng.gen_range(0.0..(2.0 * PI));\n\n\n\n // Z = Some(a * theta.sin()); // Using mutable static will lead to data race; we waste the the other value :(.\n\n\n\n a * theta.cos()\n\n}\n\n\n\n/// Sample a gaussian distribution.\n", "file_path": "src/math/rng/distribution.rs", "rank": 3, "score": 326551.2072325472 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn sample_henyey_greenstein<R: Rng>(rng: &mut R, asym: f64) -> f64 {\n\n debug_assert!(asym.abs() <= 1.0);\n\n\n\n if asym.abs() < 1.0e-6 {\n\n return rng.gen_range(-1.0_f64..1.0).acos();\n\n }\n\n\n\n let asym_sq = asym * asym;\n\n\n\n let a = (1.0 - asym_sq) / asym.mul_add(rng.gen_range(-1.0..1.0), 1.0);\n\n ((1.0 + asym_sq - (a * a)) / (2.0 * asym)).acos()\n\n}\n\n\n\n/// Sample the normal distribution.\n", "file_path": "src/math/rng/distribution.rs", "rank": 4, "score": 326359.44683099794 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn sample_gaussian<R: Rng>(rng: &mut R, mu: f64, sigma: f64) -> f64 {\n\n debug_assert!(sigma > 0.0);\n\n\n\n sample_normal(rng).mul_add(sigma, mu)\n\n}\n\n\n\n/// Create a random unit vector.\n", "file_path": "src/math/rng/distribution.rs", "rank": 5, "score": 325784.085946927 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn standard(input: &Input, mut data: &mut Output, mut rng: &mut ThreadRng, mut phot: Photon) {\n\n // Check photon is within the grid.\n\n if let Some(index) = input.grid.gen_index(phot.ray().pos()) {\n\n data.emission[index] += phot.power() * phot.weight();\n\n } else {\n\n panic!(\"Photon was not emitted within the grid.\");\n\n }\n\n\n\n // Common constants.\n\n let bump_dist = input.sett.bump_dist();\n\n let loop_limit = input.sett.loop_limit();\n\n let min_weight = input.sett.min_weight();\n\n let roulette_barrels = input.sett.roulette_barrels() as f64;\n\n let 
roulette_survive_prob = 1.0 / roulette_barrels;\n\n\n\n // Initialisation.\n\n let mat = input.light.mat();\n\n let mut env = mat.sample_environment(phot.wavelength());\n\n\n\n // Main event loop.\n", "file_path": "src/sim/mcrt/engines/standard.rs", "rank": 6, "score": 323949.61126574484 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn rand_isotropic_dir<R: Rng>(rng: &mut R) -> Dir3 {\n\n let theta = rng.gen_range(0.0..(2.0 * PI));\n\n let z: f64 = rng.gen_range(-1.0..1.0);\n\n\n\n let v = (1.0 - (z * z)).sqrt();\n\n\n\n let x = v * theta.cos();\n\n let y = v * theta.sin();\n\n\n\n Dir3::new_normalize(Vec3::new(x, y, z))\n\n}\n\n\n\n/// Sample points within a circle using the golden ratio.\n", "file_path": "src/math/rng/distribution.rs", "rank": 7, "score": 323168.8249782063 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn engine(input: &Input, rng: &mut ThreadRng, index: [usize; 3], data: &mut Output) {\n\n let super_sampling = input.sett.super_sampling();\n\n let super_samples = super_sampling.num_samples();\n\n let weight = 1.0 / super_samples as f64;\n\n\n\n let voxel = input.grid.gen_voxel(&index);\n\n for n in 0..super_samples {\n\n let pos = super_sampling.sample(&voxel, n, rng);\n\n\n\n if let Some(mat) = find_mat(input, &pos) {\n\n data.mats[mat][index] += weight;\n\n } else {\n\n println!(\n\n \"[WARN] Could not determine key at index: {} : {} : {}\",\n\n index[X], index[Y], index[Z],\n\n );\n\n\n\n data.void[index] += weight;\n\n }\n\n }\n\n}\n\n\n\n/// Determine the material at this point.\n", "file_path": "src/sim/cartographer/engine.rs", "rank": 8, "score": 315106.77653149364 }, { "content": "#[inline]\n\npub fn run(mut values: Array1<f64>, input: &Input) -> Result<Array2<f64>, Error> {\n\n // Constants.\n\n let steps = input.sett.dumps() + 1;\n\n let dt = input.sett.time() / (input.sett.dumps() + 1) as f64;\n\n let fraction = 1.0 - input.sett.quality();\n\n let min_time = input.sett.min_time();\n\n\n\n // Allocation.\n\n let 
mut data = Array2::zeros([steps + 1, values.len() + 1]);\n\n let mut rates = [\n\n Array1::zeros(values.len()),\n\n Array1::zeros(values.len()),\n\n Array1::zeros(values.len()),\n\n Array1::zeros(values.len()),\n\n ];\n\n\n\n // Initial value write.\n\n data[[0, 0]] = 0.0;\n\n for (i, val) in values.iter().enumerate() {\n\n data[[0, i + 1]] = *val;\n", "file_path": "src/sim/flask/run.rs", "rank": 9, "score": 310434.35594544443 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn peel_off(input: &Input, mut phot: Photon, env: &Local, pos: Pos3) -> Option<f64> {\n\n let g = env.asym();\n\n let g_sq = g * g;\n\n\n\n let dir = Dir3::new_normalize(pos - phot.ray().pos());\n\n\n\n let cos_ang = phot.ray().dir().dot(&dir);\n\n let mut prob = 0.5 * ((1.0 - g_sq) / (1.0 + g_sq - (2.0 * g * cos_ang)).powf(1.5));\n\n\n\n if prob < THRESHOLD {\n\n return None;\n\n }\n\n\n\n *phot.ray_mut().dir_mut() = dir;\n\n\n\n let loop_limit = input.sett.loop_limit();\n\n let bump_dist = input.sett.bump_dist();\n\n let mut inter_coeff = env.inter_coeff();\n\n\n\n // Main trace loop.\n", "file_path": "src/sim/mcrt/peel_off.rs", "rank": 10, "score": 281884.5668839372 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn wavelength_to_rbg(mut wavelength: f64) -> [f64; 3] {\n\n let gamma = 0.8;\n\n wavelength *= 1.0e9;\n\n\n\n if (380.0..440.0).contains(&wavelength) {\n\n let a = 0.3 + (0.7 * (wavelength - 380.0) / (440.0 - 380.0));\n\n let r = ((-(wavelength - 440.0) / (440.0 - 380.0)) * a).powf(gamma);\n\n let g = 0.0;\n\n let b = a.powf(gamma);\n\n return [r, g, b];\n\n } else if (440.0..490.0).contains(&wavelength) {\n\n let r = 0.0;\n\n let g = ((wavelength - 440.0) / (490.0 - 440.0)).powf(gamma);\n\n let b = 1.0;\n\n return [r, g, b];\n\n } else if (490.0..510.0).contains(&wavelength) {\n\n let r = 0.0;\n\n let g = 1.0;\n\n let b = (-(wavelength - 510.0) / (510.0 - 490.0)).powf(gamma);\n\n return [r, g, b];\n", "file_path": "src/sim/mcrt/engines/photo.rs", "rank": 11, "score": 
270252.0455091496 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn rand_hemisphere_point(n: i32, max: i32) -> (f64, f64) {\n\n debug_assert!(n >= 0);\n\n debug_assert!(n < max);\n\n\n\n rand_sphere_point(n, max * 2)\n\n}\n", "file_path": "src/math/rng/distribution.rs", "rank": 12, "score": 261219.0662988963 }, { "content": "#[inline]\n\npub fn scatter(rng: &mut ThreadRng, phot: &mut Photon, env: &Local) {\n\n // Part of the weight is absorbed.\n\n *phot.weight_mut() *= env.albedo();\n\n\n\n // The remaining weight is scattered.\n\n let phi = sample_henyey_greenstein(rng, env.asym());\n\n let theta = rng.gen_range(0.0..(PI * 2.0));\n\n phot.ray_mut().rotate(phi, theta);\n\n}\n\n\n\n/// Perform a photon scattering event with a probability of shifting wavelength.\n", "file_path": "src/sim/mcrt/scatter.rs", "rank": 13, "score": 257764.30756581383 }, { "content": "#[inline]\n\npub fn shift_scatter(rng: &mut ThreadRng, phot: &mut Photon, env: &Local) {\n\n // Part of the weight is absorbed.\n\n *phot.weight_mut() *= env.albedo();\n\n\n\n // The remaining weight may be shifted in a Raman/fluorescence event.\n\n let r = rng.gen::<f64>();\n\n if r <= env.shift_prob() {\n\n // Shift occurs.\n\n // Fluorescence event removes photons from optical range of interest.\n\n *phot.weight_mut() = 0.0;\n\n return;\n\n }\n\n\n\n // The remaining weight is scattered.\n\n let phi = sample_henyey_greenstein(rng, env.asym());\n\n let theta = rng.gen_range(0.0..(PI * 2.0));\n\n phot.ray_mut().rotate(phi, theta);\n\n}\n", "file_path": "src/sim/mcrt/scatter.rs", "rank": 14, "score": 255219.05008116958 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn rand_circle_point(n: i32, max: i32) -> (f64, f64) {\n\n debug_assert!(n >= 0);\n\n debug_assert!(n < max);\n\n\n\n let r = f64::from(n) / f64::from(max - 1);\n\n let theta = f64::from(n) * *GOLDEN_RATIO;\n\n\n\n (r, theta)\n\n}\n\n\n\n/// Sample points on a sphere's surface using the golden ratio.\n", "file_path": 
"src/math/rng/distribution.rs", "rank": 15, "score": 243586.33842310403 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn rand_sphere_point(n: i32, max: i32) -> (f64, f64) {\n\n debug_assert!(n >= 0);\n\n debug_assert!(n < max);\n\n\n\n let d = f64::from(1 - max).mul_add(0.5, f64::from(n));\n\n let phi = ((2.0 * d) / f64::from(max)).asin() + FRAC_PI_2;\n\n let theta = ((2.0 * PI) / *GOLDEN_RATIO) * (d % *GOLDEN_RATIO);\n\n\n\n (phi, theta)\n\n}\n\n\n\n/// Sample points on a hemisphere's surface using the golden ratio.\n", "file_path": "src/math/rng/distribution.rs", "rank": 16, "score": 243586.33842310403 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn min(vec: &[f64]) -> f64 {\n\n vec.iter().copied().fold(std::f64::NAN, f64::max)\n\n}\n\n\n\n/// Determine the maximum value within a list.\n", "file_path": "src/math/slice.rs", "rank": 17, "score": 229241.83166631142 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn max(vec: &[f64]) -> f64 {\n\n vec.iter().copied().fold(std::f64::NAN, f64::min)\n\n}\n\n\n\n/// Determine if the list is sorted in ascending order.\n", "file_path": "src/math/slice.rs", "rank": 18, "score": 229241.83166631142 }, { "content": "#[inline]\n\npub fn travel(data: &mut Output, phot: &mut Photon, env: &Local, index: [usize; 3], dist: f64) {\n\n debug_assert!(dist > 0.0);\n\n\n\n let weight_power_dist = phot.weight() * phot.power() * dist;\n\n data.energy[index] += weight_power_dist * env.ref_index() / SPEED_OF_LIGHT_IN_VACUUM;\n\n data.absorptions[index] += weight_power_dist * env.abs_coeff();\n\n data.shifts[index] += weight_power_dist * env.shift_coeff();\n\n\n\n phot.ray_mut().travel(dist);\n\n}\n", "file_path": "src/sim/mcrt/travel.rs", "rank": 19, "score": 221618.97197724626 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\n#[must_use]\n\npub fn evolve(\n\n input: &Input,\n\n voxel_size_sq: &Vec3,\n\n time: f64,\n\n mut dt: f64,\n\n mut values: Array3<f64>,\n\n rates: Array3<f64>,\n\n) -> (Array3<f64>, Array3<f64>) 
{\n\n debug_assert!(time > 0.0);\n\n debug_assert!(dt > 0.0);\n\n\n\n // Constants.\n\n let steps = 1 + (time / dt) as usize;\n\n dt = time / steps as f64;\n\n\n\n // Threading.\n\n let rates = Mutex::new(rates);\n\n let num_threads = input\n\n .sett\n\n .num_threads()\n", "file_path": "src/sim/diffuse/run.rs", "rank": 20, "score": 207916.5583698803 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn is_descending(vec: &[f64]) -> bool {\n\n for (b, a) in vec.iter().zip(vec.iter().skip(1)) {\n\n if a > b {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n\n/// Get the mode of a slice.\n", "file_path": "src/math/slice.rs", "rank": 21, "score": 206707.9004719049 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn multi_thread(\n\n input: &Input,\n\n mut values: Array3<f64>,\n\n out_dir: &Path,\n\n) -> Result<Array3<f64>, Error> {\n\n // Constants.\n\n let voxel_size = input.grid.voxel_size();\n\n let voxel_size_sq = Vec3::new(\n\n voxel_size.x * voxel_size.x,\n\n voxel_size.y * voxel_size.y,\n\n voxel_size.z * voxel_size.z,\n\n );\n\n let min_voxel_size_sq = voxel_size_sq.min();\n\n\n\n let max_coeff = input\n\n .coeffs\n\n .max()\n\n .expect(\"Failed to determine maximum coefficient.\");\n\n let max_dt = min_voxel_size_sq / (8.0 * max_coeff);\n\n let dt = max_dt * (1.0 - input.sett.quality()).min(1.0).max(0.0);\n", "file_path": "src/sim/diffuse/run.rs", "rank": 22, "score": 205025.7625477916 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn multi_thread<'a>(input: &'a Input) -> Result<Output<'a>, Error> {\n\n let pb = ProgressBar::new(\"Multi-threaded\", input.grid.num_cells());\n\n let pb = Arc::new(Mutex::new(pb));\n\n\n\n let num_threads = input\n\n .sett\n\n .num_threads()\n\n .unwrap_or(std::usize::MAX)\n\n .min(num_cpus::get());\n\n let threads: Vec<_> = (0..num_threads).collect();\n\n let mut out: Vec<_> = threads\n\n .par_iter()\n\n .map(|_id| thread(input, &Arc::clone(&pb)))\n\n .collect();\n\n 
pb.lock()?.finish_with_message(\"Mapping complete.\");\n\n\n\n let mut data = out.pop().expect(\"No data received.\");\n\n while let Some(o) = out.pop() {\n\n data += &o;\n\n }\n\n\n\n Ok(data)\n\n}\n\n\n\n/// Run a Cartography simulation using a single thread.\n\n/// # Errors\n\n/// if the progress bar can not be locked.\n", "file_path": "src/sim/cartographer/run.rs", "rank": 23, "score": 195941.21505338908 }, { "content": "#[inline]\n\npub fn single_thread<'a>(input: &'a Input) -> Result<Output<'a>, Error> {\n\n let pb = ProgressBar::new(\"Single-threaded\", input.grid.num_cells());\n\n let pb = Arc::new(Mutex::new(pb));\n\n\n\n Ok(thread(input, &pb))\n\n}\n\n\n\n/// Thread control function.\n", "file_path": "src/sim/cartographer/run.rs", "rank": 24, "score": 195937.09662805824 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn multi_thread<'a>(engine: Engine, input: &'a Input) -> Result<Output<'a>, Error> {\n\n let pb = ProgressBar::new(\"Multi-threaded\", input.cam.num_samples());\n\n let pb = Arc::new(Mutex::new(pb));\n\n\n\n let num_threads = input\n\n .sett\n\n .num_threads()\n\n .unwrap_or(std::usize::MAX)\n\n .min(num_cpus::get());\n\n let threads: Vec<_> = (0..num_threads).collect();\n\n let mut out: Vec<_> = threads\n\n .par_iter()\n\n .map(|_id| thread(engine, input, &Arc::clone(&pb)))\n\n .collect();\n\n pb.lock()?.finish_with_message(\"Simulation complete.\");\n\n\n\n let mut data = out.pop().expect(\"No data received.\");\n\n while let Some(o) = out.pop() {\n\n data += &o;\n\n }\n\n\n\n Ok(data)\n\n}\n\n\n\n/// Run a rendering simulation using a single thread.\n\n/// # Errors\n\n/// if the progress bar can not be locked.\n", "file_path": "src/sim/render/run.rs", "rank": 25, "score": 185770.67629453645 }, { "content": "#[inline]\n\npub fn single_thread<'a>(engine: Engine, input: &'a Input) -> Result<Output<'a>, Error> {\n\n let pb = ProgressBar::new(\"Single-threaded\", input.cam.num_samples());\n\n let pb = 
Arc::new(Mutex::new(pb));\n\n\n\n Ok(thread(engine, input, &pb))\n\n}\n\n\n\n/// Thread control function.\n", "file_path": "src/sim/render/run.rs", "rank": 26, "score": 185766.5578692056 }, { "content": "#[inline]\n\nfn sky_colour(input: &Input, trace: &Tracer, data: &mut Output, pixel: [usize; 2]) {\n\n // Colour calculation.\n\n let u = trace.ray().dir().z.abs();\n\n let col = input.shader.sky_grad().get(u as f32);\n\n\n\n // Get remaining weight.\n\n let weight = trace.weight();\n\n\n\n // Data recording.\n\n data.shadow[pixel] += weight;\n\n data.light[pixel] += weight;\n\n\n\n // Colouring.\n\n data.colour.pixels_mut()[pixel] += col * weight as f32;\n\n}\n\n\n\n/// Determine the colour of a ray-surface collision.\n\n/// Record the data.\n", "file_path": "src/sim/render/engines/cross.rs", "rank": 27, "score": 184773.8043738958 }, { "content": "#[inline]\n\nfn sky_colour(input: &Input, trace: &Tracer, data: &mut Output, pixel: [usize; 2]) {\n\n // Colour calculation.\n\n let u = trace.ray().dir().z.abs();\n\n let col = input.shader.sky_grad().get(u as f32);\n\n\n\n // Get remaining weight.\n\n let weight = trace.weight();\n\n\n\n // Data recording.\n\n data.shadow[pixel] += weight;\n\n data.light[pixel] += weight;\n\n\n\n // Colouring.\n\n data.colour.pixels_mut()[pixel] += col * weight as f32;\n\n}\n\n\n\n/// Determine the colour of a ray-surface collision.\n\n/// Record the data.\n", "file_path": "src/sim/render/engines/antler.rs", "rank": 28, "score": 184773.8043738958 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn evolve(\n\n input: &Input,\n\n voxel_size_sq: &Vec3,\n\n time: f64,\n\n mut dt: f64,\n\n mut values: Array4<f64>,\n\n swap: Array4<f64>,\n\n) -> Result<(Array4<f64>, Array4<f64>), Error> {\n\n debug_assert!(time > 0.0);\n\n debug_assert!(dt > 0.0);\n\n\n\n // Constants.\n\n let steps = 1 + (time / dt) as usize;\n\n dt = time / steps as f64;\n\n\n\n // Threading.\n\n let mut swap = Mutex::new(swap);\n\n\n\n // Evolution.\n\n 
let mut pb = ProgressBar::new(\"Stepping\", steps);\n", "file_path": "src/sim/reactor/run.rs", "rank": 29, "score": 159720.25478866138 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn surface(\n\n rng: &mut ThreadRng,\n\n hit: &Hit<Attribute>,\n\n phot: &mut Photon,\n\n env: &mut Local,\n\n data: &mut Output,\n\n) {\n\n match *hit.tag() {\n\n Attribute::Interface(inside, outside) => {\n\n // Reference materials.\n\n let (curr_mat, next_mat) = if hit.side().is_inside() {\n\n (inside, outside)\n\n } else {\n\n (outside, inside)\n\n };\n\n\n\n // Find local optical environments.\n\n let curr_env = curr_mat.sample_environment(phot.wavelength());\n\n let next_env = next_mat.sample_environment(phot.wavelength());\n\n\n", "file_path": "src/sim/mcrt/surface.rs", "rank": 30, "score": 159720.25478866138 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn photo(\n\n frames: &[Frame],\n\n input: &Input,\n\n mut data: &mut Output,\n\n mut rng: &mut ThreadRng,\n\n mut phot: Photon,\n\n) {\n\n // Check photon is within the grid.\n\n if let Some(index) = input.grid.gen_index(phot.ray().pos()) {\n\n data.emission[index] += phot.power() * phot.weight();\n\n } else {\n\n panic!(\"Photon was not emitted within the grid.\");\n\n }\n\n\n\n // Common constants.\n\n let bump_dist = input.sett.bump_dist();\n\n let loop_limit = input.sett.loop_limit();\n\n let min_weight = input.sett.min_weight();\n\n let roulette_barrels = input.sett.roulette_barrels() as f64;\n\n let roulette_survive_prob = 1.0 / roulette_barrels;\n", "file_path": "src/sim/mcrt/engines/photo.rs", "rank": 31, "score": 157794.1081991207 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn fluorescence(\n\n flu_concs: &Array3<f64>,\n\n flu_spec: &Formula,\n\n input: &Input,\n\n mut data: &mut Output,\n\n mut rng: &mut ThreadRng,\n\n mut phot: Photon,\n\n) {\n\n // Check photon is within the grid.\n\n if let Some(index) = input.grid.gen_index(phot.ray().pos()) {\n\n 
data.emission[index] += phot.power() * phot.weight();\n\n } else {\n\n panic!(\"Photon was not emitted within the grid.\");\n\n }\n\n\n\n // Common constants.\n\n let bump_dist = input.sett.bump_dist();\n\n let loop_limit = input.sett.loop_limit();\n\n let min_weight = input.sett.min_weight();\n\n let roulette_barrels = input.sett.roulette_barrels() as f64;\n", "file_path": "src/sim/mcrt/engines/fluorescence.rs", "rank": 32, "score": 157794.1081991207 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn antler(\n\n input: &Input,\n\n rng: &mut ThreadRng,\n\n mut trace: Tracer,\n\n data: &mut Output,\n\n pixel: [usize; 2],\n\n) {\n\n // Watch time.\n\n let start_time = Instant::now();\n\n\n\n // Common constants.\n\n let bump_dist = input.sett.bump_dist();\n\n let loop_limit = input.sett.loop_limit();\n\n let min_weight = input.sett.min_weight();\n\n\n\n // Main event loop.\n\n let mut num_loops = 0;\n\n while let Some(hit) = input.tree.scan(trace.ray().clone(), bump_dist, 1000.0) {\n\n // Loop limit check.\n\n if num_loops >= loop_limit {\n", "file_path": "src/sim/render/engines/antler.rs", "rank": 33, "score": 157794.1081991207 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn cross(\n\n input: &Input,\n\n rng: &mut ThreadRng,\n\n mut trace: Tracer,\n\n data: &mut Output,\n\n pixel: [usize; 2],\n\n) {\n\n // Watch time.\n\n let start_time = Instant::now();\n\n\n\n // Common constants.\n\n let bump_dist = input.sett.bump_dist();\n\n let loop_limit = input.sett.loop_limit();\n\n let min_weight = input.sett.min_weight();\n\n\n\n let clip = Cube::new(\n\n Pos3::new(1.0e-1, 1.0e-5, 4.0e-3),\n\n Pos3::new(-1.0e-1, -1.0e-5, -4.0e-3),\n\n );\n\n if let Some(dist) = clip.dist(trace.ray()) {\n", "file_path": "src/sim/render/engines/cross.rs", "rank": 34, "score": 157794.1081991207 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn raman(\n\n _detector_pos: &Pos3,\n\n input: &Input,\n\n mut data: &mut Output,\n\n mut 
rng: &mut ThreadRng,\n\n mut phot: Photon,\n\n) {\n\n // Check photon is within the grid.\n\n if let Some(index) = input.grid.gen_index(phot.ray().pos()) {\n\n data.emission[index] += phot.power() * phot.weight();\n\n } else {\n\n panic!(\"Photon was not emitted within the grid.\");\n\n }\n\n\n\n // Common constants.\n\n let bump_dist = input.sett.bump_dist();\n\n let loop_limit = input.sett.loop_limit();\n\n let min_weight = input.sett.min_weight();\n\n let roulette_barrels = input.sett.roulette_barrels() as f64;\n\n let roulette_survive_prob = 1.0 / roulette_barrels;\n", "file_path": "src/sim/mcrt/engines/raman.rs", "rank": 35, "score": 157794.1081991207 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn multi_thread(\n\n input: &Input,\n\n mut values: Array4<f64>,\n\n out_dir: &Path,\n\n) -> Result<Array4<f64>, Error> {\n\n // Constants.\n\n let voxel_size = input.grid.voxel_size();\n\n let voxel_size_sq = Vec3::new(\n\n voxel_size.x * voxel_size.x,\n\n voxel_size.y * voxel_size.y,\n\n voxel_size.z * voxel_size.z,\n\n );\n\n let min_voxel_size_sq = voxel_size_sq.min();\n\n\n\n let max_coeff = input\n\n .coeffs\n\n .max()\n\n .expect(\"Failed to determine maximum coefficient.\");\n\n let max_diff_dt = min_voxel_size_sq / (8.0 * max_coeff);\n\n let dt = max_diff_dt * (1.0 - input.sett.d_quality()).min(1.0).max(0.0);\n", "file_path": "src/sim/reactor/run.rs", "rank": 36, "score": 157794.1081991207 }, { "content": "#[inline]\n\npub fn io_dirs(\n\n input: Option<PathBuf>,\n\n output: Option<PathBuf>,\n\n) -> Result<(PathBuf, PathBuf), err::Error> {\n\n let exec_name = util::exec::name()?;\n\n\n\n let in_dir = if let Some(input) = input {\n\n input\n\n } else {\n\n root()?.join(\"input\").join(&exec_name)\n\n };\n\n\n\n let out_dir = if let Some(output) = output {\n\n output\n\n } else {\n\n root()?.join(\"output\").join(exec_name)\n\n };\n\n\n\n let in_dir = input_dir(&in_dir)?;\n\n let out_dir = output_dir(&out_dir)?;\n\n Ok((in_dir, 
out_dir))\n\n}\n", "file_path": "src/util/install/dir.rs", "rank": 37, "score": 157789.98977378986 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\npub fn multi_thread<'a>(\n\n engine: &Engine,\n\n input: &'a Input,\n\n output: &Output<'a>,\n\n) -> Result<Output<'a>, Error> {\n\n let pb = ProgressBar::new(\"MCRT\", input.sett.num_phot());\n\n let pb = Arc::new(Mutex::new(pb));\n\n\n\n let num_threads = input\n\n .sett\n\n .num_threads()\n\n .unwrap_or(std::usize::MAX)\n\n .min(num_cpus::get());\n\n let threads: Vec<_> = (0..num_threads).collect();\n\n let mut out: Vec<_> = threads\n\n .par_iter()\n\n .map(|_id| thread(engine, input, output.clone(), &Arc::clone(&pb)))\n\n .collect();\n\n pb.lock()?.finish_with_message(\"Simulation complete.\");\n\n\n\n let mut data = out.pop().expect(\"No data received.\");\n\n while let Some(o) = out.pop() {\n\n data += &o;\n\n }\n\n\n\n Ok(data)\n\n}\n\n\n\n/// Thread control function.\n", "file_path": "src/sim/mcrt/run.rs", "rank": 38, "score": 150212.55055072796 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\nfn find_mat(input: &Input, pos: &Pos3) -> Option<usize> {\n\n let bump_dist = input.sett.bump_dist();\n\n let loop_limit = input.sett.loop_limit();\n\n\n\n let caster = input.sett.caster();\n\n let num_casts = caster.num_casts();\n\n\n\n let grid = input.grid.boundary();\n\n\n\n for m in 0..num_casts {\n\n let mut ray = caster.gen_ray(*pos, m);\n\n\n\n let mut num_loops = 0;\n\n\n\n while grid.contains(ray.pos()) {\n\n // Loop limit check.\n\n if num_loops >= loop_limit {\n\n println!(\"[WARN] : Terminating tracer: loop limit reached.\");\n\n break;\n\n }\n", "file_path": "src/sim/cartographer/engine.rs", "rank": 39, "score": 146305.2039000287 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\nfn calc_diffuse_rates(\n\n input: &Input,\n\n voxel_size_sq: &Vec3,\n\n values: &Array3<f64>,\n\n rates: &Mutex<Array3<f64>>,\n\n pb: &Arc<Mutex<SilentProgressBar>>,\n\n) {\n\n // 
Constants.\n\n let res = *input.grid.res();\n\n let block_size = input.sett.block_size();\n\n\n\n // Allocation.\n\n let mut holder = Array1::zeros(block_size);\n\n\n\n // Rate calculations.\n\n while let Some((start, end)) = {\n\n let mut pb = pb.lock().expect(\"Could not lock progress bar.\");\n\n let b = pb.block(block_size);\n\n std::mem::drop(pb);\n\n b\n", "file_path": "src/sim/diffuse/run.rs", "rank": 40, "score": 141773.7045592262 }, { "content": "#[inline]\n\n#[must_use]\n\nfn wavelength_to_col(wavelength: f64) -> Colour {\n\n debug_assert!(wavelength > 0.0);\n\n\n\n let gamma = 0.8;\n\n\n\n let (r, g, b) = if (380.0e-9..=440.0e-9).contains(&wavelength) {\n\n let attenuation = 0.7_f64.mul_add((wavelength - 380.0e-9) / (440.0e-9 - 380.0e-9), 0.3);\n\n (\n\n ((-(wavelength - 440.0e-9) / (440.0e-9 - 380.0e-9)) * attenuation).powf(gamma),\n\n 0.0,\n\n attenuation.powf(gamma),\n\n )\n\n } else if (440.0e-9..=490.0e-9).contains(&wavelength) {\n\n (\n\n 0.0,\n\n ((wavelength - 440.0e-9) / (490.0e-9 - 440.0e-9)).powf(gamma),\n\n 1.0,\n\n )\n\n } else if (490.0e-9..=510.0e-9).contains(&wavelength) {\n\n (\n", "file_path": "src/sim/mcrt/surface.rs", "rank": 41, "score": 140935.24145983648 }, { "content": "#[inline]\n\npub fn sub_sub_section(title: &str) {\n\n println!(\"---- {}\", colour(title));\n\n}\n\n\n\n/// Colour a given message with the appropriate section colour.\n", "file_path": "src/util/fmt/banner.rs", "rank": 42, "score": 138903.63841488172 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn width(default: usize) -> usize {\n\n if let Some((width, _)) = terminal_size() {\n\n width.0 as usize\n\n } else {\n\n default\n\n }\n\n}\n", "file_path": "src/util/fmt/term.rs", "rank": 43, "score": 135808.09727311524 }, { "content": "#[inline]\n\npub fn name() -> Result<String, Error> {\n\n let args: Vec<String> = args().collect();\n\n\n\n Ok(Path::new(&args[0])\n\n .file_stem()\n\n .ok_or(\"Missing filename.\")?\n\n .to_str()\n\n .ok_or(\"Missing string.\")?\n\n 
.to_owned())\n\n}\n", "file_path": "src/util/install/exec.rs", "rank": 44, "score": 135803.7701187891 }, { "content": "#[inline]\n\nfn diffuse(\n\n input: &Input,\n\n voxel_size_sq: &Vec3,\n\n dt: f64,\n\n mut values: Array4<f64>,\n\n rates: Mutex<Array4<f64>>,\n\n) -> Result<(Array4<f64>, Mutex<Array4<f64>>), Error> {\n\n debug_assert!(dt > 0.0);\n\n\n\n // Calculate diffusion rates.\n\n let spb = Arc::new(Mutex::new(ProgressBar::new(\n\n \"Diffusing\",\n\n values.len() / input.specs.len(),\n\n )));\n\n let threads: Vec<_> = (0..num_cpus::get()).collect();\n\n let _out: Vec<_> = threads\n\n .par_iter()\n\n .map(|_id| calc_diffuse_rates(input, voxel_size_sq, &values, &rates, &Arc::clone(&spb)))\n\n .collect();\n\n\n\n // Apply diffusion.\n\n values += &(&(*rates.lock()?) * dt);\n\n\n\n Ok((values, rates))\n\n}\n\n\n\n/// Calculate the diffusion rates.\n", "file_path": "src/sim/reactor/run.rs", "rank": 45, "score": 135008.61170818083 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\n#[must_use]\n\nfn thread<'a>(input: &'a Input, pb: &Arc<Mutex<ProgressBar>>) -> Output<'a> {\n\n let res = *input.grid.res();\n\n let mut data = Output::new(input.mat_reg, res);\n\n\n\n let mut rng = thread_rng();\n\n\n\n let block_size = input.sett.block_size();\n\n while let Some((start, end)) = {\n\n let mut pb = pb.lock().expect(\"Could not lock progress bar.\");\n\n let b = pb.block(block_size);\n\n std::mem::drop(pb);\n\n b\n\n } {\n\n for n in start..end {\n\n let index = linear_to_three_dim(n, &res);\n\n engine(input, &mut rng, index, &mut data);\n\n }\n\n }\n\n\n\n data\n\n}\n", "file_path": "src/sim/cartographer/run.rs", "rank": 46, "score": 134234.9272634466 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\nfn calc_diffuse_rates(\n\n input: &Input,\n\n voxel_size_sq: &Vec3,\n\n values: &Array4<f64>,\n\n rates: &Mutex<Array4<f64>>,\n\n pb: &Arc<Mutex<ProgressBar>>,\n\n) {\n\n // Constants.\n\n let num_specs = input.specs.len();\n\n let res = 
*input.grid.res();\n\n let block_size = input.sett.d_block_size();\n\n\n\n // Allocation.\n\n let mut holder = Array2::zeros([num_specs, block_size]);\n\n\n\n // Rate calculations.\n\n while let Some((start, end)) = {\n\n let mut pb = pb.lock().expect(\"Could not lock progress bar.\");\n\n let b = pb.block(block_size);\n\n std::mem::drop(pb);\n", "file_path": "src/sim/reactor/run.rs", "rank": 47, "score": 131078.6875405172 }, { "content": "#[inline]\n\npub fn title(term_width: usize, title: &str) {\n\n let title = title.to_uppercase();\n\n\n\n let (left_bar, right_bar) = if term_width < ((title.len() * 2) + 11) {\n\n (4, 4)\n\n } else {\n\n let left_bar = (term_width - (title.len() * 2) - 3) / 2;\n\n (left_bar, term_width - (title.len() * 2) - 3 - left_bar)\n\n };\n\n\n\n print!(\"{} \", \"\\u{2588}\".repeat(left_bar));\n\n\n\n for (pos, ch) in title.chars().enumerate() {\n\n match pos % 6 {\n\n 0 => print!(\" {}\", format!(\"{}\", ch).bright_red().bold()),\n\n 1 => print!(\" {}\", format!(\"{}\", ch).bright_yellow().bold()),\n\n 2 => print!(\" {}\", format!(\"{}\", ch).bright_green().bold()),\n\n 3 => print!(\" {}\", format!(\"{}\", ch).bright_cyan().bold()),\n\n 4 => print!(\" {}\", format!(\"{}\", ch).bright_blue().bold()),\n\n 5 => print!(\" {}\", format!(\"{}\", ch).bright_magenta().bold()),\n\n _ => unreachable!(),\n\n }\n\n }\n\n\n\n println!(\" {}\", \"\\u{2588}\".repeat(right_bar));\n\n}\n\n\n\n/// Print a section bar to the terminal.\n", "file_path": "src/util/fmt/banner.rs", "rank": 48, "score": 128477.3800723986 }, { "content": "#[inline]\n\npub fn section(term_width: usize, title: &str) {\n\n let title = title.to_uppercase();\n\n unsafe {\n\n SECTION += 1;\n\n }\n\n\n\n print!(\"====\");\n\n print!(\" {}\", colour(&title).bold());\n\n\n\n let mut cur_len = 5 + title.len();\n\n if cur_len >= term_width {\n\n println!();\n\n return;\n\n }\n\n\n\n print!(\" \");\n\n cur_len += 1;\n\n while cur_len < term_width {\n\n print!(\"=\");\n\n cur_len += 1;\n\n 
}\n\n\n\n println!();\n\n}\n\n\n\n/// Print a sub-section message to the terminal.\n", "file_path": "src/util/fmt/banner.rs", "rank": 49, "score": 128477.3800723986 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\n#[must_use]\n\nfn thread<'a>(engine: Engine, input: &'a Input, pb: &Arc<Mutex<ProgressBar>>) -> Output<'a> {\n\n let res = *input.cam.res();\n\n let mut data = Output::new(res, input.shader.data_grad(), input.img_id);\n\n\n\n let mut rng = thread_rng();\n\n\n\n let super_samples = input.cam.num_super_samples();\n\n let ss_power = input.cam.ss_power();\n\n let init_weight = 1.0 / super_samples as f64;\n\n\n\n let block_size = input.sett.block_size();\n\n while let Some((start, end)) = {\n\n let mut pb = pb.lock().expect(\"Could not lock progress bar.\");\n\n let b = pb.block(block_size);\n\n std::mem::drop(pb);\n\n b\n\n } {\n\n for n in start..end {\n\n let p = n / super_samples;\n\n let s = n - (p * super_samples);\n", "file_path": "src/sim/render/run.rs", "rank": 50, "score": 127827.15548842857 }, { "content": "#[inline]\n\npub fn sub_section(term_width: usize, title: &str) {\n\n println!(\n\n \"---- {} {}\",\n\n colour(title).bold(),\n\n \"-\".repeat(term_width - 6 - title.len())\n\n );\n\n}\n\n\n\n/// Print a sub-sub-section message to the terminal.\n", "file_path": "src/util/fmt/banner.rs", "rank": 51, "score": 126888.53460685918 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn is_ascending<T: PartialOrd>(vec: &[T]) -> bool {\n\n for (b, a) in vec.iter().zip(vec.iter().skip(1)) {\n\n if a < b {\n\n return false;\n\n }\n\n }\n\n\n\n true\n\n}\n\n\n\n/// Determine if the list is sorted in descending order.\n", "file_path": "src/math/slice.rs", "rank": 52, "score": 125157.71866712008 }, { "content": "#[inline]\n\npub fn root() -> Result<PathBuf, std::env::VarError> {\n\n Ok(Path::new(&var(\"ARCTK_DIR\")?).to_path_buf())\n\n}\n\n\n\n/// Initialise the current working directory.\n", "file_path": "src/util/install/dir.rs", "rank": 53, 
"score": 122032.4559564081 }, { "content": "#[inline]\n\npub fn from_json<T>(path: &Path) -> Result<T, Error>\n\nwhere\n\n for<'de> T: Deserialize<'de>,\n\n{\n\n let s = read_to_string(path)?;\n\n Ok(json5::from_str(&s)?)\n\n}\n\n\n\n/// Deserialise the type in json format.\n\n/// # Errors\n\n/// if string can not be serialised into an instance of the required type.\n", "file_path": "src/fs/extensions/json.rs", "rank": 54, "score": 120737.94869168228 }, { "content": "#[inline]\n\npub fn from_json_str<T>(s: &str) -> Result<T, Error>\n\nwhere\n\n for<'de> T: Deserialize<'de>,\n\n{\n\n Ok(json5::from_str(s)?)\n\n}\n", "file_path": "src/fs/extensions/json.rs", "rank": 55, "score": 120737.94869168228 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn linear_to_three_dim(n: usize, res: &[usize; 3]) -> [usize; 3] {\n\n debug_assert!(n < (res[X] * res[Y] * res[Z]));\n\n\n\n let zi = n % res[Z];\n\n let yi = (n / res[Z]) % res[Y];\n\n let xi = n / (res[Y] * res[Z]);\n\n\n\n [xi, yi, zi]\n\n}\n", "file_path": "src/tools/index.rs", "rank": 56, "score": 119898.04451318191 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn to_string(grad: &Gradient<LinSrgba>, len: usize) -> String {\n\n let mut scale = String::new();\n\n\n\n for i in 0..len {\n\n let x = i as f64 / (len - 1) as f64;\n\n\n\n let col = grad.get(x as f32);\n\n\n\n let (r, g, b) = (\n\n (col.red * 255.0) as u8,\n\n (col.green * 255.0) as u8,\n\n (col.blue * 255.0) as u8,\n\n );\n\n scale.push_str(&format!(\"{}\", \" \".bg(RGB8::new(r, g, b,))));\n\n }\n\n\n\n scale\n\n}\n", "file_path": "src/util/fmt/gradient.rs", "rank": 57, "score": 119210.18575516203 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn two_dim_to_linear(pos: [usize; 2], res: &[usize; 2]) -> usize {\n\n debug_assert!(pos[X] < res[X]);\n\n debug_assert!(pos[Y] < res[Y]);\n\n\n\n (pos[Y] * res[Y]) + pos[X]\n\n}\n\n\n\n/// Create the next three-dimensional index from the given linear index.\n", "file_path": "src/tools/index.rs", "rank": 58, 
"score": 118309.19904764248 }, { "content": "#[inline]\n\npub fn as_json<T: Serialize>(instance: &T, path: &Path) -> Result<(), Error> {\n\n println!(\"[SAVE] {}\", path.display());\n\n let s = to_string(instance)?;\n\n Ok(write(path, s)?)\n\n}\n", "file_path": "src/fs/save.rs", "rank": 59, "score": 114656.39454377192 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn kinds<T: Clone + Eq + std::hash::Hash>(numbers: &[T]) -> usize {\n\n let mut counts = std::collections::HashMap::new();\n\n for n in numbers {\n\n if !counts.contains_key(n) {\n\n counts.insert(n, true);\n\n }\n\n }\n\n counts.len()\n\n}\n", "file_path": "src/math/slice.rs", "rank": 60, "score": 111324.6491145762 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn mode<T: Clone + Eq + std::hash::Hash>(numbers: &[T]) -> Option<T> {\n\n let mut counts = std::collections::HashMap::new();\n\n\n\n numbers.iter().cloned().max_by_key(|n| {\n\n let count = counts.entry(n.clone()).or_insert(0);\n\n *count += 1;\n\n *count\n\n })\n\n}\n\n\n\n/// Get the number of kinds within a slice.\n", "file_path": "src/math/slice.rs", "rank": 61, "score": 108253.51899371795 }, { "content": "//! 
Simulation input.\n\n\n\nuse crate::{fmt_report, geom::Grid, sim::diffuse::Settings, util::fmt::Analyze};\n\nuse ndarray::Array3;\n\nuse std::fmt::{Display, Error, Formatter};\n\n\n\n/// Diffuse simulation resources conglomerate.\n\npub struct Input<'a> {\n\n /// Map of diffusion coeffs.\n\n pub coeffs: &'a Array3<f64>,\n\n /// Map sources/sinks.\n\n pub sources: &'a Array3<f64>,\n\n /// Measurement grid.\n\n pub grid: &'a Grid,\n\n /// General settings.\n\n pub sett: &'a Settings,\n\n}\n\n\n\nimpl<'a> Input<'a> {\n\n /// Construct a new instance.\n", "file_path": "src/sim/diffuse/input.rs", "rank": 62, "score": 100564.60082076746 }, { "content": " #[inline]\n\n #[must_use]\n\n pub const fn new(\n\n coeffs: &'a Array3<f64>,\n\n sources: &'a Array3<f64>,\n\n grid: &'a Grid,\n\n sett: &'a Settings,\n\n ) -> Self {\n\n Self {\n\n coeffs,\n\n sources,\n\n grid,\n\n sett,\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Input<'_> {\n\n #[inline]\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {\n\n writeln!(fmt, \"...\")?;\n\n fmt_report!(fmt, self.coeffs.display(), \"diffusion coefficients\");\n\n fmt_report!(fmt, self.sources.display(), \"sources/sinks\");\n\n fmt_report!(fmt, self.grid, \"measurement grid\");\n\n fmt_report!(fmt, self.sett, \"settings\");\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/sim/diffuse/input.rs", "rank": 63, "score": 100560.84805459843 }, { "content": "#[inline]\n\nfn input_dir(dir: &Path) -> Result<PathBuf, std::io::Error> {\n\n set_current_dir(dir)?;\n\n current_dir()\n\n}\n\n\n\n/// Create an output directory.\n", "file_path": "src/util/install/dir.rs", "rank": 64, "score": 100309.62778032225 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\n#[must_use]\n\nfn react(\n\n mut values: Array1<f64>,\n\n sources: &Array1<f64>,\n\n rates: &mut [Array1<f64>; 4],\n\n reactor: &Reactor,\n\n total_time: f64,\n\n fraction: f64,\n\n min_time: f64,\n\n) -> Array1<f64> {\n\n debug_assert!(total_time > 0.0);\n\n debug_assert!(fraction > 
0.0);\n\n debug_assert!(fraction < 1.0);\n\n debug_assert!(min_time <= total_time);\n\n\n\n let mut time = 0.0;\n\n while time < total_time {\n\n // Rates and dt.\n\n rates[0] = reactor.deltas(&values.view());\n\n\n\n let dt = (((&values + MIN_POSITIVE) / &rates[0])\n", "file_path": "src/sim/flask/run.rs", "rank": 65, "score": 85813.17581734774 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\n#[must_use]\n\nfn reaction(\n\n n: usize,\n\n mut values: Array2<f64>,\n\n mut rates: [Array1<f64>; 4],\n\n reactor: &Reactor,\n\n total_time: f64,\n\n fraction: f64,\n\n min_time: f64,\n\n) -> (Array2<f64>, [Array1<f64>; 4]) {\n\n debug_assert!(total_time > 0.0);\n\n debug_assert!(fraction > 0.0);\n\n debug_assert!(fraction < 1.0);\n\n debug_assert!(min_time <= total_time);\n\n\n\n let mut vs = values.index_axis_mut(Axis(1), n);\n\n\n\n let mut time = 0.0;\n\n while time < total_time {\n\n // Rates and dt.\n\n rates[0] = reactor.deltas(&vs.view());\n", "file_path": "src/sim/reactor/run.rs", "rank": 66, "score": 85813.17581734774 }, { "content": "#[inline]\n\nfn react(\n\n input: &Input,\n\n dt: f64,\n\n values: Array4<f64>,\n\n new_values: Mutex<Array4<f64>>,\n\n) -> Result<(Array4<f64>, Array4<f64>), Error> {\n\n debug_assert!(dt > 0.0);\n\n\n\n let spb = Arc::new(Mutex::new(ProgressBar::new(\n\n \"Reacting\",\n\n values.len() / input.specs.len(),\n\n )));\n\n let num_threads = input\n\n .sett\n\n .num_threads()\n\n .unwrap_or(std::usize::MAX)\n\n .min(num_cpus::get());\n\n let threads: Vec<_> = (0..num_threads).collect();\n\n let _out: Vec<_> = threads\n\n .par_iter()\n\n .map(|_id| react_impl(input, &values, &new_values, dt, &Arc::clone(&spb)))\n\n .collect();\n\n\n\n let new_values = new_values.into_inner()?;\n\n\n\n Ok((new_values, values))\n\n}\n\n\n\n/// Enact the reactions.\n", "file_path": "src/sim/reactor/run.rs", "rank": 67, "score": 85808.53096568184 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\nfn react_impl(\n\n input: &Input,\n\n 
values: &Array4<f64>,\n\n new_values: &Mutex<Array4<f64>>,\n\n dt: f64,\n\n pb: &Arc<Mutex<ProgressBar>>,\n\n) {\n\n debug_assert!(dt > 0.0);\n\n\n\n // Constants.\n\n let num_specs = input.specs.len();\n\n let res = *input.grid.res();\n\n let block_size = input.sett.r_block_size();\n\n let fraction = 1.0 - input.sett.r_quality();\n\n let min_time = input.sett.min_time();\n\n\n\n // Allocation.\n\n let mut holder = Array2::zeros([num_specs, block_size]);\n\n let mut rates: [Array1<f64>; 4] = [\n\n Array1::zeros(num_specs),\n", "file_path": "src/sim/reactor/run.rs", "rank": 68, "score": 84809.77958247592 }, { "content": "#[allow(clippy::too_many_arguments)]\n\n#[inline]\n\nfn colour(\n\n input: &Input,\n\n rng: &mut ThreadRng,\n\n trace: &mut Tracer,\n\n norm: &Dir3,\n\n grad: &Gradient,\n\n data: &mut Output,\n\n pixel: [usize; 2],\n\n abs_frac: f64,\n\n) {\n\n debug_assert!(abs_frac > 0.0);\n\n debug_assert!(abs_frac <= 1.0);\n\n\n\n // Colour calculation.\n\n let shadow = lighting::shadow(input, rng, trace.ray(), norm);\n\n let light = lighting::light(input, trace.ray(), norm);\n\n let base_col = grad.get(light as f32);\n\n let col = Gradient::new(vec![Colour::default(), base_col]).get(shadow as f32);\n\n\n\n // Weighting.\n", "file_path": "src/sim/render/engines/cross.rs", "rank": 69, "score": 84805.44736374794 }, { "content": "#[allow(clippy::too_many_arguments)]\n\n#[inline]\n\nfn colour(\n\n input: &Input,\n\n rng: &mut ThreadRng,\n\n trace: &mut Tracer,\n\n norm: &Dir3,\n\n grad: &Gradient,\n\n data: &mut Output,\n\n pixel: [usize; 2],\n\n abs_frac: f64,\n\n) {\n\n debug_assert!(abs_frac > 0.0);\n\n debug_assert!(abs_frac <= 1.0);\n\n\n\n // Colour calculation.\n\n let shadow = lighting::shadow(input, rng, trace.ray(), norm);\n\n let light = lighting::light(input, trace.ray(), norm);\n\n let base_col = grad.get(light as f32);\n\n let col = Gradient::new(vec![Colour::default(), base_col]).get(shadow as f32);\n\n\n\n // Weighting.\n", "file_path": 
"src/sim/render/engines/antler.rs", "rank": 70, "score": 84805.44736374794 }, { "content": "/// Types implementing this trait can be loaded from a file.\n\npub trait File\n\nwhere\n\n Self: std::marker::Sized,\n\n{\n\n /// Load an instance of this type from a given path.\n\n /// # Errors\n\n /// if the target file can not be found,\n\n /// or the read string can not be serialised into an instance of the required type.\n\n #[inline]\n\n fn new_from_file(path: &Path) -> Result<Self, Error> {\n\n println!(\"[LOAD] {}\", path.display());\n\n Self::load(path)\n\n }\n\n\n\n /// Deserialize the type from a given file.\n\n /// # Errors\n\n /// if the target file can not be found,\n\n /// or the read string can not be serialised into an instance of the required type.\n\n fn load(path: &Path) -> Result<Self, Error>;\n\n}\n", "file_path": "src/fs/file.rs", "rank": 71, "score": 80893.07662200472 }, { "content": "/// Types implementing this trait can be built into another type by loading in additional resources.\n\npub trait Load {\n\n /// End type to be built.\n\n type Inst;\n\n\n\n /// Build the instance type.\n\n /// # Errors\n\n /// if a component could not be built successfully.\n\n fn load(self, in_dir: &Path) -> Result<Self::Inst, Error>;\n\n}\n", "file_path": "src/fs/load.rs", "rank": 72, "score": 80893.07662200472 }, { "content": "/// Types implementing this trait can be queried for validity.\n\npub trait Valid {\n\n /// Check if the current state is valid.\n\n #[must_use]\n\n fn check(&self) -> bool;\n\n}\n", "file_path": "src/tools/valid.rs", "rank": 73, "score": 80893.07662200472 }, { "content": "/// Types implementing this trait can be built into another type.\n\npub trait Build {\n\n /// End type to be built.\n\n type Inst;\n\n\n\n /// Build the instance type.\n\n fn build(self) -> Self::Inst;\n\n}\n", "file_path": "src/ord/build.rs", "rank": 74, "score": 80893.07662200472 }, { "content": "/// Types implementing this trait can be saved to file.\n\npub trait Save 
{\n\n /// Serialise the type to a given file\n\n /// # Errors\n\n /// if the instance can not be serialised or if the file can't be written to.\n\n fn save_data(&self, path: &Path) -> Result<(), Error>;\n\n\n\n /// Report the saving of a file (if it is a filepath) and save the data.\n\n /// # Errors\n\n /// if the instance can not be serialised or if the file can't be written to.\n\n #[inline]\n\n fn save(&self, path: &Path) -> Result<(), Error> {\n\n println!(\"[SAVE] {}\", path.display());\n\n\n\n self.save_data(path)\n\n }\n\n}\n\n\n\n/// Serialise the type in json format.\n\n/// # Errors\n\n/// if the instance can not be serialised into json or if the file can't be written to.\n", "file_path": "src/fs/save.rs", "rank": 75, "score": 80893.07662200472 }, { "content": "#[allow(clippy::expect_used)]\n\n#[inline]\n\n#[must_use]\n\nfn thread<'a>(\n\n engine: &Engine,\n\n input: &'a Input,\n\n mut output: Output<'a>,\n\n pb: &Arc<Mutex<ProgressBar>>,\n\n) -> Output<'a> {\n\n let mut rng = thread_rng();\n\n\n\n let phot_energy = input.light.power() / input.sett.num_phot() as f64;\n\n\n\n let block_size = input.sett.block_size();\n\n while let Some((start, end)) = {\n\n let mut pb = pb.lock().expect(\"Could not lock progress bar.\");\n\n let b = pb.block(block_size);\n\n std::mem::drop(pb);\n\n b\n\n } {\n\n for _ in start..end {\n\n let phot = input.light.emit(&mut rng, phot_energy);\n\n engine.run(input, &mut output, &mut rng, phot);\n\n }\n\n }\n\n\n\n output\n\n}\n", "file_path": "src/sim/mcrt/run.rs", "rank": 76, "score": 80834.12526646022 }, { "content": "/// Trace trait implementation.\n\n/// Types implementing this trait can be traced using 'Ray's.\n\npub trait Trace {\n\n /// Determine if a ray hit occurs.\n\n fn hit(&self, ray: &Ray) -> bool;\n\n\n\n /// Distance to the surface along the ray's line of travel.\n\n fn dist(&self, ray: &Ray) -> Option<f64>;\n\n\n\n /// Distance to the surface along the ray's line of travel and side of collision.\n\n fn 
dist_side(&self, ray: &Ray) -> Option<(f64, Side)>;\n\n}\n", "file_path": "src/geom/properties/trace.rs", "rank": 77, "score": 79898.12336436959 }, { "content": "/// Emit trait implementation.\n\n/// Types implementing this trait can cast Rays.\n\npub trait Emit {\n\n /// Cast a new ray.\n\n fn cast<R: Rng>(&self, rng: &mut R) -> Ray;\n\n}\n\n\n\nimpl Emit for Pos3 {\n\n #[inline]\n\n #[must_use]\n\n fn cast<R: Rng>(&self, rng: &mut R) -> Ray {\n\n let theta = rng.gen_range(0.0..(2.0 * PI));\n\n let z = rng.gen_range(-1.0..1.0);\n\n\n\n Ray::new(\n\n *self,\n\n Dir3::new_normalize(Vec3::new(\n\n (1.0_f64 - (z * z)).sqrt() * theta.cos(),\n\n (1.0_f64 - (z * z)).sqrt() * theta.sin(),\n\n z,\n\n )),\n\n )\n\n }\n\n}\n", "file_path": "src/geom/properties/emit.rs", "rank": 78, "score": 79894.08250061257 }, { "content": "/// Types implementing this trait may be transformed.\n\npub trait Transformable {\n\n /// Apply the given transformation.\n\n fn transform(&mut self, trans: &Trans3);\n\n}\n", "file_path": "src/geom/properties/transformable.rs", "rank": 79, "score": 79889.52575707299 }, { "content": "/// Types implementing this trait can be analysed to produce a printable type.\n\npub trait Analyze {\n\n /// End type to be built.\n\n type Inst;\n\n\n\n /// Create a displayable instance.\n\n fn display(&self) -> Self::Inst;\n\n}\n", "file_path": "src/util/fmt/analyze.rs", "rank": 80, "score": 79889.52575707299 }, { "content": "/// Collide trait implementation.\n\n/// Types implementing this trait can be tested for collision with an axis-aligned bounding box.\n\npub trait Collide {\n\n /// Check for an overlapping collision.\n\n fn overlap(&self, aabb: &Cube) -> bool;\n\n}\n", "file_path": "src/geom/properties/collide.rs", "rank": 81, "score": 79889.52575707299 }, { "content": "initSidebarItems({\"struct\":[[\"Input\",\"Diffuse simulation resources conglomerate.\"]]});", "file_path": "docs/arctk/sim/diffuse/input/sidebar-items.js", "rank": 82, "score": 76210.96636458162 
}, { "content": "#!/usr/bin/python3\n\n\n\n\n\nimport csv\n\nimport matplotlib.pyplot as plt\n\nimport sys\n\nimport pandas\n\n\n\n\n\ndef quit_figure(event):\n\n if event.key == 'escape':\n\n plt.close(event.canvas.figure)\n\n\n\n\n\ncid = plt.gcf().canvas.mpl_connect('key_press_event', quit_figure)\n\n\n\n\n\n# Settings\n\nTITLE_LABEL = \"XY - Scatter\"\n\nX_AXIS_LABEL = \"X\"\n\nY_AXIS_LABEL = \"Y\"\n\n\n\nCOLS_SMALL = [\"r.\",\n\n \"m.\",\n\n \"b.\",\n\n \"c.\",\n\n \"g.\",\n\n \"y.\",\n\n \"k.\"]\n\nCOLS_LARGE = [\"#FF0000\",\n\n \"#800000\",\n\n \"#FFFF00\",\n\n \"#808000\",\n\n \"#00FF00\",\n\n \"#008000\",\n\n \"#00FFFF\",\n\n \"#008080\",\n\n \"#0000FF\",\n\n \"#000080\",\n\n \"#FF00FF\",\n\n \"#800080\",\n\n \"#000000\",\n\n \"#808080\",\n\n \"#C0C0C0\"]\n\n\n\n\n\n# Main\n\nif len(sys.argv) != 2:\n\n print(\"Incorrect arguments: <filename>\")\n\n quit()\n\n\n\nfilename = sys.argv[1]\n\n\n\n# colnames = ['t', 'a', 'b', 'c', 'd', 'e', 'f', 'g']\n\n\n\ndata = pandas.read_csv(filename, header=0)\n\nheaders = list(pandas.read_csv(filename, header=0).head())\n\n\n\nnum_specs = len(data.columns) - 1\n\nprint(\"Total species: \", num_specs)\n\n\n\nif num_specs <= len(COLS_SMALL):\n\n cols = COLS_SMALL\n\nelse:\n\n cols = COLS_LARGE\n\n\n\ncol_number = 0\n\nfor col in data:\n\n if col_number == 0:\n\n y = data[col]\n\n else:\n\n plt.plot(y, data[col], cols[(col_number % len(cols)) - 1])\n\n col_number += 1\n\n\n\nplt.xlabel(X_AXIS_LABEL)\n\nplt.ylabel(Y_AXIS_LABEL)\n\nplt.title(TITLE_LABEL)\n\nplt.legend(headers[1:])\n\n\n\nplt.show()\n\nplt.close()\n", "file_path": "input/plot.py", "rank": 83, "score": 73349.6072426239 }, { "content": "/// Types implementing this trait can be linked to a set to produce a referenced type.\n\npub trait Link<'a, T> {\n\n /// Type to be built.\n\n type Inst;\n\n\n\n /// Get a list of all required resource keys.\n\n fn requires(&self) -> Vec<Name>;\n\n\n\n /// Link the instance type.\n\n /// # Errors\n\n /// if a field could not be 
referenced.\n\n fn link(self, set: &'a Set<T>) -> Result<Self::Inst, Error>;\n\n}\n\n\n\n#[allow(clippy::use_self)]\n\nimpl<'a, T, S: Link<'a, T>> Link<'a, T> for Vec<S> {\n\n type Inst = Vec<S::Inst>;\n\n\n\n #[inline]\n\n fn requires(&self) -> Vec<Name> {\n\n self.iter()\n", "file_path": "src/ord/link.rs", "rank": 84, "score": 73113.75847275819 }, { "content": "#[inline]\n\n#[must_use]\n\nfn colour(string: &str) -> String {\n\n match unsafe { SECTION } % 6 {\n\n 0 => format!(\"{}\", string.bright_magenta()),\n\n 1 => format!(\"{}\", string.bright_red()),\n\n 2 => format!(\"{}\", string.bright_yellow()),\n\n 3 => format!(\"{}\", string.bright_green()),\n\n 4 => format!(\"{}\", string.bright_cyan()),\n\n 5 => format!(\"{}\", string.bright_blue()),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "src/util/fmt/banner.rs", "rank": 85, "score": 71174.86626687639 }, { "content": "def smoothstep_1(t):\n\n \"Linear (no) smoothing\"\n\n return t\n\n\n\n\n\ndef smoothstep_2(t):\n\n \"Quadratic smoothing\"\n\n return (3.0 * t**2) - (2.0 * t**3)\n\n\n\n\n\ndef smoothstep_3(t):\n\n \"Cubic smoothing\"\n\n return (10.0 * t**3) - (15.0 * t**4) + (6.0 * t**5)\n\n\n\n\n\ndef smoothstep_4(t):\n\n \"Fourth degree smoothing\"\n\n return (35.0 * t**4) - (84.0 * t**5) + (70.0 * t**6) - (20.0 * t**7)\n\n\n\n\n\ndef smoothstep_5(t):\n\n \"Fifth degree smoothing\"\n\n return (126.0 * t**5) - (420.0 * t**6) + (540.0 * t**7) - (315.0 * t**8) + (70.0 * t**9)\n\n\n\n\n\ndef smoothstep_6(t):\n\n \"Sitth degree smoothing\"\n\n return (462.0 * t**6) - (1980.0 * t**7) + (3465.0 * t**8) - (3080.0 * t**9) + (1386.0 * t**10) - (252.0 * t**11)\n\n\n\n\n\ndef smoothstep_7(t):\n\n \"Seventh degree smoothing\"\n\n return (1716.0 * t**7) - (9009.0 * t**8) + (20020.0 * t**9) - (24024.0 * t**10) + (16380.0 * t**11) - (6006.0 * t**12) + (924.0 * t**13)\n\n\n\n\n\ndef interpolate(t, a, b, ss):\n\n \"Interpolation function\"\n\n return a + (ss(t) * (b - a))\n", "file_path": "script/arctk/rng/smooth.py", 
"rank": 86, "score": 66986.55818849656 }, { "content": "import math\n\nimport numpy as np\n\nimport random\n\n\n\nfrom .smooth import interpolate, smoothstep_1\n\n\n\n\n\ninv_sqrt_2 = 1.0 / math.sqrt(2.0)\n\n\n\n\n\nclass Perlin:\n\n \"\"\"\n\n Perlin noise map.\n\n \"\"\"\n\n\n\n def __init__(self, res, smoothstep):\n\n \"\"\"\n\n Construct a new instance,\n\n with a given resolution of gradient vectors.\n\n \"\"\"\n\n assert(res[0] >= 2)\n\n assert(res[1] >= 2)\n\n\n\n self.smoothstep = smoothstep\n\n\n\n self.gradients = np.empty((res[0], res[1], 3))\n\n\n\n for iy in range(res[1]):\n\n for ix in range(res[0]):\n\n theta = random.uniform(0.0, 2.0 * math.pi)\n\n self.gradients[ix, iy, 0] = math.sin(theta)\n\n self.gradients[ix, iy, 1] = math.cos(theta)\n\n self.gradients[ix, iy, 2] = random.uniform(0.0, 1.0)\n\n\n\n def sample(self, pos):\n\n \"\"\"\n\n Sample the noise map.\n\n Return a value in the range [-1:1]\n\n \"\"\"\n\n (nx, ny, _) = self.gradients.shape\n\n\n\n # Move point inside the gradients.\n\n x = math.modf(pos[0])[0]\n\n y = math.modf(pos[1])[0]\n\n\n\n # Indices\n\n ix0 = math.floor(x * nx)\n\n ix1 = (ix0 + 1) % nx\n\n iy0 = math.floor(y * ny)\n\n iy1 = (iy0 + 1) % ny\n\n\n\n # UV coordinates\n\n u = 1.0 - (x * nx) + ix0\n\n v = 1.0 - (y * ny) + iy0\n\n\n\n # Gradient vectors dot offset vectors\n\n g00 = self.gradients[ix0, iy0, 0:2].dot([u, v])\n\n g10 = self.gradients[ix1, iy0, 0:2].dot([1.0 - u, v])\n\n g01 = self.gradients[ix0, iy1, 0:2].dot([u, 1.0 - v])\n\n g11 = self.gradients[ix1, iy1, 0:2].dot([1.0 - u, 1.0 - v])\n\n\n\n # Interpolate\n\n a = interpolate(u, g10, g00, self.smoothstep)\n\n b = interpolate(u, g11, g01, self.smoothstep)\n\n c = interpolate(v, b, a, self.smoothstep)\n\n\n\n return c * inv_sqrt_2\n\n\n\n def rotate(self, dt):\n\n \"\"\"\n\n Rotate the gradient vectors.\n\n \"\"\"\n\n (nx, ny, _) = self.gradients.shape\n\n\n\n for xi in range(nx):\n\n for yi in range(ny):\n\n x = self.gradients[xi, yi, 0]\n\n y = 
self.gradients[xi, yi, 1]\n\n r = self.gradients[xi, yi, 2] * dt\n\n\n\n cs = math.cos(r)\n\n sn = math.sin(r)\n\n\n\n self.gradients[xi, yi, 0] = (x * cs) - (y * sn)\n\n self.gradients[xi, yi, 1] = (x * sn) + (y * cs)\n", "file_path": "script/arctk/rng/perlin.py", "rank": 87, "score": 66986.55818849656 }, { "content": "from .smooth import interpolate, smoothstep_1\n\nfrom .perlin import Perlin\n\n\n\n\n\nclass PerlinStack:\n\n \"\"\"\n\n Multiple resolution Perlin noise map.\n\n \"\"\"\n\n\n\n def __init__(self, resolutions, smoothstep):\n\n \"\"\"\n\n Construct a new instance,\n\n with a given resolution of gradient vectors.\n\n \"\"\"\n\n\n\n nz = len(resolutions)\n\n self.maps = []\n\n for res in resolutions:\n\n self.maps.append(Perlin(res, smoothstep))\n\n\n\n def sample(self, pos):\n\n \"\"\"\n\n Sample the noise map.\n\n Return a value in the range [-1:1]\n\n \"\"\"\n\n\n\n sum = 0.0\n\n for map in self.maps:\n\n sum += map.sample(pos)\n\n\n\n return sum / len(self.maps)\n\n\n\n def rotate(self, dt):\n\n \"\"\"\n\n Rotate the gradient vectors.\n\n \"\"\"\n\n\n\n for map in self.maps:\n\n map.rotate(dt)\n", "file_path": "script/arctk/rng/perlin_stack.py", "rank": 88, "score": 66096.35351223678 }, { "content": "#[inline]\n\nfn output_dir(dir: &Path) -> Result<PathBuf, std::io::Error> {\n\n create_dir_all(dir)?;\n\n Ok(dir.to_path_buf())\n\n}\n\n\n\n/// Set and get the input and output directories.\n\n/// Returned pair is (input, output).\n\n/// # Errors\n\n/// if the root installation directory can not be determined,\n\n/// or if one of the input or output directories could not be created.\n", "file_path": "src/util/install/dir.rs", "rank": 89, "score": 58842.40976140932 }, { "content": "//! 
Ray implementation.\n\n\n\nuse crate::{\n\n access,\n\n math::{Dir3, Pos3, Rot3, Vec3},\n\n};\n\n\n\n/// Ray structure.\n\n#[derive(Clone)]\n\npub struct Ray {\n\n /// Ray origin.\n\n pos: Pos3,\n\n /// Ray direction.\n\n dir: Dir3,\n\n}\n\n\n\nimpl Ray {\n\n access!(pos, pos_mut, Pos3);\n\n access!(dir, dir_mut, Dir3);\n\n\n", "file_path": "src/geom/rt/ray.rs", "rank": 90, "score": 50446.76186237207 }, { "content": " /// Construct a new instance.\n\n #[inline]\n\n #[must_use]\n\n pub fn new(pos: Pos3, mut dir: Dir3) -> Self {\n\n dir.renormalize();\n\n Self { pos, dir }\n\n }\n\n\n\n /// Destruct self into components.\n\n #[inline]\n\n #[must_use]\n\n pub const fn destruct(self) -> (Pos3, Dir3) {\n\n (self.pos, self.dir)\n\n }\n\n\n\n /// Move along the direction of travel a given distance.\n\n #[inline]\n\n pub fn travel(&mut self, dist: f64) {\n\n debug_assert!(dist > 0.0);\n\n\n", "file_path": "src/geom/rt/ray.rs", "rank": 91, "score": 50446.28695322512 }, { "content": " self.pos += self.dir.as_ref() * dist;\n\n }\n\n\n\n /// Rotate the photon with a given pitch and subsequent roll manoeuvre.\n\n #[inline]\n\n pub fn rotate(&mut self, pitch: f64, roll: f64) {\n\n let arbitrary_axis = if (1.0 - self.dir.z.abs()) >= 1.0e-1 {\n\n Vec3::z_axis()\n\n } else {\n\n Vec3::y_axis()\n\n };\n\n\n\n let pitch_axis = Dir3::new_normalize(self.dir.cross(&arbitrary_axis));\n\n let pitch_rot = Rot3::from_axis_angle(&pitch_axis, pitch);\n\n\n\n let roll_rot = Rot3::from_axis_angle(&self.dir, roll);\n\n\n\n self.dir = roll_rot * pitch_rot * self.dir;\n\n self.dir.renormalize();\n\n }\n\n}\n", "file_path": "src/geom/rt/ray.rs", "rank": 92, "score": 50436.60603734949 }, { "content": "//! 
Simulation input.\n\n\n\nuse crate::{\n\n chem::Reactor, fmt_report, geom::Grid, ord::Register, sim::reactor::Settings,\n\n util::fmt::Analyze,\n\n};\n\nuse ndarray::{Array3, Array4};\n\nuse std::fmt::{Display, Error, Formatter};\n\n\n\n/// Reactor simulation resources conglomerate.\n\npub struct Input<'a> {\n\n /// Register of known species.\n\n pub specs: &'a Register,\n\n /// Reactor processor.\n\n pub reactor: &'a Reactor,\n\n /// Map of diffusion coeffs.\n\n pub coeffs: &'a Array4<f64>,\n\n /// Map of source/sinks.\n\n pub sources: &'a Array4<f64>,\n\n /// Map of rate multipliers.\n", "file_path": "src/sim/reactor/input.rs", "rank": 93, "score": 50314.69180621646 }, { "content": "//! Simulation input.\n\n\n\nuse crate::{chem::Reactor, fmt_report, ord::Register, sim::flask::Settings};\n\nuse ndarray::Array1;\n\nuse std::fmt::{Display, Error, Formatter};\n\n\n\n/// Flask simulation resources conglomerate.\n\npub struct Input<'a> {\n\n /// Register of known species.\n\n pub specs: &'a Register,\n\n /// Sources.\n\n pub sources: &'a Array1<f64>,\n\n /// Reactor processor.\n\n pub reactor: &'a Reactor,\n\n /// General settings.\n\n pub sett: &'a Settings,\n\n}\n\n\n\nimpl<'a> Input<'a> {\n\n /// Construct a new instance.\n", "file_path": "src/sim/flask/input.rs", "rank": 94, "score": 50312.02916482369 }, { "content": " pub multipliers: &'a Array3<f64>,\n\n /// Measurement grid.\n\n pub grid: &'a Grid,\n\n /// General settings.\n\n pub sett: &'a Settings,\n\n}\n\n\n\nimpl<'a> Input<'a> {\n\n /// Construct a new instance.\n\n #[inline]\n\n #[must_use]\n\n pub const fn new(\n\n specs: &'a Register,\n\n reactor: &'a Reactor,\n\n coeffs: &'a Array4<f64>,\n\n sources: &'a Array4<f64>,\n\n multipliers: &'a Array3<f64>,\n\n grid: &'a Grid,\n\n sett: &'a Settings,\n\n ) -> Self {\n", "file_path": "src/sim/reactor/input.rs", "rank": 95, "score": 50311.508741572055 }, { "content": " #[inline]\n\n #[must_use]\n\n pub const fn new(\n\n specs: &'a Register,\n\n sources: &'a 
Array1<f64>,\n\n reactor: &'a Reactor,\n\n sett: &'a Settings,\n\n ) -> Self {\n\n Self {\n\n specs,\n\n sources,\n\n reactor,\n\n sett,\n\n }\n\n }\n\n}\n\n\n\nimpl Display for Input<'_> {\n\n #[inline]\n\n fn fmt(&self, fmt: &mut Formatter) -> Result<(), Error> {\n\n writeln!(fmt, \"...\")?;\n\n fmt_report!(fmt, self.specs, \"species\");\n\n fmt_report!(fmt, self.sources, \"sources\");\n\n fmt_report!(fmt, self.reactor, \"reactor\");\n\n fmt_report!(fmt, self.sett, \"settings\");\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/sim/flask/input.rs", "rank": 96, "score": 50310.42062353654 }, { "content": "//! Render input.\n\n\n\nuse crate::{\n\n fmt_report,\n\n geom::{Camera, Tree},\n\n img::Gradient,\n\n ord::Set,\n\n sim::render::{Attribute, Settings, Shader},\n\n util::gradient::to_string,\n\n};\n\nuse std::fmt::{Display, Error, Formatter};\n\n\n\n/// Rendering simulation resources conglomerate.\n\npub struct Input<'a> {\n\n /// Gradients.\n\n pub grads: &'a Set<Gradient>,\n\n /// Attributes.\n\n pub attrs: &'a Set<Attribute<'a>>,\n\n /// Capturing camera.\n\n pub cam: &'a Camera,\n", "file_path": "src/sim/render/input.rs", "rank": 97, "score": 50308.24830093672 }, { "content": "//! Simulation input.\n\n\n\nuse crate::{\n\n fmt_report,\n\n geom::{Grid, Tree},\n\n ord::{Register, Set},\n\n sim::cartographer::{Attribute, Settings},\n\n};\n\nuse std::fmt::{Display, Error, Formatter};\n\n\n\n/// Cartographer simulation resources conglomerate.\n\npub struct Input<'a> {\n\n /// Material register.\n\n pub mat_reg: &'a Register,\n\n /// Attributes.\n\n pub attrs: &'a Set<Attribute>,\n\n /// Hit-scan tree.\n\n pub tree: &'a Tree<'a, Attribute>,\n\n /// Measurement grid.\n\n pub grid: &'a Grid,\n", "file_path": "src/sim/cartographer/input.rs", "rank": 98, "score": 50308.225824965666 }, { "content": "//! 
Simulation input.\n\n\n\nuse crate::{\n\n fmt_report,\n\n geom::{Grid, Tree},\n\n ord::{Register, Set},\n\n phys::{Light, Material},\n\n sim::mcrt::{Attribute, Settings},\n\n};\n\nuse std::fmt::{Display, Error, Formatter};\n\n\n\n/// MCRT simulation resources conglomerate.\n\npub struct Input<'a> {\n\n /// Spectrometer register.\n\n pub spec_reg: &'a Register,\n\n /// Materials.\n\n pub mats: &'a Set<Material>,\n\n /// Attributes.\n\n pub attrs: &'a Set<Attribute<'a>>,\n\n /// Emission light.\n", "file_path": "src/sim/mcrt/input.rs", "rank": 99, "score": 50308.165523506184 } ]
Rust
pseudos/src/main.rs
yokljo/pseudos
a16a66bd4a0edc48100e3d5e285ef897b8699375
use std::cmp::Ordering; use libpseudos::dos_event_handler::{DosEventHandler, DosInterruptResult, KeyModType, KeyPressInfo, MachineType, PortStates}; use libpseudos::dos_file_system::StandardDosFileSystem; use libpseudos::exe_loader::MzHeader; use xachtsechs::machine8086::Machine8086; use xachtsechs::types::{Reg, RegHalf, StepResult}; use sdl2::image::{LoadTexture, INIT_PNG}; use sdl2::event::Event; use sdl2::keyboard::Keycode; use sdl2::rect::Rect; use sdl2::render::{WindowCanvas, Texture}; use sdl2::audio::AudioSpecDesired; use std::time::{SystemTime, UNIX_EPOCH}; use std::path::Path; const SCANCODE_LETTERS: &[u8] = b"qwertyuiopasdfghjklzxcvbnm"; fn scancode_to_key_info(keycode: Keycode, shifted: bool) -> Option<KeyPressInfo> { let key_index = keycode as u8; let (scan_code, ascii_char, shifted_ascii_char) = match keycode { _ if (b'a' ..= b'z').contains(&(keycode as u8)) => { let lower_ascii_char = SCANCODE_LETTERS.iter().position(|c| *c == key_index).unwrap() as u8 + 0x10; (lower_ascii_char, key_index, key_index + 0x20) } Keycode::Num0 => (0x0b, 0x30, 0x29), Keycode::Num1 => (0x02, 0x31, 0x21), Keycode::Num2 => (0x03, 0x32, 0x40), Keycode::Num3 => (0x04, 0x33, 0x23), Keycode::Num4 => (0x05, 0x34, 0x24), Keycode::Num5 => (0x06, 0x35, 0x25), Keycode::Num6 => (0x07, 0x36, 0x5e), Keycode::Num7 => (0x08, 0x37, 0x26), Keycode::Num8 => (0x09, 0x38, 0x2a), Keycode::Num9 => (0x0a, 0x39, 0x28), Keycode::Backspace => (0x0e, 0x08, 0x08), Keycode::Delete => (0x53, 0x00, 0x2e), Keycode::Insert => (0x52, 0x00, 0x30), Keycode::Slash => (0x35, 0x2f, 0x3f), Keycode::Down => (0x50, 0, 0x32), Keycode::Up => (0x48, 0, 0x38), Keycode::Left => (0x4b, 0, 0x34), Keycode::Right => (0x4d, 0, 0x36), Keycode::Return => (0x1c, 0x0d, 0x0d), Keycode::Escape => (0x01, 0x1b, 0x1b), Keycode::Space => (0x39, 0x20, 0x20), Keycode::Tab => (0x0f, 0x09, 0), Keycode::PageUp => (0x49, 0, 0x39), Keycode::PageDown => (0x51, 0, 0x33), _ if (Keycode::F1 as u8 ..= Keycode::F12 as u8).contains(&(keycode as u8)) 
=> { (0x3b + (keycode as u8 - Keycode::F1 as u8), 0, 0) } _ => return None }; Some(KeyPressInfo{scan_code, ascii_char: if shifted { shifted_ascii_char } else { ascii_char }}) } fn get_ms_from_duration(duration: std::time::Duration) -> usize { (duration.as_secs() * 1000) as usize + duration.subsec_millis() as usize } pub fn vga_colour_to_rgb(colour: u8) -> (u8, u8, u8) { match colour { 0x0 => (0x00, 0x00, 0x00), 0x1 => (0x00, 0x00, 0xAA), 0x2 => (0x00, 0xAA, 0x00), 0x3 => (0x00, 0xAA, 0xAA), 0x4 => (0xAA, 0x00, 0x00), 0x5 => (0xAA, 0x00, 0xAA), 0x6 => (0xAA, 0x55, 0x00), 0x7 => (0xAA, 0xAA, 0xAA), 0x8 => (0x55, 0x55, 0x55), 0x9 => (0x55, 0x55, 0xFF), 0xA => (0x55, 0xFF, 0x55), 0xB => (0x55, 0xFF, 0xFF), 0xC => (0xFF, 0x55, 0x55), 0xD => (0xFF, 0x55, 0xFF), 0xE => (0xFF, 0xFF, 0x55), 0xF => (0xFF, 0xFF, 0xFF), _ => (0, 0, 0) } } struct DosConsole { machine: Machine8086, dos_event_handler: DosEventHandler, current_run_time_ms: usize, } impl DosConsole { fn draw_screen(&mut self, canvas: &mut WindowCanvas, dosfont_tex: &mut Texture, redraw_all: bool) { let screen_mem = &self.machine.memory[0xb8000..0xb8000+0x1000]; let screen_width = 80; let screen_height = 25; for y in 0 .. screen_height { for x in 0 .. 
screen_width { let char_index = (x + (y * screen_width)) * 2; let ref char_code = screen_mem[char_index]; let ref colour = screen_mem[char_index + 1]; let colour_fore = colour & 0x0f; let mut colour_back = (colour & 0xf0) >> 4; let mut blinking = false; if colour_back >= 8 { colour_back -= 8; blinking = true; } let fore_rgb = vga_colour_to_rgb(colour_fore); let back_rgb = vga_colour_to_rgb(colour_back); let char_rect = Rect::new(8 * (*char_code as i32), 0, 8, 14); let dest_rect = Rect::new(8 * (x as i32), 14 * (y as i32), 8, 14); canvas.set_draw_color(sdl2::pixels::Color::RGB(back_rgb.0, back_rgb.1, back_rgb.2)); canvas.fill_rect(dest_rect).ok(); if !blinking || self.current_run_time_ms % 450 < 225 { dosfont_tex.set_color_mod(fore_rgb.0, fore_rgb.1, fore_rgb.2); canvas.copy(&dosfont_tex, Some(char_rect), Some(dest_rect)).expect("Render failed"); } } } } fn update_keymod(&mut self, keymod: sdl2::keyboard::Mod) { self.dos_event_handler.set_key_mod(KeyModType::Shift, keymod.contains(sdl2::keyboard::LSHIFTMOD) || keymod.contains(sdl2::keyboard::RSHIFTMOD)); self.dos_event_handler.set_key_mod(KeyModType::Ctrl, keymod.contains(sdl2::keyboard::LCTRLMOD) || keymod.contains(sdl2::keyboard::RCTRLMOD)); self.dos_event_handler.set_key_mod(KeyModType::Alt, keymod.contains(sdl2::keyboard::LALTMOD) || keymod.contains(sdl2::keyboard::RALTMOD)); } fn run(&mut self) { let mut step_count = 0; let scale = 2; let sdl_context = sdl2::init().unwrap(); let render_width = 640; let render_height = 350; let sdl_video = sdl_context.video().unwrap(); let _sdl_image = sdl2::image::init(INIT_PNG).unwrap(); let window = sdl_video.window("PseuDOS", render_width * scale, render_height * scale) .position_centered() .build() .unwrap(); let (window_width, window_height) = window.size(); let mut canvas = window.into_canvas().software().build().unwrap(); let texture_creator = canvas.texture_creator(); let dosfont_file = Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/res/dosfont.png")); let mut 
dosfont_tex = texture_creator.load_texture(dosfont_file).unwrap(); let mut running = true; canvas.set_scale(scale as f32, scale as f32).ok(); canvas.set_viewport(Rect::new(((window_width / scale) as i32 / 2 - render_width as i32 / 2) as i32, ((window_height / scale) as i32 / 2 - render_height as i32 / 2) as i32, render_width, render_height)); let start_time_ms = get_ms_from_duration(SystemTime::now().duration_since(UNIX_EPOCH).unwrap()); let mut last_time_ms = start_time_ms; self.draw_screen(&mut canvas, &mut dosfont_tex, true); while running { for event in sdl_context.event_pump().unwrap().poll_iter() { match event { Event::Quit{..} => { running = false; } Event::Window{..} => { self.draw_screen(&mut canvas, &mut dosfont_tex, true); } Event::KeyDown{keycode: keycode_opt, keymod, ..} => { self.update_keymod(keymod); let shifted = keymod.contains(sdl2::keyboard::LSHIFTMOD) || keymod.contains(sdl2::keyboard::RSHIFTMOD); if let Some(keycode) = keycode_opt { if let Some(key_info) = scancode_to_key_info(keycode, shifted) { self.dos_event_handler.key_press_queue.push_back(key_info); } } } _ => {} } } self.machine.interrupt_on_next_step(0x08); self.dos_event_handler.seconds_since_start += 54.9451/1000.; self.dos_event_handler.set_cga_vertial_retrace(true); let num_opcodes_to_exec = 4000; for _ in 0..num_opcodes_to_exec { match self.machine.step(&mut self.dos_event_handler) { Ok(StepResult::Interrupt) => { match self.dos_event_handler.result { DosInterruptResult::ShouldReturn => { self.machine.return_from_interrupt(); } DosInterruptResult::ShouldReturnAndWaitForEvents => { self.machine.return_from_interrupt(); break; } DosInterruptResult::ShouldBlockForKeypress => { break; } } } Err(err) => { eprintln!("Step error: {}", err); return; } _ => {} } step_count += 1; } /*if self.machine.number_of_parsed_instructions > 2000000 { //println!("MEM: {:?}", &machine.memory[0xb8000..0xb8000+0x1000]); /*use std::io::Write; println!("ds: {}", self.machine.get_reg_u16(Reg::DS)); let mut 
file = std::fs::File::create("memdmp.dat").unwrap(); file.write_all(&self.machine.memory);*/ let ds = self.machine.get_reg_u16(Reg::DS) as u32; /*for i in 0..16 { let addr = (ds<<4)+((i<<9)+0x8d0a); let length = self.machine.peek_u16(addr) as u32; let mut arrstr = "[".to_string(); //println!("length: {}", length); for sound_index in 0..length { let sound_addr = addr + ((sound_index * 2) + 2); let entry = self.machine.peek_u16(sound_addr); arrstr += &format!("{}, ", entry); //println!("> {}", entry); } arrstr += "]"; println!("{}", arrstr); }*/ for i in (0..256*2).step_by(2) { let addr = (ds<<4)+(i+0x89f4); let num = self.machine.peek_u16(addr); println!("{}: {}", i/2, num); } panic!(); }*/ self.draw_screen(&mut canvas, &mut dosfont_tex, false); self.current_run_time_ms += 5; canvas.present(); } } } fn main() { let mut file = std::fs::File::open("./junk/dos/ZZT.EXE").unwrap(); let exe_header = MzHeader::parse(&mut file).unwrap(); println!("{:#?}", exe_header); let mut machine = Machine8086::new(1024*1024*1); exe_header.load_into_machine(&mut machine, &mut file); let mut event_handler = DosEventHandler { machine_type: MachineType::EGA, video_mode: MachineType::EGA.lookup_video_mode(3).unwrap(), port_states: PortStates::new(), file_system: Box::new(StandardDosFileSystem::new("./junk/dos".into())), disk_trasnsfer_address: 0, seconds_since_start: 0., key_mod: 0, result: DosInterruptResult::ShouldReturn, key_press_queue: std::collections::VecDeque::new(), }; event_handler.init_machine(&mut machine); let mut console = DosConsole { machine, dos_event_handler: event_handler, current_run_time_ms: 0, }; console.run(); }
use std::cmp::Ordering; use libpseudos::dos_event_handler::{DosEventHandler, DosInterruptResult, KeyModType, KeyPressInfo, MachineType, PortStates}; use libpseudos::dos_file_system::StandardDosFileSystem; use libpseudos::exe_loader::MzHeader; use xachtsechs::machine8086::Machine8086; use xachtsechs::types::{Reg, RegHalf, StepResult}; use sdl2::image::{LoadTexture, INIT_PNG}; use sdl2::event::Event; use sdl2::keyboard::Keycode; use sdl2::rect::Rect; use sdl2::render::{WindowCanvas, Texture}; use sdl2::audio::AudioSpecDesired; use std::time::{SystemTime, UNIX_EPOCH}; use std::path::Path; const SCANCODE_LETTERS: &[u8] = b"qwertyuiopasdfghjklzxcvbnm"; fn scancode_to_key_info(keycode: Keycode, shifted: bool) -> Option<KeyPressInfo> { let key_index = keycode as u8; let (scan_code, ascii_char, shifted_ascii_char) = match keycode { _ if (b'a' ..= b'z').contains(&(keycode as u8)) => { let lower_ascii_char = SCANCODE_LETTERS.iter().position(|c| *c == key_index).unwrap() as u8 + 0x10; (lower_ascii_char, key_index, key_index + 0x20) } Keycode::Num0 => (0x0b, 0x30, 0x29), Keycode::Num1 => (0x02, 0x31, 0x21), Keycode::Num2 => (0x03, 0x32, 0x40), Keycode::Num3 => (0x04, 0x33, 0x23), Keycode::Num4 => (0x05, 0x34, 0x24), Keycode::Num5 => (0x06, 0x35, 0x25), Keycode::Num6 => (0x07, 0x36, 0x5e), Keycode::Num7 => (0x08, 0x37, 0x26), Keycode::Num8 => (0x09, 0x38, 0x2a), Keycode::Num9 => (0x0a, 0x39, 0x28), Keycode::Backspace => (0x0e, 0x08, 0x08), Keycode::Delete => (0x53, 0x00, 0x2e), Keycode::Insert => (0x52, 0x00, 0x30), Keycode::Slash => (0x35, 0x2f, 0x3f), Keycode::Down => (0x50, 0, 0x32), Keycode::Up => (0x48, 0, 0x38), Keycode::Left => (0x4b, 0, 0x34), Keycode::Right => (0x4d, 0, 0x36), Keycode::Return => (0x1c, 0x0d, 0x0d), Keycode::Escape => (0x01, 0x1b, 0x1b), Keycode::Space => (0x39, 0x20, 0x20), Keycode::Tab => (0x0f, 0x09, 0), Keycode::PageUp => (0x49, 0, 0x39), Keycode::PageDown => (0x51, 0, 0x33), _ if (Keycode::F1 as u8 ..= Keycode::F12 as u8).contains(&(keycode as u8)) 
=> { (0x3b + (keycode as u8 - Keycode::F1 as u8), 0, 0) } _ => return None }; Some(KeyPressInfo{scan_code, ascii_char: if shifted { shifted_ascii_char } else { ascii_char }}) } fn get_ms_from_duration(duration: std::time::Duration) -> usize { (duration.as_secs() * 1000) as usize + duration.subsec_millis() as usize } pub fn vga_colour_to_rgb(colour: u8) -> (u8, u8, u8) { match colour { 0x0 => (0x00, 0x00, 0x00), 0x1 => (0x00, 0x00, 0xAA), 0x2 => (0x00, 0xAA, 0x00), 0x3 => (0x00, 0xAA, 0xAA), 0x4 => (0xAA, 0x00, 0x00), 0x5 => (0xAA, 0x00, 0xAA), 0x6 => (0xAA, 0x55, 0x00), 0x7 => (0xAA, 0xAA, 0xAA), 0x8 => (0x55, 0x55, 0x55), 0x9 => (0x55, 0x55, 0xFF), 0xA => (0x55, 0xFF, 0x55), 0xB => (0x55, 0xFF, 0xFF), 0xC => (0xFF, 0x55, 0x55), 0xD => (0xFF, 0x55, 0xFF), 0xE => (0xFF, 0xFF, 0x55), 0xF => (0xFF, 0xFF, 0xFF), _ => (0, 0, 0) } } struct DosConsole { machine: Machine8086, dos_event_handler: DosEventHandler, current_run_time_ms: usize, } impl DosConsole { fn draw_screen(&mut self, canvas: &mut WindowCanvas, dosfont_tex: &mut Texture, redraw_all: bool) { let screen_mem = &self.machine.memory[0xb8000..0xb8000+0x1000]; let screen_width = 80; let screen_height = 25; for y in 0 .. screen_height { for x in 0 .. screen_width { let char_index = (x + (y * screen_width)) * 2; let ref char_code = screen_mem[char_index]; let ref colour = screen_mem[char_index + 1]; let colour_fore = colour & 0x0f; let mut colour_back = (colour & 0xf0) >> 4; let mut blinking = false; if colour_back >= 8 { colour_back -= 8; blinking = true; } let fore_rgb = vga_colour_to_rgb(colour_fore); let back_rgb = vga_colour_to_rgb(colour_back); let char_rect = Rect::new(8 * (*char_code as i32), 0, 8, 14); let dest_rect = Rect::new(8 * (x as i32), 14 * (y as i32), 8, 14); canvas.set_draw_color(sdl2::pixels::Color::RGB(back_rgb.0, back_rgb.1, back_rgb.2)); canvas.fill_rect(dest_rect).ok();
} } } fn update_keymod(&mut self, keymod: sdl2::keyboard::Mod) { self.dos_event_handler.set_key_mod(KeyModType::Shift, keymod.contains(sdl2::keyboard::LSHIFTMOD) || keymod.contains(sdl2::keyboard::RSHIFTMOD)); self.dos_event_handler.set_key_mod(KeyModType::Ctrl, keymod.contains(sdl2::keyboard::LCTRLMOD) || keymod.contains(sdl2::keyboard::RCTRLMOD)); self.dos_event_handler.set_key_mod(KeyModType::Alt, keymod.contains(sdl2::keyboard::LALTMOD) || keymod.contains(sdl2::keyboard::RALTMOD)); } fn run(&mut self) { let mut step_count = 0; let scale = 2; let sdl_context = sdl2::init().unwrap(); let render_width = 640; let render_height = 350; let sdl_video = sdl_context.video().unwrap(); let _sdl_image = sdl2::image::init(INIT_PNG).unwrap(); let window = sdl_video.window("PseuDOS", render_width * scale, render_height * scale) .position_centered() .build() .unwrap(); let (window_width, window_height) = window.size(); let mut canvas = window.into_canvas().software().build().unwrap(); let texture_creator = canvas.texture_creator(); let dosfont_file = Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/res/dosfont.png")); let mut dosfont_tex = texture_creator.load_texture(dosfont_file).unwrap(); let mut running = true; canvas.set_scale(scale as f32, scale as f32).ok(); canvas.set_viewport(Rect::new(((window_width / scale) as i32 / 2 - render_width as i32 / 2) as i32, ((window_height / scale) as i32 / 2 - render_height as i32 / 2) as i32, render_width, render_height)); let start_time_ms = get_ms_from_duration(SystemTime::now().duration_since(UNIX_EPOCH).unwrap()); let mut last_time_ms = start_time_ms; self.draw_screen(&mut canvas, &mut dosfont_tex, true); while running { for event in sdl_context.event_pump().unwrap().poll_iter() { match event { Event::Quit{..} => { running = false; } Event::Window{..} => { self.draw_screen(&mut canvas, &mut dosfont_tex, true); } Event::KeyDown{keycode: keycode_opt, keymod, ..} => { self.update_keymod(keymod); let shifted = 
keymod.contains(sdl2::keyboard::LSHIFTMOD) || keymod.contains(sdl2::keyboard::RSHIFTMOD); if let Some(keycode) = keycode_opt { if let Some(key_info) = scancode_to_key_info(keycode, shifted) { self.dos_event_handler.key_press_queue.push_back(key_info); } } } _ => {} } } self.machine.interrupt_on_next_step(0x08); self.dos_event_handler.seconds_since_start += 54.9451/1000.; self.dos_event_handler.set_cga_vertial_retrace(true); let num_opcodes_to_exec = 4000; for _ in 0..num_opcodes_to_exec { match self.machine.step(&mut self.dos_event_handler) { Ok(StepResult::Interrupt) => { match self.dos_event_handler.result { DosInterruptResult::ShouldReturn => { self.machine.return_from_interrupt(); } DosInterruptResult::ShouldReturnAndWaitForEvents => { self.machine.return_from_interrupt(); break; } DosInterruptResult::ShouldBlockForKeypress => { break; } } } Err(err) => { eprintln!("Step error: {}", err); return; } _ => {} } step_count += 1; } /*if self.machine.number_of_parsed_instructions > 2000000 { //println!("MEM: {:?}", &machine.memory[0xb8000..0xb8000+0x1000]); /*use std::io::Write; println!("ds: {}", self.machine.get_reg_u16(Reg::DS)); let mut file = std::fs::File::create("memdmp.dat").unwrap(); file.write_all(&self.machine.memory);*/ let ds = self.machine.get_reg_u16(Reg::DS) as u32; /*for i in 0..16 { let addr = (ds<<4)+((i<<9)+0x8d0a); let length = self.machine.peek_u16(addr) as u32; let mut arrstr = "[".to_string(); //println!("length: {}", length); for sound_index in 0..length { let sound_addr = addr + ((sound_index * 2) + 2); let entry = self.machine.peek_u16(sound_addr); arrstr += &format!("{}, ", entry); //println!("> {}", entry); } arrstr += "]"; println!("{}", arrstr); }*/ for i in (0..256*2).step_by(2) { let addr = (ds<<4)+(i+0x89f4); let num = self.machine.peek_u16(addr); println!("{}: {}", i/2, num); } panic!(); }*/ self.draw_screen(&mut canvas, &mut dosfont_tex, false); self.current_run_time_ms += 5; canvas.present(); } } } fn main() { let mut file = 
std::fs::File::open("./junk/dos/ZZT.EXE").unwrap(); let exe_header = MzHeader::parse(&mut file).unwrap(); println!("{:#?}", exe_header); let mut machine = Machine8086::new(1024*1024*1); exe_header.load_into_machine(&mut machine, &mut file); let mut event_handler = DosEventHandler { machine_type: MachineType::EGA, video_mode: MachineType::EGA.lookup_video_mode(3).unwrap(), port_states: PortStates::new(), file_system: Box::new(StandardDosFileSystem::new("./junk/dos".into())), disk_trasnsfer_address: 0, seconds_since_start: 0., key_mod: 0, result: DosInterruptResult::ShouldReturn, key_press_queue: std::collections::VecDeque::new(), }; event_handler.init_machine(&mut machine); let mut console = DosConsole { machine, dos_event_handler: event_handler, current_run_time_ms: 0, }; console.run(); }
if !blinking || self.current_run_time_ms % 450 < 225 { dosfont_tex.set_color_mod(fore_rgb.0, fore_rgb.1, fore_rgb.2); canvas.copy(&dosfont_tex, Some(char_rect), Some(dest_rect)).expect("Render failed"); }
if_condition
[ { "content": "// http://www.bioscentral.com/misc/bda.htm\n\npub fn initialise_bios_data_area(machine: &mut Machine8086) {\n\n\t// The BIOS Data Area starts at the start of the 0x40 segment.\n\n\t// Equipment\n\n\tmachine.set_data_u16(&BIOS_EQUIPMENT, 0x0061);\n\n\t// Memory size in KB\n\n\tmachine.set_data_u16(&BIOS_MEMORY_SIZE_KB, 640);\n\n\t// Text column count for the video mode\n\n\tmachine.set_data_u16(&BIOS_TEXT_COLUMN_COUNT, 80);\n\n\t// Port for video I/O\n\n\tmachine.set_data_u16(&BIOS_VIDEO_IO_PORT_ADDRESS, 0xd403);\n\n}\n", "file_path": "libpseudos/src/bios_loader.rs", "rank": 1, "score": 137970.39528111246 }, { "content": "// https://en.wikipedia.org/wiki/Program_Segment_Prefix\n\nfn initialise_dos_program_segment_prefix(machine: &mut Machine8086, program_size: usize, command_line_tail: &[u8]) -> Result<(), String> {\n\n\t// The DS register will be the PSP location when a program starts.\n\n\tlet psp_start = (EXE_ORIGIN_PARAGRAPH * EXE_PARAGRAPH_BYTES) as u32; //machine.get_seg_origin(Reg::DS);\n\n\t// CP/M exit: Always 20h\n\n\t//machine.poke_u16(psp_start + 0x00, 0x20);\n\n\t// These values are probably all wrong:\n\n\t\n\n\t// Segment after the memeory allocated to the program.\n\n\tdbg!((psp_start, program_size));\n\n\tmachine.poke_u16(psp_start + 0x02, 0xa000);\n\n\t\n\n\t// +1 for the 0x0d teminator character.\n\n\tlet command_line_tail_len = command_line_tail.len() + 1;\n\n\tif command_line_tail_len > 0xff {\n\n\t\treturn Err(format!(\"Command line tail too long: {}\", command_line_tail.len()));\n\n\t}\n\n\tmachine.poke_u8(psp_start + 0x80, command_line_tail_len as u8);\n\n\tlet mut current_command_line_pos = psp_start + 0x81;\n\n\tfor byte in command_line_tail {\n\n\t\tmachine.poke_u8(current_command_line_pos, *byte);\n\n\t\tcurrent_command_line_pos += 1;\n\n\t}\n\n\tmachine.poke_u8(current_command_line_pos, 0x0d);\n\n\t\n\n\tOk(())\n\n}\n", "file_path": "libpseudos/src/exe_loader.rs", "rank": 2, "score": 118760.87009831348 }, { "content": "// 
https://ss64.com/nt/syntax-wildcards.html\n\nfn filename_matches_spec(filename: &DosFileName, search_spec: &[u8]) -> bool {\n\n\tlet match_against_spec = |text: &[u8], spec: &[u8]| {\n\n\t\t//dbg!((ascii_filename_to_string(text), ascii_filename_to_string(spec)));\n\n\t\tlet mut spec_pos = 0;\n\n\t\tlet mut just_processed_star = false;\n\n\t\tfor c in text {\n\n\t\t\tif let Some(&spec_char) = spec.get(spec_pos) {\n\n\t\t\t\tif spec_char == b'*' {\n\n\t\t\t\t\tif let Some(next_spec_char) = spec.get(spec_pos + 1) {\n\n\t\t\t\t\t\tif *c == *next_spec_char {\n\n\t\t\t\t\t\t\tspec_pos += 1;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\tjust_processed_star = true;\n\n\t\t\t\t} else if spec_char == b'?' {\n\n\t\t\t\t\tspec_pos += 1;\n\n\t\t\t\t} else if *c == spec_char {\n\n\t\t\t\t\tspec_pos += 1;\n\n\t\t\t\t} else {\n\n\t\t\t\t\treturn false;\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 4, "score": 95577.73125242532 }, { "content": "fn split_filename(filename: &[u8]) -> (&[u8], Option<&[u8]>) {\n\n\tif let Some(dot_pos) = filename.iter().rposition(|c| *c == b'.') {\n\n\t\tlet after_dot = &filename[dot_pos + 1..];\n\n\t\tif after_dot.len() <= 3 {\n\n\t\t\t(&filename[..dot_pos], Some(after_dot))\n\n\t\t} else {\n\n\t\t\t(&filename[..dot_pos], Some(&after_dot[..3]))\n\n\t\t}\n\n\t} else {\n\n\t\t(filename, None)\n\n\t}\n\n}\n\n\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 5, "score": 73900.44402357051 }, { "content": "fn ascii_filename_to_string(ascii: &[u8]) -> String {\n\n\tascii.iter().map(|c| c.to_ascii_uppercase() as char).collect()\n\n}\n\n\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 6, "score": 59457.940019286776 }, { "content": "fn real_to_dos_name(filename: &str, extra_index: Option<usize>) -> DosFileName {\n\n\tlet mut ascii_name = vec![];\n\n\tfor c in filename.chars() {\n\n\t\tif c <= 255 as char {\n\n\t\t\tascii_name.push((c as u8).to_ascii_uppercase());\n\n\t\t} else 
{\n\n\t\t\tascii_name.push(b'_');\n\n\t\t}\n\n\t}\n\n\tlet (file_title, file_ext) = split_filename(&ascii_name);\n\n\tlet mut short_title = file_title.to_vec();\n\n\tshort_title.truncate(8);\n\n\tlet mut short_ext = file_ext.unwrap_or(&[]).to_vec();\n\n\tshort_ext.truncate(3);\n\n\t\n\n\tlet mut title_index_text = vec![];\n\n\tif let Some(extra_index) = extra_index {\n\n\t\ttitle_index_text.push(b'~');\n\n\t\textra_index.to_string().chars().for_each(|c| title_index_text.push(c as u8));\n\n\t}\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 9, "score": 38929.02150574989 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nstruct DosFileName {\n\n\ttitle: Vec<u8>,\n\n\text: Vec<u8>,\n\n}\n\n\n\nimpl DosFileName {\n\n\tfn parse(dos_filename: &[u8]) -> DosFileName {\n\n\t\tlet (title, ext) = split_filename(dos_filename);\n\n\t\tDosFileName{title: title.to_ascii_uppercase(), ext: ext.unwrap_or(&[]).to_ascii_uppercase()}\n\n\t}\n\n\n\n\tfn real_dos_name(&self) -> Vec<u8> {\n\n\t\tlet mut result = self.title.clone();\n\n\t\tif !self.ext.is_empty() {\n\n\t\t\tresult.push(b'.');\n\n\t\t\tresult.extend(&self.ext);\n\n\t\t}\n\n\t\tresult\n\n\t}\n\n}\n\n\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 10, "score": 38112.51931475403 }, { "content": "#[derive(Debug)]\n\nstruct DirListingCache {\n\n\tdir_path: std::path::PathBuf,\n\n\treal_to_dos_names: HashMap<String, DosFileName>,\n\n\tdos_to_real_names: HashMap<DosFileName, String>,\n\n}\n\n\n\nimpl DirListingCache {\n\n\tfn new(dir_path: std::path::PathBuf) -> DirListingCache {\n\n\t\tlet mut dir_listing = DirListingCache {\n\n\t\t\tdir_path,\n\n\t\t\treal_to_dos_names: HashMap::new(),\n\n\t\t\tdos_to_real_names: HashMap::new(),\n\n\t\t};\n\n\t\tdir_listing.list_dir(&mut |_|{});\n\n\t\tdir_listing\n\n\t}\n\n\t\n\n\tfn get_dos_name(&mut self, real_filename: &str) -> DosFileName {\n\n\t\tif let Some(existing_dos_name) = self.real_to_dos_names.get(real_filename) 
{\n\n\t\t\texisting_dos_name.clone()\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 11, "score": 38112.51931475403 }, { "content": "pub trait DosFileSystem : std::fmt::Debug {\n\n\t/// Returns a file handle if successful. Error code if not.\n\n\tfn create(&mut self, filename: &[u8], attributes: u16) -> Result<u16, DosErrorCode>;\n\n\t/// Returns a file handle if successful. Error code if not.\n\n\tfn open(&mut self, filename: &[u8], access_mode: DosFileAccessMode) -> Result<u16, DosErrorCode>;\n\n\t/// Retruns error code if close failed.\n\n\tfn close(&mut self, handle: u16) -> Result<(), DosErrorCode>;\n\n\t/// Returns the byte count read. Error code if read failed.\n\n\tfn read(&mut self, handle: u16, destination: &mut [u8]) -> Result<u16, DosErrorCode>;\n\n\t/// Returns the byte count written. Error code if write failed.\n\n\tfn write(&mut self, handle: u16, data: &[u8]) -> Result<u16, DosErrorCode>;\n\n\t/// Returns the new position within the file relative to the start. 
Error code if seek failed.\n\n\tfn seek(&mut self, handle: u16, offset: u32, origin: DosFileSeekOrigin) -> Result<u32, DosErrorCode>;\n\n\t/// Returns the new file length.\n\n\tfn truncate(&mut self, handle: u16) -> Result<u32, DosErrorCode>;\n\n\tfn find_first_file(&mut self, destination: &mut [u8], attributes: u16, search_spec: &[u8]) -> Result<(), DosErrorCode>;\n\n\tfn find_next_file(&mut self, destination: &mut [u8]) -> Result<(), DosErrorCode>;\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 13, "score": 30690.46005600589 }, { "content": "fn std_file_error_to_dos_error(err: std::io::Error) -> DosErrorCode {\n\n\tmatch err.kind() {\n\n\t\tstd::io::ErrorKind::NotFound => DosErrorCode::FileNotFound,\n\n\t\tstd::io::ErrorKind::PermissionDenied => DosErrorCode::AccessDenied,\n\n\t\tstd::io::ErrorKind::AlreadyExists => DosErrorCode::FileAlreadyExists,\n\n\t\t_ => {\n\n\t\t\teprintln!(\"Unexpected file error: {:?}\", err);\n\n\t\t\tDosErrorCode::PathNotFound\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl DosFileSystem for StandardDosFileSystem {\n\n\tfn create(&mut self, filename: &[u8], attributes: u16) -> Result<u16, DosErrorCode> {\n\n\t\tlet real_filepath = self.get_real_filepath(filename);\n\n\t\tlet slot = self.get_empty_slot();\n\n\t\tmatch std::fs::File::create(real_filepath) {\n\n\t\t\tOk(file) => {\n\n\t\t\t\tself.file_handles[slot] = Some(file);\n\n\t\t\t\tOk(slot as u16 + 1)\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 14, "score": 23687.422673615223 }, { "content": "\t\tmachine.set_data_u16(&BIOS_VIDEO_IO_PORT_ADDRESS, 0x3d4 as u16);\n\n\t\tmachine.set_data_u16(&BIOS_TEXT_ROW_COUNT, self.video_mode.text_dims.1 as u16);\n\n\t\tmachine.set_data_u16(&BIOS_CHAR_HEIGHT, self.video_mode.char_pixel_dims.1 as u16);\n\n\t}\n\n\n\n\t/*fn set_video_mode(&mut self, machine: &mut Machine8086, mode_index: u8) {\n\n\t\tself.video_mode = 
self.lookup_video_mode(mode_index).unwrap();\n\n\t\t\n\n\t}*/\n\n\t\n\n\tpub fn set_key_mod(&mut self, mod_type: KeyModType, on: bool) {\n\n\t\tlet bit = match mod_type {\n\n\t\t\tKeyModType::Shift => 0b0001,\n\n\t\t\tKeyModType::Ctrl => 0b0100,\n\n\t\t\tKeyModType::Alt => 0b1000,\n\n\t\t};\n\n\t\tif on {\n\n\t\t\tself.key_mod |= bit;\n\n\t\t} else {\n\n\t\t\tself.key_mod &= !bit;\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 18, "score": 11.400872612701948 }, { "content": "\n\n#[derive(Debug)]\n\npub struct DosEventHandler {\n\n\tpub machine_type: MachineType,\n\n\tpub video_mode: VideoMode,\n\n\tpub port_states: PortStates,\n\n\tpub file_system: Box<DosFileSystem>,\n\n\tpub disk_trasnsfer_address: u32,\n\n\tpub seconds_since_start: f64,\n\n\tpub key_mod: u8,\n\n\tpub result: DosInterruptResult,\n\n\tpub key_press_queue: VecDeque<KeyPressInfo>,\n\n}\n\n\n\nimpl DosEventHandler {\n\n\tpub fn init_machine(&mut self, machine: &mut Machine8086) {\n\n\t\t//self.set_video_mode(3);\n\n\t\tmachine.set_data_u8(&BIOS_VIDEO_MODE_INDEX, self.video_mode.mode_index);\n\n\t\tmachine.set_data_u16(&BIOS_TEXT_COLUMN_COUNT, self.video_mode.text_dims.0 as u16);\n\n\t\tmachine.set_data_u16(&BIOS_TEXT_PAGE_BYTES, self.video_mode.text_page_bytes as u16);\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 21, "score": 11.087851686308111 }, { "content": "\t\t\t_ => panic!(\"Unknown video func: 0x{:x}\", video_int)\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl EventHandler for DosEventHandler {\n\n\tfn handle_interrupt(&mut self, machine: &mut Machine8086, interrupt_index: u8) {\n\n\t\t// https://www.shsu.edu/~csc_tjm/spring2001/cs272/interrupt.html\n\n\t\t//println!(\"Handle interrupt: 0x{:x}\", interrupt_index);\n\n\t\tself.result = DosInterruptResult::ShouldReturn;\n\n\t\t\n\n\t\tmatch interrupt_index {\n\n\t\t\t// BIOS Interrupts (0x00-0x1F):\n\n\t\t\t0x02 => {\n\n\t\t\t\t// Non-maskable interrupt\n\n\t\t\t\tpanic!(\"Memory corruption error, 
apparently...\");\n\n\t\t\t}\n\n\t\t\t0x04 => {\n\n\t\t\t\t// Overflow\n\n\t\t\t\tpanic!(\"Overflow\");\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 22, "score": 10.7411154241869 }, { "content": "use crate::dos_error_codes::DosErrorCode;\n\nuse crate::dos_file_system::{DosFileAccessMode, DosFileSeekOrigin, DosFileSystem};\n\nuse crate::bios_loader::*;\n\n\n\nuse xachtsechs::types::{EventHandler, Flag, Reg, RegHalf};\n\nuse xachtsechs::machine8086::{INTERRUPT_TABLE_ENTRY_BYTES, Machine8086};\n\n\n\nuse std::collections::VecDeque;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum DosInterruptResult {\n\n\tShouldReturn,\n\n\tShouldReturnAndWaitForEvents,\n\n\tShouldBlockForKeypress,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum MachineType {\n\n\tEGA,\n\n}\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 23, "score": 10.52957912412754 }, { "content": "\t\tmatch self.file_handles.iter().position(|ref slot| slot.is_none()) {\n\n\t\t\tSome(pos) => pos,\n\n\t\t\tNone => {\n\n\t\t\t\tlet pos = self.file_handles.len();\n\n\t\t\t\tself.file_handles.push(None);\n\n\t\t\t\tpos\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn get_file_from_handle(&mut self, handle: u16) -> Result<&mut std::fs::File, DosErrorCode> {\n\n\t\tif handle == 0 {\n\n\t\t\tErr(DosErrorCode::InvalidFileHandle)\n\n\t\t} else {\n\n\t\t\tlet handle_index = (handle - 1) as usize;\n\n\t\t\tif let Some(Some(ref mut file)) = self.file_handles.get_mut(handle_index) {\n\n\t\t\t\tOk(file)\n\n\t\t\t} else {\n\n\t\t\t\tErr(DosErrorCode::InvalidFileHandle)\n\n\t\t\t}\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 24, "score": 10.23755596691949 }, { "content": "\t\t}\n\n\t}\n\n\t\n\n\tpub fn set_cga_vertial_retrace(&mut self, vertical_retrace: bool) {\n\n\t\tif vertical_retrace {\n\n\t\t\tself.port_states.cga_status_register |= 0b1000u16;\n\n\t\t} else {\n\n\t\t\tself.port_states.cga_status_register &= !0b1000u16;\n\n\t\t\t// Toggle the first 
bit.\n\n\t\t\tself.port_states.cga_status_register ^= 0b1u16;\n\n\t\t}\n\n\t}\n\n\n\n\tfn get_page_origin_address(&self, machine: &Machine8086, video_page: u8) -> u32 {\n\n\t\tlet page_bytes = machine.get_data_u16(&BIOS_TEXT_PAGE_BYTES);\n\n\t\tself.video_mode.text_address + (video_page as u32 * page_bytes as u32)\n\n\t}\n\n\t\n\n\tfn get_screen_character_address(&self, machine: &Machine8086, page_origin_address: u32, x: u8, y: u8) -> u32 {\n\n\t\tlet bytes_per_char = 2;\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 25, "score": 10.209780735189549 }, { "content": "\t\t\t\t\t\t// TODO 776406\n\n\t\t\t\t\t\t// Get system time.\n\n\t\t\t\t\t\tlet hundredths = ((self.seconds_since_start * 100.) as usize % 100) as u8;\n\n\t\t\t\t\t\tlet second = (self.seconds_since_start as usize % 60) as u8;\n\n\t\t\t\t\t\tlet minute = ((self.seconds_since_start / 60.) as usize % 60) as u8;\n\n\t\t\t\t\t\tlet hour = ((self.seconds_since_start / 60. / 60.) as usize % 24) as u8;\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::CX, RegHalf::High, hour);\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::CX, RegHalf::Low, minute);\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::DX, RegHalf::High, second);\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::DX, RegHalf::Low, hundredths);\n\n\t\t\t\t\t\tself.result = DosInterruptResult::ShouldReturnAndWaitForEvents;\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x30 => {\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::Low, 5);\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::High, 22);\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::BX, RegHalf::High, 0xff);\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::BX, RegHalf::Low, 0);\n\n\t\t\t\t\t\tmachine.set_reg_u16(Reg::CX, 0);\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x33 => {\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 27, "score": 9.107959503687814 }, { "content": "\t\t\t\t\t\tmatch self.file_system.find_first_file(rest_of_mem, file_attributes, &search_spec) {\n\n\t\t\t\t\t\t\tOk(()) => {\n\n\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, 
false);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x4f => {\n\n\t\t\t\t\t\t// Find next matching file after the last 0x4e/0x4f interrupt.\n\n\t\t\t\t\t\tlet rest_of_mem = &mut machine.memory[self.disk_trasnsfer_address as usize..];\n\n\t\t\t\t\t\tmatch self.file_system.find_next_file(rest_of_mem) {\n\n\t\t\t\t\t\t\tOk(()) => {\n\n\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, false);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 28, "score": 9.10220346309191 }, { "content": "\n\nimpl MachineType {\n\n\tpub fn lookup_video_mode(&self, mode_index: u8) -> Result<VideoMode, String> {\n\n\t\tmatch self {\n\n\t\t\tMachineType::EGA => {\n\n\t\t\t\tfor video_mode in &EGA_MODES {\n\n\t\t\t\t\tif video_mode.mode_index == mode_index {\n\n\t\t\t\t\t\treturn Ok(video_mode.clone());\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t\t\n\n\t\tErr(format!(\"Couldn't find video mode for {:?}: 0x{:02x}\", self, mode_index))\n\n\t}\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 29, "score": 8.865214061554244 }, { "content": "use crate::bios_loader::initialise_bios_data_area;\n\n\n\nuse xachtsechs::types::{DataLocation8, DataLocation16, Reg};\n\nuse xachtsechs::machine8086::Machine8086;\n\nuse byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};\n\nuse std::io::Seek;\n\n\n\n// https://wiki.osdev.org/MZ\n\n\n\nconst EXE_PARAGRAPH_BYTES: usize = 16;\n\n// The Program Segment Prefix is 256 bytes in size, which is 16 paragraphs.\n\nconst EXE_PROGRAM_SEGMENT_PREFIX_PARAGRAPHS: usize = 16;\n\nconst EXE_BLOCK_BYTES: usize = 
512;\n\n// This is the paragraph where the EXE file puts the code data.\n\nconst EXE_ORIGIN_PARAGRAPH: usize = 0x100;\n\n\n\n#[derive(Debug)]\n\npub struct MzHeader {\n\n\tsignature: u16,\n\n\tlast_block_bytes: u16,\n", "file_path": "libpseudos/src/exe_loader.rs", "rank": 30, "score": 8.818771742308822 }, { "content": "\t\t\t\t\t\t\tmatch self.file_system.read(handle, destination) {\n\n\t\t\t\t\t\t\t\tOk(read_count) => {\n\n\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, false);\n\n\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, read_count);\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x40 => {\n\n\t\t\t\t\t\t// WRITE\n\n\t\t\t\t\t\tlet handle = machine.get_reg_u16(Reg::BX);\n\n\t\t\t\t\t\tlet count = machine.get_reg_u16(Reg::CX) as usize;\n\n\t\t\t\t\t\tlet destination_addr = machine.get_seg_reg(Reg::DS, Reg::DX) as usize;\n\n\t\t\t\t\t\tlet rest_of_mem = &mut machine.memory[destination_addr..];\n\n\t\t\t\t\t\t\n\n\t\t\t\t\t\tif rest_of_mem.len() < count {\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 31, "score": 8.522866222585144 }, { "content": "\t\tlet column_count = machine.get_data_u16(&BIOS_TEXT_COLUMN_COUNT);\n\n\t\tpage_origin_address + (((y as u32 * column_count as u32) + x as u32) * bytes_per_char)\n\n\t}\n\n\t\n\n\tfn handle_interrupt_10h(&mut self, machine: &mut Machine8086) {\n\n\t\t// Video (http://www.ctyme.com/intr/int-10.htm)\n\n\t\tlet video_int = machine.get_reg_u8(Reg::AX, RegHalf::High);\n\n\t\t//println!(\"Video interrupt: 0x{:x}\", video_int);\n\n\t\tmatch video_int {\n\n\t\t\t0x00 => {\n\n\t\t\t\t// TODO: Set video mode.\n\n\t\t\t}\n\n\t\t\t0x01 => {\n\n\t\t\t\t// TODO: Set text-mode cursor shape.\n\n\t\t\t}\n\n\t\t\t0x02 => {\n\n\t\t\t\t// Set cursor position.\n\n\t\t\t\tlet bh = 
machine.get_reg_u8(Reg::BX, RegHalf::High);\n\n\t\t\t\tlet video_page = if bh == 0xff { machine.get_data_u8(&BIOS_ACTIVE_VIDEO_PAGE) } else { bh };\n\n\t\t\t\t\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 32, "score": 8.497209654051526 }, { "content": "\t\t\t\t\t\tmatch io_func {\n\n\t\t\t\t\t\t\t0 => {\n\n\t\t\t\t\t\t\t\t// Get device information\n\n\t\t\t\t\t\t\t\t// TODO\n\n\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, 1);\n\n\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t_ => println!(\"Unknown IO func: 0x{:x}\", io_func)\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x4a => {\n\n\t\t\t\t\t\t// Modify Allocated Memory Block (SETBLOCK)\n\n\t\t\t\t\t\tunimplemented!();\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x4e => {\n\n\t\t\t\t\t\t// Find first matching file for a filename glob.\n\n\t\t\t\t\t\tlet file_attributes = machine.get_reg_u16(Reg::CX);\n\n\t\t\t\t\t\tlet search_spec_addr = machine.get_seg_reg(Reg::DS, Reg::DX);\n\n\t\t\t\t\t\tlet search_spec = machine.read_null_terminated_string(search_spec_addr);\n\n\t\t\t\t\t\tlet rest_of_mem = &mut machine.memory[self.disk_trasnsfer_address as usize..];\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 33, "score": 8.478554086645277 }, { "content": "\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, false);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x3f => {\n\n\t\t\t\t\t\t// READ\n\n\t\t\t\t\t\tlet handle = machine.get_reg_u16(Reg::BX);\n\n\t\t\t\t\t\tlet count = machine.get_reg_u16(Reg::CX) as usize;\n\n\t\t\t\t\t\tlet destination_addr = machine.get_seg_reg(Reg::DS, Reg::DX) as usize;\n\n\t\t\t\t\t\tlet rest_of_mem = &mut machine.memory[destination_addr..];\n\n\t\t\t\t\t\t\n\n\t\t\t\t\t\tif rest_of_mem.len() < count 
{\n\n\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, DosErrorCode::InsufficientMemory as u16);\n\n\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tlet destination = &mut rest_of_mem[..count];\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 34, "score": 8.44573156671981 }, { "content": "\t\t\t\tmatch func11 {\n\n\t\t\t\t\t0x30 => {\n\n\t\t\t\t\t\t// TODO: Get font information\n\n\t\t\t\t\t\t\n\n\t\t\t\t\t\t// Copying ZETA:\n\n\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t}\n\n\t\t\t\t\t_ => panic!(\"Unknown video 0x11 func: 0x{:x}\", func11)\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\t0x12 => {\n\n\t\t\t\t// Alternate function select\n\n\t\t\t\tlet func12 = machine.get_reg_u8(Reg::BX, RegHalf::Low);\n\n\t\t\t\tmatch func12 {\n\n\t\t\t\t\t0x30 => {\n\n\t\t\t\t\t\t// TODO: Select vertical resolution.\n\n\t\t\t\t\t}\n\n\t\t\t\t\t_ => panic!(\"Unknown video 0x12 func: 0x{:x}\", func12)\n\n\t\t\t\t}\n\n\t\t\t}\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 35, "score": 8.118230046232078 }, { "content": "\t\t}\n\n\t}\n\n\t\n\n\tfn find_first_file(&mut self, destination: &mut [u8], attributes: u16, search_spec: &[u8]) -> Result<(), DosErrorCode> {\n\n\t\tlet real_filepath = self.get_real_filepath(search_spec);\n\n\t\tlet mut file_queue = VecDeque::new();\n\n\t\tself.dir_listing.list_dir(&mut |dos_name| {\n\n\t\t\t//dbg!(ascii_filename_to_string(&dos_name.real_dos_name()));\n\n\t\t\tif filename_matches_spec(&dos_name, search_spec) {\n\n\t\t\t\tfile_queue.push_back(dos_name);\n\n\t\t\t}\n\n\t\t});\n\n\t\tself.current_file_queue = Some(file_queue);\n\n\t\t\n\n\t\tself.find_next_file(destination)\n\n\t}\n\n\t\n\n\tfn find_next_file(&mut self, destination: &mut [u8]) -> Result<(), DosErrorCode> {\n\n\t\tif let Some(ref mut current_file_queue) = self.current_file_queue {\n\n\t\t\tif let Some(ref next_file) = current_file_queue.pop_front() {\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 
36, "score": 7.964577963301156 }, { "content": "\t\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, read_count);\n\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x42 => {\n\n\t\t\t\t\t\t// SEEK\n\n\t\t\t\t\t\tlet handle = machine.get_reg_u16(Reg::BX);\n\n\t\t\t\t\t\tlet offset = ((machine.get_reg_u16(Reg::CX) as u32) << 16) + machine.get_reg_u16(Reg::DX) as u32;\n\n\t\t\t\t\t\tlet origin_mode = match machine.get_reg_u8(Reg::AX, RegHalf::Low) {\n\n\t\t\t\t\t\t\t0 => Some(DosFileSeekOrigin::Start),\n\n\t\t\t\t\t\t\t1 => Some(DosFileSeekOrigin::Current),\n\n\t\t\t\t\t\t\t2 => Some(DosFileSeekOrigin::End),\n\n\t\t\t\t\t\t\t_ => None,\n\n\t\t\t\t\t\t};\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 37, "score": 7.902461588662305 }, { "content": "\tpub fn data_end(&self) -> usize {\n\n\t\tlet subtract_bytes = if self.last_block_bytes > 0 {\n\n\t\t\tEXE_BLOCK_BYTES - self.last_block_bytes as usize\n\n\t\t} else {\n\n\t\t\t0\n\n\t\t};\n\n\t\t(self.file_block_count as usize * EXE_BLOCK_BYTES) - subtract_bytes\n\n\t}\n\n\t\n\n\tpub fn extract_data<StreamType>(&self, stream: &mut StreamType) -> Result<Vec<u8>, std::io::Error>\n\n\t\twhere StreamType: std::io::Read + std::io::Seek\n\n\t{\n\n\t\tstream.seek(std::io::SeekFrom::Start(self.data_start() as u64));\n\n\t\tlet data_length = self.data_end() - self.data_start();\n\n\t\tlet mut result = vec![];\n\n\t\tresult.resize(data_length, 0);\n\n\t\tstream.read(&mut result)?;\n\n\t\tOk(result)\n\n\t}\n\n\t\n", "file_path": "libpseudos/src/exe_loader.rs", "rank": 38, "score": 7.838348870557988 }, { "content": "\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, handle);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, 
true);\n\n\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x3d => {\n\n\t\t\t\t\t\t// OPEN\n\n\t\t\t\t\t\tlet filename_addr = machine.get_seg_reg(Reg::DS, Reg::DX);\n\n\t\t\t\t\t\tlet filename = machine.read_null_terminated_string(filename_addr);\n\n\t\t\t\t\t\tlet access_mode = match machine.get_reg_u8(Reg::AX, RegHalf::Low) {\n\n\t\t\t\t\t\t\t0 => Some(DosFileAccessMode::ReadOnly),\n\n\t\t\t\t\t\t\t1 => Some(DosFileAccessMode::WriteOnly),\n\n\t\t\t\t\t\t\t2 => Some(DosFileAccessMode::ReadWrite),\n\n\t\t\t\t\t\t\t_ => None,\n\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\t\n\n\t\t\t\t\t\tif let Some(access_mode) = access_mode {\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 39, "score": 7.79464562381335 }, { "content": "\t\t\t\t\t\t\tmachine.poke_u8(char_addr, 0);\n\n\t\t\t\t\t\t\tmachine.poke_u8(char_addr + 1, blank_char_attributes);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\t0x08 => {\n\n\t\t\t\t// Read char and attributes at cursor position\n\n\t\t\t\t// TODO\n\n\t\t\t\t/*let bh = machine.get_reg_u8(Reg::BX, RegHalf::High);\n\n\t\t\t\tlet bl = machine.get_reg_u8(Reg::BX, RegHalf::Low);\n\n\t\t\t\tlet video_page = if bh == 0xff { machine.get_data_u8(&BIOS_ACTIVE_VIDEO_PAGE) } else { bh };\n\n\t\t\t\tlet (cursor_x, cursor_y) = split_u16_high_low(machine.get_data_u16(&BIOS_CURSOR_POSITION[video_page as usize]));\n\n\t\t\t\tlet page_bytes = machine.get_data_u16(&BIOS_TEXT_PAGE_BYTES);\n\n\t\t\t\tlet column_count = machine.get_data_u16(&BIOS_TEXT_COLUMN_COUNT);\n\n\t\t\t\tlet bytes_per_char = 2;\n\n\t\t\t\tlet addr = self.video_mode.text_address + (video_page as u32 * page_bytes as u32) + (((cursor_y as u32 * column_count as u32) + cursor_x as u32) * bytes_per_char);\n\n\t\t\t\tdbg!((addr, video_page, column_count, cursor_x, cursor_y, bl, bh));\n\n\t\t\t\tlet char_colour_attrs = machine.peek_u16(addr);\n\n\t\t\t\tmachine.set_reg_u16(Reg::AX, 
char_colour_attrs);*/\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 40, "score": 7.361793690819898 }, { "content": "\t\t\t\tOk(slot as u16 + 1)\n\n\t\t\t}\n\n\t\t\tErr(err) => Err(std_file_error_to_dos_error(err)),\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn close(&mut self, handle: u16) -> Result<(), DosErrorCode> {\n\n\t\tif handle == 0 {\n\n\t\t\tErr(DosErrorCode::InvalidFileHandle)\n\n\t\t} else {\n\n\t\t\tlet handle_index = (handle - 1) as usize;\n\n\t\t\tif let Some(Some(ref mut file)) = self.file_handles.get_mut(handle_index) {\n\n\t\t\t\tself.file_handles[handle_index] = None;\n\n\t\t\t\tOk(())\n\n\t\t\t} else {\n\n\t\t\t\tErr(DosErrorCode::InvalidFileHandle)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\t\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 41, "score": 7.348876612250925 }, { "content": "\t\t\tport_61: 0,\n\n\t\t\tcrt_index_register: 0,\n\n\t\t\tcga_status_register: 0,\n\n\t\t\tcga_palette_register: 0,\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub struct KeyPressInfo {\n\n\tpub scan_code: u8,\n\n\tpub ascii_char: u8,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum KeyModType {\n\n\tShift,\n\n\tCtrl,\n\n\tAlt,\n\n}\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 42, "score": 7.2962520007309415 }, { "content": "\n\n#[derive(Debug)]\n\npub struct StandardDosFileSystem {\n\n\troot_path: std::path::PathBuf,\n\n\tfile_handles: Vec<Option<std::fs::File>>,\n\n\tdir_listing: DirListingCache,\n\n\tcurrent_file_queue: Option<VecDeque<DosFileName>>,\n\n}\n\n\n\nimpl StandardDosFileSystem {\n\n\tpub fn new(root_path: std::path::PathBuf) -> StandardDosFileSystem {\n\n\t\tStandardDosFileSystem {\n\n\t\t\troot_path: root_path.clone(),\n\n\t\t\tfile_handles: vec![],\n\n\t\t\tcurrent_file_queue: None,\n\n\t\t\tdir_listing: DirListingCache::new(root_path.clone()),\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn get_empty_slot(&mut self) -> usize {\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 43, 
"score": 6.973500409734184 }, { "content": "use xachtsechs::machine8086::Machine8086;\n\nuse xachtsechs::types::{DataLocation8, DataLocation16};\n\n\n\npub const BIOS_START: u32 = 0x40 << 4;\n\nconst fn bios_off_u8(offset: u32) -> DataLocation8 {\n\n\tDataLocation8::MemoryAbs(BIOS_START + offset)\n\n}\n\nconst fn bios_off_u16(offset: u32) -> DataLocation16 {\n\n\tDataLocation16::MemoryAbs(BIOS_START + offset)\n\n}\n\n\n\npub const BIOS_EQUIPMENT: DataLocation16 = bios_off_u16(10);\n\npub const BIOS_MEMORY_SIZE_KB: DataLocation16 = bios_off_u16(0x13);\n\npub const BIOS_VIDEO_MODE_INDEX: DataLocation8 = bios_off_u8(0x49);\n\npub const BIOS_TEXT_COLUMN_COUNT: DataLocation16 = bios_off_u16(0x4a);\n\npub const BIOS_TEXT_PAGE_BYTES: DataLocation16 = bios_off_u16(0x4c);\n\npub const BIOS_CURSOR_POSITION: [DataLocation16; 8] = [\n\n\tbios_off_u16(0x50), bios_off_u16(0x52), bios_off_u16(0x54), bios_off_u16(0x56),\n\n\tbios_off_u16(0x58), bios_off_u16(0x5a), bios_off_u16(0x5c), bios_off_u16(0x5e),\n\n];\n\npub const BIOS_ACTIVE_VIDEO_PAGE: DataLocation8 = bios_off_u8(0x62);\n\npub const BIOS_VIDEO_IO_PORT_ADDRESS: DataLocation16 = bios_off_u16(0x63);\n\npub const BIOS_SYSTEM_TIMER_COUNTER_ADDR_U32: u32 = BIOS_START + 0x6c;\n\npub const BIOS_SYSTEM_TIMER_COUNTER_LOW: DataLocation16 = bios_off_u16(0x6c);\n\npub const BIOS_SYSTEM_TIMER_COUNTER_HIGH: DataLocation16 = bios_off_u16(0x6e);\n\npub const BIOS_TEXT_ROW_COUNT: DataLocation16 = bios_off_u16(0x84);\n\npub const BIOS_CHAR_HEIGHT: DataLocation16 = bios_off_u16(0x85);\n\n\n\n// http://www.bioscentral.com/misc/bda.htm\n", "file_path": "libpseudos/src/bios_loader.rs", "rank": 44, "score": 6.798721120674484 }, { "content": "\t\t\t\t\t\tif let Some(origin_mode) = origin_mode {\n\n\t\t\t\t\t\t\tmatch self.file_system.seek(handle, offset, origin_mode) {\n\n\t\t\t\t\t\t\t\tOk(new_file_position) => {\n\n\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, false);\n\n\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, (new_file_position & 
0xffff) as u16);\n\n\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::DX, ((new_file_position >> 16) & 0xffff) as u16);\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, DosErrorCode::InvalidData as u16);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x44 => {\n\n\t\t\t\t\t\t// I/O control\n\n\t\t\t\t\t\tlet io_func = machine.get_reg_u8(Reg::AX, RegHalf::Low);\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 45, "score": 6.790332639638862 }, { "content": "\t\t\t\t\t\t\tmatch self.file_system.open(&filename, access_mode) {\n\n\t\t\t\t\t\t\t\tOk(handle) => {\n\n\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, false);\n\n\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, handle);\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, DosErrorCode::InvalidFileAccessMode as u16);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x3e => {\n\n\t\t\t\t\t\t// CLOSE\n\n\t\t\t\t\t\tlet handle = machine.get_reg_u16(Reg::BX);\n\n\t\t\t\t\t\tmatch self.file_system.close(handle) {\n\n\t\t\t\t\t\t\tOk(_) => {\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 46, "score": 6.761239012688179 }, { "content": "\t\tvalue\n\n\t}\n\n\t\n\n\tfn handle_port_output(&mut self, machine: &mut Machine8086, port_index: u16, value: u16) {\n\n\t\t//println!(\"Port out({}): {}\", port_index, value);\n\n\t\tmatch port_index {\n\n\t\t\t0x42 => {\n\n\t\t\t\t// TODO: PIT cassette and 
speaker\n\n\t\t\t\tprintln!(\"PIT: {}\", value);\n\n\t\t\t}\n\n\t\t\t0x43 => {\n\n\t\t\t\t// TODO: Programmable interrupt timer (PIT), control register\n\n\t\t\t\tprintln!(\"PIT Control: {}\", value);\n\n\t\t\t}\n\n\t\t\t0x61 => {\n\n\t\t\t\tself.port_states.port_61 = value;\n\n\t\t\t}\n\n\t\t\t0x201 => {\n\n\t\t\t\t// TODO: Something about joystick one-shots?\n\n\t\t\t}\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 47, "score": 6.75218179279819 }, { "content": "\t\t\t\t\n\n\t\t\t\t// Copying ZETA to test that it actually works:\n\n\t\t\t\tlet bh = machine.get_reg_u8(Reg::BX, RegHalf::High);\n\n\t\t\t\tlet bl = machine.get_reg_u8(Reg::BX, RegHalf::Low);\n\n\t\t\t\tlet addr = self.video_mode.text_address + (((bl as u32 * 80 as u32) + bh as u32) * 2);\n\n\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::Low, machine.peek_u8(addr));\n\n\t\t\t\tmachine.set_reg_u8(Reg::BX, RegHalf::High, machine.peek_u8(addr + 1));\n\n\t\t\t}\n\n\t\t\t0x0f => {\n\n\t\t\t\t// Get current video mode\n\n\t\t\t\tlet text_column_count = machine.get_data_u16(&BIOS_TEXT_COLUMN_COUNT);\n\n\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::High, text_column_count as u8);\n\n\t\t\t\t// Video modes covered in: http://www.ctyme.com/intr/rb-0069.htm\n\n\t\t\t\t// 3 is the 80x25 colour mode\n\n\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::Low, 3);\n\n\t\t\t\t// Active display page (http://www.ctyme.com/intr/rb-0091.htm)\n\n\t\t\t\tmachine.set_reg_u8(Reg::BX, RegHalf::High, machine.get_data_u8(&BIOS_ACTIVE_VIDEO_PAGE));\n\n\t\t\t}\n\n\t\t\t0x11 => {\n\n\t\t\t\tlet func11 = machine.get_reg_u8(Reg::AX, RegHalf::Low);\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 49, "score": 6.592542587743466 }, { "content": "\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, DosErrorCode::InvalidData as u16);\n\n\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tif count == 0 {\n\n\t\t\t\t\t\t\t\t// Count of 0 truncates or extends the file to the current 
position.\n\n\t\t\t\t\t\t\t\tmatch self.file_system.truncate(handle) {\n\n\t\t\t\t\t\t\t\t\tOk(_) => {\n\n\t\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, false);\n\n\t\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, 0);\n\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t\tErr(error_code) => {\n\n\t\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, true);\n\n\t\t\t\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, error_code as u16);\n\n\t\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\t\tlet write_data = &rest_of_mem[..count];\n\n\t\t\t\t\t\t\t\tmatch self.file_system.write(handle, write_data) {\n\n\t\t\t\t\t\t\t\t\tOk(read_count) => {\n\n\t\t\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, false);\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 50, "score": 6.555193342175896 }, { "content": "\t\t\t\tlet dh = machine.get_reg_u8(Reg::DX, RegHalf::High);\n\n\t\t\t\tlet dl = machine.get_reg_u8(Reg::DX, RegHalf::Low);\n\n\t\t\t\tlet cursor_pos_data = ((dh as u16) << 8) + dl as u16;\n\n\t\t\t\tmachine.set_data_u16(&BIOS_CURSOR_POSITION[video_page as usize], cursor_pos_data);\n\n\t\t\t}\n\n\t\t\t0x03 => {\n\n\t\t\t\t// TODO: Get cursor position and size.\n\n\t\t\t}\n\n\t\t\t0x06 => {\n\n\t\t\t\t// Scroll the text up within a rectangular area on the active page.\n\n\t\t\t\tlet video_page = machine.get_data_u8(&BIOS_ACTIVE_VIDEO_PAGE);\n\n\t\t\t\tlet num_lines = machine.get_reg_u8(Reg::AX, RegHalf::Low);\n\n\t\t\t\tlet blank_char_attributes = machine.get_reg_u8(Reg::BX, RegHalf::High);\n\n\t\t\t\tlet rect_top = machine.get_reg_u8(Reg::CX, RegHalf::High);\n\n\t\t\t\tlet rect_left = machine.get_reg_u8(Reg::CX, RegHalf::Low);\n\n\t\t\t\tlet rect_bottom = machine.get_reg_u8(Reg::DX, RegHalf::High);\n\n\t\t\t\tlet rect_right = machine.get_reg_u8(Reg::DX, RegHalf::Low);\n\n\t\t\t\tlet page_addr = self.get_page_origin_address(machine, video_page);\n\n\t\t\t\t\n\n\t\t\t\tif num_lines == 0 {\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 
51, "score": 6.376225452544536 }, { "content": "\t\n\n\tfn handle_port_input(&mut self, machine: &mut Machine8086, port_index: u16) -> u16 {\n\n\t\t// http://bochs.sourceforge.net/techspec/PORTS.LST\n\n\t\tlet value = match port_index {\n\n\t\t\t0x61 => {\n\n\t\t\t\t// \"Keyboard Controller\" control register.\n\n\t\t\t\tself.port_states.port_61\n\n\t\t\t}\n\n\t\t\t0x201 => {\n\n\t\t\t\t// TODO: Read joystick values.\n\n\t\t\t\t0xf0\n\n\t\t\t}\n\n\t\t\t0x3da => {\n\n\t\t\t\tlet status = self.port_states.cga_status_register;\n\n\t\t\t\tself.set_cga_vertial_retrace(false);\n\n\t\t\t\tstatus\n\n\t\t\t}\n\n\t\t\t_ => panic!(\"Unhandled input port index: 0x{:02x}\", port_index)\n\n\t\t};\n\n\t\t//println!(\"Port in({}): {}\", port_index, value);\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 53, "score": 6.267708989579392 }, { "content": "\t\t\t\t\t\t// Read key status\n\n\t\t\t\t\t\tif let Some(key_press_info) = self.key_press_queue.front() {\n\n\t\t\t\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::High, key_press_info.scan_code);\n\n\t\t\t\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::Low, key_press_info.ascii_char);\n\n\t\t\t\t\t\t\tmachine.set_flag(Flag::Zero, false);\n\n\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tmachine.set_reg_u16(Reg::AX, 0);\n\n\t\t\t\t\t\t\tmachine.set_flag(Flag::Zero, true);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x02 => {\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::Low, self.key_mod);\n\n\t\t\t\t\t}\n\n\t\t\t\t\t_ => panic!(\"Unknown keyboard interrupt: 0x{:x}\", key_int)\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\t0x1c => {\n\n\t\t\t\t// User timer tick, emitted by 0x08.\n\n\t\t\t}\n\n\t\t\t\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 54, "score": 6.191310332301429 }, { "content": "\tpub fn load_into_machine<StreamType>(&self, machine: &mut Machine8086, stream: &mut StreamType)\n\n\t\twhere StreamType: std::io::Read + std::io::Seek\n\n\t{\n\n\t\tmachine.set_reg_u16(Reg::SP, 
self.initial_sp);\n\n\t\tmachine.set_reg_u16(Reg::IP, self.initial_ip);\n\n\t\t\n\n\t\tlet segment_offset = (EXE_ORIGIN_PARAGRAPH + EXE_PROGRAM_SEGMENT_PREFIX_PARAGRAPHS) as u16;\n\n\t\tmachine.set_reg_u16(Reg::SS, self.initial_ss.wrapping_add(segment_offset));\n\n\t\tmachine.set_reg_u16(Reg::CS, self.initial_cs.wrapping_add(segment_offset));\n\n\t\t\n\n\t\tmachine.set_reg_u16(Reg::DS, EXE_ORIGIN_PARAGRAPH as u16);\n\n\t\tmachine.set_reg_u16(Reg::ES, EXE_ORIGIN_PARAGRAPH as u16);\n\n\t\t\n\n\t\tlet exe_data = self.extract_data(stream).unwrap();\n\n\t\tmachine.insert_contiguous_bytes(&exe_data, (EXE_ORIGIN_PARAGRAPH + 16) * EXE_PARAGRAPH_BYTES);\n\n\t\t\n\n\t\tinitialise_bios_data_area(machine);\n\n\t\tinitialise_dos_program_segment_prefix(machine, exe_data.len(), b\"\");\n\n\t\t\n\n\t\t/*for (i, b) in machine.memory[10000..20000].iter().enumerate() {\n\n\t\t\tprintln!(\"{}: {:02x}\", i + 10000, b);\n\n\t\t}\n\n\t\tpanic!();*/\n\n\t}\n\n}\n\n\n\n// https://en.wikipedia.org/wiki/Program_Segment_Prefix\n", "file_path": "libpseudos/src/exe_loader.rs", "rank": 55, "score": 6.00104894398846 }, { "content": "\t\t\t0x14 => {\n\n\t\t\t\t// Serial port services\n\n\t\t\t\tlet serial_int = machine.get_reg_u8(Reg::AX, RegHalf::High);\n\n\t\t\t\t//println!(\"Serial port interrupt: {}\", serial_int);\n\n\t\t\t}\n\n\t\t\t0x16 => {\n\n\t\t\t\t// Keyboard driver\n\n\t\t\t\tlet key_int = machine.get_reg_u8(Reg::AX, RegHalf::High);\n\n\t\t\t\t//println!(\"Keyboard Interrupt: 0x{:x}\", key_int);\n\n\t\t\t\tmatch key_int {\n\n\t\t\t\t\t0x00 => {\n\n\t\t\t\t\t\t// Wait for keypress and read character.\n\n\t\t\t\t\t\tif let Some(key_press_info) = self.key_press_queue.pop_front() {\n\n\t\t\t\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::High, key_press_info.scan_code);\n\n\t\t\t\t\t\t\tmachine.set_reg_u8(Reg::AX, RegHalf::Low, key_press_info.ascii_char);\n\n\t\t\t\t\t\t} else {\n\n\t\t\t\t\t\t\tself.result = 
DosInterruptResult::ShouldBlockForKeypress;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x01 => {\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 56, "score": 5.779856601119603 }, { "content": "\tfn read(&mut self, handle: u16, destination: &mut [u8]) -> Result<u16, DosErrorCode> {\n\n\t\tlet file = self.get_file_from_handle(handle)?;\n\n\t\tmatch file.read(destination) {\n\n\t\t\tOk(read_count) => Ok(read_count as u16),\n\n\t\t\tErr(err) => Err(std_file_error_to_dos_error(err)),\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn write(&mut self, handle: u16, data: &[u8]) -> Result<u16, DosErrorCode> {\n\n\t\tlet file = self.get_file_from_handle(handle)?;\n\n\t\tmatch file.write(data) {\n\n\t\t\tOk(written_count) => Ok(written_count as u16),\n\n\t\t\tErr(err) => Err(std_file_error_to_dos_error(err)),\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn seek(&mut self, handle: u16, offset: u32, origin: DosFileSeekOrigin) -> Result<u32, DosErrorCode> {\n\n\t\tlet file = self.get_file_from_handle(handle)?;\n\n\t\tlet seek_from = match origin {\n\n\t\t\tDosFileSeekOrigin::Start => std::io::SeekFrom::Start(offset as u64),\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 57, "score": 5.752023035645354 }, { "content": "\t\t\t\t\t\t// Modify Ctrl+Break shortcut functionality.\n\n\t\t\t\t\t\t// TODO\n\n\t\t\t\t\t\tmachine.set_reg_u8(Reg::DX, RegHalf::Low, 0);\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x35 => {\n\n\t\t\t\t\t\t// Get an entry of the interrupt vector/table (IP:CS) and store it in ES:BX.\n\n\t\t\t\t\t\tlet entry_addr = machine.get_reg_u8(Reg::AX, RegHalf::Low) as u32 * INTERRUPT_TABLE_ENTRY_BYTES as u32;\n\n\t\t\t\t\t\tlet interrupt_ip = machine.peek_u16(entry_addr);\n\n\t\t\t\t\t\tlet interrupt_cs = machine.peek_u16(entry_addr + 2);\n\n\t\t\t\t\t\tmachine.set_reg_u16(Reg::BX, interrupt_ip);\n\n\t\t\t\t\t\tmachine.set_reg_u16(Reg::ES, interrupt_cs);\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x3c => {\n\n\t\t\t\t\t\t// CREATE\n\n\t\t\t\t\t\tlet filename_addr = machine.get_seg_reg(Reg::DS, 
Reg::DX);\n\n\t\t\t\t\t\tlet filename = machine.read_null_terminated_string(filename_addr);\n\n\t\t\t\t\t\tlet attributes = machine.get_reg_u16(Reg::CX);\n\n\t\t\t\t\t\tmatch self.file_system.create(&filename, attributes) {\n\n\t\t\t\t\t\t\tOk(handle) => {\n\n\t\t\t\t\t\t\t\tmachine.set_flag(Flag::Carry, false);\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 58, "score": 5.3387206034237185 }, { "content": "\t\t\t// This is the DOS interrupt.\n\n\t\t\t// http://spike.scu.edu.au/~barry/interrupts.html\n\n\t\t\t// http://stanislavs.org/helppc/int_21.html\n\n\t\t\t0x21 => {\n\n\t\t\t\tlet dos_int = machine.get_reg_u8(Reg::AX, RegHalf::High);\n\n\t\t\t\t//println!(\"DOS Interrupt: 0x{:x}\", dos_int);\n\n\t\t\t\tmatch dos_int {\n\n\t\t\t\t\t0x1a => {\n\n\t\t\t\t\t\t// Set the Disk Transfer Address\n\n\t\t\t\t\t\tself.disk_trasnsfer_address = machine.get_seg_reg(Reg::DS, Reg::DX);\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x25 => {\n\n\t\t\t\t\t\t// Get ES:BX and store it as an entry of the interrupt vector/table (as the IP:CS).\n\n\t\t\t\t\t\tlet entry_addr = machine.get_reg_u8(Reg::AX, RegHalf::Low) as u32 * INTERRUPT_TABLE_ENTRY_BYTES as u32;\n\n\t\t\t\t\t\tlet interrupt_ip = machine.get_reg_u16(Reg::DX);\n\n\t\t\t\t\t\tlet interrupt_cs = machine.get_reg_u16(Reg::DS);\n\n\t\t\t\t\t\tmachine.poke_u16(entry_addr, interrupt_ip);\n\n\t\t\t\t\t\tmachine.poke_u16(entry_addr + 2, interrupt_cs);\n\n\t\t\t\t\t}\n\n\t\t\t\t\t0x2c => {\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 59, "score": 5.19830703577858 }, { "content": "\t\t\tself.real_to_dos_names.insert(real_name.clone(), dos_filename.clone());\n\n\t\t\treal_name\n\n\t\t}\n\n\t}\n\n\n\n\tfn list_dir(&mut self, on_found_file: &mut FnMut(DosFileName)) {\n\n\t\tif let Ok(read_dir) = std::fs::read_dir(&self.dir_path) {\n\n\t\t\tfor dir_file in read_dir {\n\n\t\t\t\tif let Ok(dir_file_entry) = dir_file {\n\n\t\t\t\t\tif let Ok(dir_file_entry_name) = dir_file_entry.file_name().into_string() 
{\n\n\t\t\t\t\t\ton_found_file(self.get_dos_name(&dir_file_entry_name));\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\t\n\n\t//fn get_real_name(filename: &[u8])\n\n}\n\n\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 60, "score": 5.15317162706773 }, { "content": "\tfile_block_count: u16,\n\n\trelocation_items: u16,\n\n\theader_paragraph_count: u16,\n\n\tminimum_memory_paragraphs: u16,\n\n\tmaximum_memory_paragraphs: u16,\n\n\tinitial_ss: u16,\n\n\tinitial_sp: u16,\n\n\tchecksum: u16,\n\n\tinitial_ip: u16,\n\n\tinitial_cs: u16,\n\n\trelocation_table: u16,\n\n\toverlay: u16,\n\n\toverlay_information: u16,\n\n}\n\n\n\nimpl MzHeader {\n\n\tpub fn byte_size() -> usize {\n\n\t\t28\n\n\t}\n\n\n", "file_path": "libpseudos/src/exe_loader.rs", "rank": 61, "score": 5.081845924260051 }, { "content": "\t\t\t\t}\n\n\t\t\t} else {\n\n\t\t\t\treturn false;\n\n\t\t\t}\n\n\t\t}\n\n\t\tif just_processed_star {\n\n\t\t\tspec_pos += 1;\n\n\t\t}\n\n\t\tspec_pos == spec.len()\n\n\t};\n\n\t\n\n\tlet (spec_title, spec_ext) = split_filename(search_spec);\n\n\tlet title_matches = match_against_spec(&filename.title, spec_title);\n\n\tlet ext_matches = if let Some(spec_ext) = spec_ext {\n\n\t\tmatch_against_spec(&filename.ext, spec_ext)\n\n\t} else {\n\n\t\ttrue\n\n\t};\n\n\ttitle_matches && ext_matches\n\n}\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 62, "score": 4.959451932277631 }, { "content": "pub mod bios_loader;\n\npub mod dos_event_handler;\n\npub mod dos_error_codes;\n\npub mod dos_file_system;\n\npub mod exe_loader;\n\n\n\n// https://en.wikipedia.org/wiki/Program_Segment_Prefix\n\n// https://toonormal.com/2018/06/07/notes-ms-dos-dev-for-intel-8086-cpus-using-a-modern-pc/\n\n// - \"DOS programs require that all programs start at the 256 byte boundary\"\n\n// https://www.daniweb.com/programming/software-development/threads/291076/whats-org-100h\n", "file_path": "libpseudos/src/lib.rs", "rank": 63, "score": 4.6590578215965 }, { "content": 
"\t\t\t\t\t// Clear the window.\n\n\t\t\t\t\tfor y in rect_top ..= rect_bottom {\n\n\t\t\t\t\t\tfor x in rect_left ..= rect_right {\n\n\t\t\t\t\t\t\tlet char_addr = self.get_screen_character_address(machine, page_addr, x, y);\n\n\t\t\t\t\t\t\tmachine.poke_u8(char_addr, 0);\n\n\t\t\t\t\t\t\tmachine.poke_u8(char_addr + 1, blank_char_attributes);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t} else {\n\n\t\t\t\t\tfor y in rect_top ..= (rect_bottom - num_lines) {\n\n\t\t\t\t\t\tfor x in rect_left ..= rect_right {\n\n\t\t\t\t\t\t\tlet from_addr = self.get_screen_character_address(machine, page_addr, x, y + 1);\n\n\t\t\t\t\t\t\tlet to_addr = self.get_screen_character_address(machine, page_addr, x, y);\n\n\t\t\t\t\t\t\tlet char_data = machine.peek_u16(from_addr);\n\n\t\t\t\t\t\t\tmachine.poke_u16(to_addr, char_data);\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\tfor y in (rect_bottom - num_lines + 1) ..= rect_bottom {\n\n\t\t\t\t\t\tfor x in rect_left ..= rect_right {\n\n\t\t\t\t\t\t\tlet char_addr = self.get_screen_character_address(machine, page_addr, x, y);\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 64, "score": 4.577028538642965 }, { "content": "\t\t\tfile_block_count,\n\n\t\t\trelocation_items,\n\n\t\t\theader_paragraph_count,\n\n\t\t\tminimum_memory_paragraphs,\n\n\t\t\tmaximum_memory_paragraphs,\n\n\t\t\tinitial_ss,\n\n\t\t\tinitial_sp,\n\n\t\t\tchecksum,\n\n\t\t\tinitial_ip,\n\n\t\t\tinitial_cs,\n\n\t\t\trelocation_table,\n\n\t\t\toverlay,\n\n\t\t\toverlay_information,\n\n\t\t})\n\n\t}\n\n\t\n\n\tpub fn data_start(&self) -> usize {\n\n\t\tself.header_paragraph_count as usize * EXE_PARAGRAPH_BYTES\n\n\t}\n\n\t\n", "file_path": "libpseudos/src/exe_loader.rs", "rank": 65, "score": 4.378429765480269 }, { "content": "\t\t\tDosFileSeekOrigin::Current => std::io::SeekFrom::Current(offset as i64),\n\n\t\t\tDosFileSeekOrigin::End => std::io::SeekFrom::End(offset as i64),\n\n\t\t};\n\n\t\tmatch file.seek(seek_from) {\n\n\t\t\tOk(file_pos) => Ok(file_pos as 
u32),\n\n\t\t\tErr(err) => Err(std_file_error_to_dos_error(err)),\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn truncate(&mut self, handle: u16) -> Result<u32, DosErrorCode> {\n\n\t\tlet file = self.get_file_from_handle(handle)?;\n\n\t\t// TODO: Use file.stream_position() when it is stabilised:\n\n\t\tmatch file.seek(std::io::SeekFrom::Current(0)) {\n\n\t\t\tOk(current_pos) => {\n\n\t\t\t\tmatch file.set_len(current_pos) {\n\n\t\t\t\t\tOk(_) => Ok(current_pos as u32),\n\n\t\t\t\t\tErr(err) => Err(std_file_error_to_dos_error(err)),\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tErr(err) => Err(std_file_error_to_dos_error(err)),\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 66, "score": 4.190471507220664 }, { "content": "}\n\n\n\npub const EGA_MODES: [VideoMode; 1] = [\n\n\tVideoMode {\n\n\t\tmode_index: 3, vga_mode: VGAMode::Text, pixel_dims: (640, 480), text_dims: (80, 25),\n\n\t\tchar_pixel_dims: (8, 14), text_address: 0xb8000, text_page_count: 8, text_page_bytes: 0x1000,\n\n\t},\n\n];\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct PortStates {\n\n\tport_61: u16,\n\n\tcrt_index_register: u16,\n\n\tcga_status_register: u16,\n\n\tcga_palette_register: u16,\n\n}\n\n\n\nimpl PortStates {\n\n\tpub fn new() -> PortStates {\n\n\t\tPortStates {\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 67, "score": 4.0776649176976 }, { "content": "\t\t} else {\n\n\t\t\tlet mut dos_name = real_to_dos_name(&real_filename, None);\n\n\t\t\tlet mut name_index = 1;\n\n\t\t\twhile self.dos_to_real_names.contains_key(&dos_name) {\n\n\t\t\t\tdos_name = real_to_dos_name(&real_filename, Some(name_index));\n\n\t\t\t\tname_index += 1;\n\n\t\t\t}\n\n\t\t\tself.dos_to_real_names.insert(dos_name.clone(), real_filename.to_string());\n\n\t\t\tself.real_to_dos_names.insert(real_filename.to_string(), dos_name.clone());\n\n\t\t\tdos_name\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn get_real_name(&mut self, dos_filename: &DosFileName) -> String {\n\n\t\tself.list_dir(&mut |_|{});\n\n\t\tif let 
Some(existing_real_name) = self.dos_to_real_names.get(&dos_filename) {\n\n\t\t\texisting_real_name.clone()\n\n\t\t} else {\n\n\t\t\tlet mut real_name = ascii_filename_to_string(&dos_filename.real_dos_name());\n\n\t\t\tself.dos_to_real_names.insert(dos_filename.clone(), real_name.clone());\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 68, "score": 3.844003074161902 }, { "content": "\t\t\t\tlet next_name = next_file.real_dos_name();\n\n\t\t\t\t// http://stanislavs.org/helppc/int_21-4e.html\n\n\t\t\t\tlet filename_off = 0x1e;\n\n\t\t\t\tdestination[0x15..=filename_off].iter_mut().for_each(|b| *b = 0);\n\n\t\t\t\tlet filename_dest = &mut destination[filename_off..];\n\n\t\t\t\tfilename_dest[..next_name.len()].clone_from_slice(&next_name);\n\n\t\t\t\tfilename_dest[next_name.len()] = 0;\n\n\t\t\t\tOk(())\n\n\t\t\t} else {\n\n\t\t\t\tErr(DosErrorCode::NoMoreFiles)\n\n\t\t\t}\n\n\t\t} else {\n\n\t\t\tErr(DosErrorCode::NoMoreFiles)\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\tuse super::*;\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 70, "score": 3.6666442149695153 }, { "content": "\t\t\t}\n\n\t\t\tErr(err) => Err(std_file_error_to_dos_error(err)),\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn open(&mut self, filename: &[u8], access_mode: DosFileAccessMode) -> Result<u16, DosErrorCode> {\n\n\t\t// TODO: 776655\n\n\t\tlet real_filepath = self.get_real_filepath(filename);\n\n\t\tlet slot = self.get_empty_slot();\n\n\t\t\n\n\t\tlet mut open_options = std::fs::OpenOptions::new();\n\n\n\n\t\topen_options\n\n\t\t\t.read(access_mode == DosFileAccessMode::ReadOnly || access_mode == DosFileAccessMode::ReadWrite)\n\n\t\t\t.write(access_mode == DosFileAccessMode::WriteOnly || access_mode == DosFileAccessMode::ReadWrite)\n\n\t\t\t.create(access_mode == DosFileAccessMode::WriteOnly || access_mode == DosFileAccessMode::ReadWrite);\n\n\t\t\n\n\t\tmatch open_options.open(real_filepath) {\n\n\t\t\tOk(file) => {\n\n\t\t\t\tself.file_handles[slot] = 
Some(file);\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 72, "score": 3.5933956550235044 }, { "content": "use crate::dos_error_codes::DosErrorCode;\n\n\n\nuse std::io::{Read, Write, Seek};\n\nuse std::collections::{HashMap, VecDeque};\n\n\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 73, "score": 3.3476322354743626 }, { "content": "\t\t\t\t\t\t\t}\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\t_ => panic!(\"Unknown DOS interrupt: 0x{:x}\", dos_int)\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\t0x33 => {\n\n\t\t\t\t// Mouse function calls\n\n\t\t\t\t// http://stanislavs.org/helppc/int_33.html\n\n\t\t\t\tlet mouse_func = machine.get_reg_u16(Reg::AX);\n\n\t\t\t\tmatch mouse_func {\n\n\t\t\t\t\t0 => {\n\n\t\t\t\t\t\t// TODO get mouse installed flag\n\n\t\t\t\t\t}\n\n\t\t\t\t\t_ => panic!(\"Unknown mouse function: 0x{:x}\", mouse_func)\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\t_ => panic!(\"Unknown interrupt: 0x{:x}\", interrupt_index)\n\n\t\t}\n\n\t}\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 74, "score": 3.298195526080748 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq)]\n\n#[repr(u8)]\n\npub enum DosErrorCode {\n\n\tFileNotFound = 0x02,\n\n\tPathNotFound = 0x03,\n\n\tNoFileHandlesLeft = 0x04,\n\n\tAccessDenied = 0x05,\n\n\tInvalidFileHandle = 0x06,\n\n\tInsufficientMemory = 0x08,\n\n\tInvalidFileAccessMode = 0x0c,\n\n\tInvalidData = 0x0d,\n\n\tNoMoreFiles = 0x12,\n\n\tFileAlreadyExists = 0x50,\n\n}\n", "file_path": "libpseudos/src/dos_error_codes.rs", "rank": 75, "score": 3.291649787865955 }, { "content": "\t\t}\n\n\t}\n\n\t\n\n\t/*fn get_real_filepath(&self, filename: &[u8]) -> std::path::PathBuf {\n\n\t\tif filename.contains(&b'\\\\') {\n\n\t\t\tunimplemented!(\"DOS directory mapping to real directories\");\n\n\t\t}\n\n\t\tlet mut string_filename = String::from_utf8_lossy(filename).into_owned();\n\n\t\t\n\n\t\tif let Ok(read_dir) = std::fs::read_dir(&self.root_path) {\n\n\t\t\tfor dir_file in read_dir {\n\n\t\t\t\tif let 
Ok(dir_file_entry) = dir_file {\n\n\t\t\t\t\tif let Ok(dir_file_entry_name) = dir_file_entry.file_name().into_string() {\n\n\t\t\t\t\t\tif dir_file_entry_name.to_uppercase() == string_filename.to_uppercase() {\n\n\t\t\t\t\t\t\tstring_filename = dir_file_entry_name;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t\tself.root_path.join(string_filename)\n\n\t}*/\n\n\t\n\n\tfn get_real_filepath(&mut self, filename: &[u8]) -> std::path::PathBuf {\n\n\t\tlet real_name = self.dir_listing.get_real_name(&DosFileName::parse(filename));\n\n\t\tself.root_path.join(real_name)\n\n\t}\n\n}\n\n\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 76, "score": 3.2254426696352003 }, { "content": "pub enum DosFileAccessMode {\n\n\tReadOnly,\n\n\tWriteOnly,\n\n\tReadWrite,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum DosFileSeekOrigin {\n\n\tStart,\n\n\tCurrent,\n\n\tEnd,\n\n}\n\n\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 77, "score": 2.857405874611259 }, { "content": "\t\t\t}\n\n\t\t\t0x08 => {\n\n\t\t\t\t// Timer interrupt. 
This is supposed to be injected by an external source exactly\n\n\t\t\t\t// 18.2 times per second.\n\n\t\t\t\t// TODO 777497\n\n\t\t\t\tlet timer_low = machine.get_data_u16(&BIOS_SYSTEM_TIMER_COUNTER_LOW);\n\n\t\t\t\tlet timer_high = machine.get_data_u16(&BIOS_SYSTEM_TIMER_COUNTER_HIGH);\n\n\t\t\t\tlet timer = timer_low as u32 + ((timer_high as u32) << 16);\n\n\t\t\t\tlet new_timer = timer.wrapping_add(1);\n\n\t\t\t\t//println!(\"Time: {}\", new_timer);\n\n\t\t\t\tlet new_timer_low = (new_timer & 0xffff) as u16;\n\n\t\t\t\tlet new_timer_high = ((new_timer >> 16) & 0xffff) as u16;\n\n\t\t\t\tmachine.set_data_u16(&BIOS_SYSTEM_TIMER_COUNTER_LOW, new_timer_low);\n\n\t\t\t\tmachine.set_data_u16(&BIOS_SYSTEM_TIMER_COUNTER_HIGH, new_timer_high);\n\n\t\t\t\t// Emit user timer tick.\n\n\t\t\t\tmachine.interrupt_on_next_step(0x1c);\n\n\t\t\t}\n\n\t\t\t0x10 => {\n\n\t\t\t\tself.handle_interrupt_10h(machine);\n\n\t\t\t}\n", "file_path": "libpseudos/src/dos_event_handler.rs", "rank": 78, "score": 2.5512709266896367 }, { "content": "\tpub fn parse(stream: &mut std::io::Read) -> Result<MzHeader, String> {\n\n\t\tlet signature = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read signature: {}\", e))?;\n\n\t\tlet last_block_bytes = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read last_block_bytes: {}\", e))?;\n\n\t\tlet file_block_count = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read file_block_count: {}\", e))?;\n\n\t\tlet relocation_items = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read relocation_items: {}\", e))?;\n\n\t\tlet header_paragraph_count = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read header_paragraph_count: {}\", e))?;\n\n\t\tlet minimum_memory_paragraphs = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read minimum_memory_paragraphs: {}\", e))?;\n\n\t\tlet maximum_memory_paragraphs = stream.read_u16::<LittleEndian>().map_err(|e| 
format!(\"Failed to read maximum_memory_paragraphs: {}\", e))?;\n\n\t\tlet initial_ss = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read initial_ss: {}\", e))?;\n\n\t\tlet initial_sp = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read initial_sp: {}\", e))?;\n\n\t\tlet checksum = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read checksum: {}\", e))?;\n\n\t\tlet initial_ip = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read initial_ip: {}\", e))?;\n\n\t\tlet initial_cs = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read initial_cs: {}\", e))?;\n\n\t\tlet relocation_table = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read relocation_table: {}\", e))?;\n\n\t\tlet overlay = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read overlay: {}\", e))?;\n\n\t\tlet overlay_information = stream.read_u16::<LittleEndian>().map_err(|e| format!(\"Failed to read overlay_information: {}\", e))?;\n\n\t\t\n\n\t\tOk(MzHeader {\n\n\t\t\tsignature,\n\n\t\t\tlast_block_bytes,\n", "file_path": "libpseudos/src/exe_loader.rs", "rank": 79, "score": 1.1408945464472282 }, { "content": "\t#[test] fn test_dir_listing_cache() {\n\n\t\tlet mut dir_listing = DirListingCache::new();\n\n\t\tassert_eq!(String::from_utf8_lossy(&dir_listing.get_dos_name(\"foot.text\").real_dos_name()), String::from_utf8_lossy(b\"FOOT.TEX\"));\n\n\t\tassert_eq!(String::from_utf8_lossy(&dir_listing.get_dos_name(\"foot.text2\").real_dos_name()), String::from_utf8_lossy(b\"FOOT~1.TEX\"));\n\n\t\tassert_eq!(String::from_utf8_lossy(&dir_listing.get_dos_name(\"filewithlongname.txt\").real_dos_name()), String::from_utf8_lossy(b\"FILEWITH.TXT\"));\n\n\t\tassert_eq!(String::from_utf8_lossy(&dir_listing.get_dos_name(\"filewithlongername.txt\").real_dos_name()), 
String::from_utf8_lossy(b\"FILEWI~1.TXT\"));\n\n\t\tassert_eq!(String::from_utf8_lossy(&dir_listing.get_dos_name(\"filewithlongerername.txt\").real_dos_name()), String::from_utf8_lossy(b\"FILEWI~2.TXT\"));\n\n\t}\n\n}\n", "file_path": "libpseudos/src/dos_file_system.rs", "rank": 80, "score": 0.9462822421090551 } ]
Rust
src/lib.rs
cdisselkoen/llvm-ir-analysis
f690c660070fe881a33d055cd30d5f4c352c69f5
mod call_graph; mod control_dep_graph; mod control_flow_graph; mod dominator_tree; mod functions_by_type; pub use crate::call_graph::CallGraph; pub use crate::control_dep_graph::ControlDependenceGraph; pub use crate::control_flow_graph::{CFGNode, ControlFlowGraph}; pub use crate::dominator_tree::{DominatorTree, PostDominatorTree}; pub use crate::functions_by_type::FunctionsByType; use llvm_ir::{Function, Module}; use log::debug; use std::cell::{Ref, RefCell}; use std::collections::HashMap; pub struct ModuleAnalysis<'m> { module: &'m Module, call_graph: SimpleCache<CallGraph<'m>>, functions_by_type: SimpleCache<FunctionsByType<'m>>, fn_analyses: HashMap<&'m str, FunctionAnalysis<'m>>, } impl<'m> ModuleAnalysis<'m> { pub fn new(module: &'m Module) -> Self { Self { module, call_graph: SimpleCache::new(), functions_by_type: SimpleCache::new(), fn_analyses: module.functions.iter() .map(|f| (f.name.as_str(), FunctionAnalysis::new(f))) .collect(), } } pub fn module(&self) -> &'m Module { self.module } pub fn call_graph(&self) -> Ref<CallGraph<'m>> { self.call_graph.get_or_insert_with(|| { let functions_by_type = self.functions_by_type(); debug!("computing single-module call graph"); CallGraph::new(std::iter::once(self.module), &functions_by_type) }) } pub fn functions_by_type(&self) -> Ref<FunctionsByType<'m>> { self.functions_by_type.get_or_insert_with(|| { debug!("computing single-module functions-by-type"); FunctionsByType::new(std::iter::once(self.module)) }) } pub fn fn_analysis<'s>(&'s self, func_name: &str) -> &'s FunctionAnalysis<'m> { self.fn_analyses.get(func_name) .unwrap_or_else(|| panic!("Function named {:?} not found in the Module", func_name)) } } pub struct CrossModuleAnalysis<'m> { modules: Vec<&'m Module>, call_graph: SimpleCache<CallGraph<'m>>, functions_by_type: SimpleCache<FunctionsByType<'m>>, module_analyses: HashMap<&'m str, ModuleAnalysis<'m>>, } impl<'m> CrossModuleAnalysis<'m> { pub fn new(modules: impl IntoIterator<Item = &'m Module>) -> Self { 
let modules: Vec<&'m Module> = modules.into_iter().collect(); let module_analyses = modules .iter() .copied() .map(|m| (m.name.as_str(), ModuleAnalysis::new(m))) .collect(); Self { modules, call_graph: SimpleCache::new(), functions_by_type: SimpleCache::new(), module_analyses, } } pub fn modules<'s>(&'s self) -> impl Iterator<Item = &'m Module> + 's { self.modules.iter().copied() } pub fn functions<'s>(&'s self) -> impl Iterator<Item = &'m Function> + 's { self.modules().map(|m| m.functions.iter()).flatten() } pub fn call_graph(&self) -> Ref<CallGraph<'m>> { self.call_graph.get_or_insert_with(|| { let functions_by_type = self.functions_by_type(); debug!("computing multi-module call graph"); CallGraph::new(self.modules(), &functions_by_type) }) } pub fn functions_by_type(&self) -> Ref<FunctionsByType<'m>> { self.functions_by_type.get_or_insert_with(|| { debug!("computing multi-module functions-by-type"); FunctionsByType::new(self.modules()) }) } pub fn module_analysis<'s>(&'s self, mod_name: &str) -> &'s ModuleAnalysis<'m> { self.module_analyses.get(mod_name) .unwrap_or_else(|| panic!("Module named {:?} not found in the CrossModuleAnalysis", mod_name)) } pub fn get_func_by_name(&self, func_name: &str) -> Option<(&'m Function, &'m Module)> { let mut retval = None; for &module in &self.modules { if let Some(func) = module.get_func_by_name(func_name) { match retval { None => retval = Some((func, module)), Some((_, retmod)) => panic!("Multiple functions found with name {:?}: one in module {:?}, another in module {:?}", func_name, &retmod.name, &module.name), } } } retval } } pub struct FunctionAnalysis<'m> { function: &'m Function, control_flow_graph: SimpleCache<ControlFlowGraph<'m>>, dominator_tree: SimpleCache<DominatorTree<'m>>, postdominator_tree: SimpleCache<PostDominatorTree<'m>>, control_dep_graph: SimpleCache<ControlDependenceGraph<'m>>, } impl<'m> FunctionAnalysis<'m> { pub fn new(function: &'m Function) -> Self { Self { function, control_flow_graph: 
SimpleCache::new(), dominator_tree: SimpleCache::new(), postdominator_tree: SimpleCache::new(), control_dep_graph: SimpleCache::new(), } } pub fn control_flow_graph(&self) -> Ref<ControlFlowGraph<'m>> { self.control_flow_graph.get_or_insert_with(|| { debug!("computing control flow graph for {}", &self.function.name); ControlFlowGraph::new(self.function) }) } pub fn dominator_tree(&self) -> Ref<DominatorTree<'m>> { self.dominator_tree.get_or_insert_with(|| { let cfg = self.control_flow_graph(); debug!("computing dominator tree for {}", &self.function.name); DominatorTree::new(&cfg) }) } pub fn postdominator_tree(&self) -> Ref<PostDominatorTree<'m>> { self.postdominator_tree.get_or_insert_with(|| { let cfg = self.control_flow_graph(); debug!("computing postdominator tree for {}", &self.function.name); PostDominatorTree::new(&cfg) }) } pub fn control_dependence_graph(&self) -> Ref<ControlDependenceGraph<'m>> { self.control_dep_graph.get_or_insert_with(|| { let cfg = self.control_flow_graph(); let postdomtree = self.postdominator_tree(); debug!("computing control dependence graph for {}", &self.function.name); ControlDependenceGraph::new(&cfg, &postdomtree) }) } } struct SimpleCache<T> { data: RefCell<Option<T>>, } impl<T> SimpleCache<T> { fn new() -> Self { Self { data: RefCell::new(None), } } fn get_or_insert_with(&self, f: impl FnOnce() -> T) -> Ref<T> { let need_mutable_borrow = self.data.borrow().is_none(); if need_mutable_borrow { let old_val = self.data.borrow_mut().replace(f()); debug_assert!(old_val.is_none()); } Ref::map(self.data.borrow(), |o| { o.as_ref().expect("should be populated now") }) } }
mod call_graph; mod control_dep_graph; mod control_flow_graph; mod dominator_tree; mod functions_by_type; pub use crate::call_graph::CallGraph; pub use crate::control_dep_graph::ControlDependenceGraph; pub use crate::control_flow_graph::{CFGNode, ControlFlowGraph}; pub use crate::dominator_tree::{DominatorTree, PostDominatorTree}; pub use crate::functions_by_type::FunctionsByType; use llvm_ir::{Function, Module}; use log::debug; use std::cell::{Ref, RefCell}; use std::collections::HashMap; pub struct ModuleAnalysis<'m> { module: &'m Module, call_graph: SimpleCache<CallGraph<'m>>, functions_by_type: SimpleCache<FunctionsByType<'m>>, fn_analyses: HashMap<&'m str, FunctionAnalysis<'m>>, } impl<'m> ModuleAnalysis<'m> { pub fn new(module: &'m Module) -> Self { Self { module, call_graph: SimpleCache::new(), functions_by_type: SimpleCache::new(), fn_analyses: module.functions.iter() .map(|f| (f.name.as_str(), FunctionAnalysis::new(f))) .collect(), } } pub fn module(&self) -> &'m Module { self.module }
pub fn functions_by_type(&self) -> Ref<FunctionsByType<'m>> { self.functions_by_type.get_or_insert_with(|| { debug!("computing single-module functions-by-type"); FunctionsByType::new(std::iter::once(self.module)) }) } pub fn fn_analysis<'s>(&'s self, func_name: &str) -> &'s FunctionAnalysis<'m> { self.fn_analyses.get(func_name) .unwrap_or_else(|| panic!("Function named {:?} not found in the Module", func_name)) } } pub struct CrossModuleAnalysis<'m> { modules: Vec<&'m Module>, call_graph: SimpleCache<CallGraph<'m>>, functions_by_type: SimpleCache<FunctionsByType<'m>>, module_analyses: HashMap<&'m str, ModuleAnalysis<'m>>, } impl<'m> CrossModuleAnalysis<'m> { pub fn new(modules: impl IntoIterator<Item = &'m Module>) -> Self { let modules: Vec<&'m Module> = modules.into_iter().collect(); let module_analyses = modules .iter() .copied() .map(|m| (m.name.as_str(), ModuleAnalysis::new(m))) .collect(); Self { modules, call_graph: SimpleCache::new(), functions_by_type: SimpleCache::new(), module_analyses, } } pub fn modules<'s>(&'s self) -> impl Iterator<Item = &'m Module> + 's { self.modules.iter().copied() } pub fn functions<'s>(&'s self) -> impl Iterator<Item = &'m Function> + 's { self.modules().map(|m| m.functions.iter()).flatten() } pub fn call_graph(&self) -> Ref<CallGraph<'m>> { self.call_graph.get_or_insert_with(|| { let functions_by_type = self.functions_by_type(); debug!("computing multi-module call graph"); CallGraph::new(self.modules(), &functions_by_type) }) } pub fn functions_by_type(&self) -> Ref<FunctionsByType<'m>> { self.functions_by_type.get_or_insert_with(|| { debug!("computing multi-module functions-by-type"); FunctionsByType::new(self.modules()) }) } pub fn module_analysis<'s>(&'s self, mod_name: &str) -> &'s ModuleAnalysis<'m> { self.module_analyses.get(mod_name) .unwrap_or_else(|| panic!("Module named {:?} not found in the CrossModuleAnalysis", mod_name)) } pub fn get_func_by_name(&self, func_name: &str) -> Option<(&'m Function, &'m Module)> { let 
mut retval = None; for &module in &self.modules { if let Some(func) = module.get_func_by_name(func_name) { match retval { None => retval = Some((func, module)), Some((_, retmod)) => panic!("Multiple functions found with name {:?}: one in module {:?}, another in module {:?}", func_name, &retmod.name, &module.name), } } } retval } } pub struct FunctionAnalysis<'m> { function: &'m Function, control_flow_graph: SimpleCache<ControlFlowGraph<'m>>, dominator_tree: SimpleCache<DominatorTree<'m>>, postdominator_tree: SimpleCache<PostDominatorTree<'m>>, control_dep_graph: SimpleCache<ControlDependenceGraph<'m>>, } impl<'m> FunctionAnalysis<'m> { pub fn new(function: &'m Function) -> Self { Self { function, control_flow_graph: SimpleCache::new(), dominator_tree: SimpleCache::new(), postdominator_tree: SimpleCache::new(), control_dep_graph: SimpleCache::new(), } } pub fn control_flow_graph(&self) -> Ref<ControlFlowGraph<'m>> { self.control_flow_graph.get_or_insert_with(|| { debug!("computing control flow graph for {}", &self.function.name); ControlFlowGraph::new(self.function) }) } pub fn dominator_tree(&self) -> Ref<DominatorTree<'m>> { self.dominator_tree.get_or_insert_with(|| { let cfg = self.control_flow_graph(); debug!("computing dominator tree for {}", &self.function.name); DominatorTree::new(&cfg) }) } pub fn postdominator_tree(&self) -> Ref<PostDominatorTree<'m>> { self.postdominator_tree.get_or_insert_with(|| { let cfg = self.control_flow_graph(); debug!("computing postdominator tree for {}", &self.function.name); PostDominatorTree::new(&cfg) }) } pub fn control_dependence_graph(&self) -> Ref<ControlDependenceGraph<'m>> { self.control_dep_graph.get_or_insert_with(|| { let cfg = self.control_flow_graph(); let postdomtree = self.postdominator_tree(); debug!("computing control dependence graph for {}", &self.function.name); ControlDependenceGraph::new(&cfg, &postdomtree) }) } } struct SimpleCache<T> { data: RefCell<Option<T>>, } impl<T> SimpleCache<T> { fn new() -> Self 
{ Self { data: RefCell::new(None), } } fn get_or_insert_with(&self, f: impl FnOnce() -> T) -> Ref<T> { let need_mutable_borrow = self.data.borrow().is_none(); if need_mutable_borrow { let old_val = self.data.borrow_mut().replace(f()); debug_assert!(old_val.is_none()); } Ref::map(self.data.borrow(), |o| { o.as_ref().expect("should be populated now") }) } }
pub fn call_graph(&self) -> Ref<CallGraph<'m>> { self.call_graph.get_or_insert_with(|| { let functions_by_type = self.functions_by_type(); debug!("computing single-module call graph"); CallGraph::new(std::iter::once(self.module), &functions_by_type) }) }
function_block-full_function
[ { "content": "pub fn may_panic(a: i32) -> i32 {\n\n if a > 2 {\n\n panic!(\"a > 2\");\n\n } else {\n\n return 1;\n\n }\n\n}\n", "file_path": "tests/bcfiles/panic.rs", "rank": 0, "score": 69531.15541423167 }, { "content": "#[test]\n\nfn call_graph() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let callgraph = analysis.call_graph();\n\n\n\n // none of these functions have calls or are called\n\n for func_name in FUNC_NAMES {\n\n assert_eq!(callgraph.callers(func_name).count(), 0);\n\n assert_eq!(callgraph.callees(func_name).count(), 0);\n\n }\n\n}\n\n\n", "file_path": "tests/basic.rs", "rank": 1, "score": 67640.57916540265 }, { "content": "#[test]\n\nfn call_graph() {\n\n init_logging();\n\n let module = Module::from_bc_path(CALL_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let callgraph = analysis.call_graph();\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"simple_callee\").sorted().collect();\n\n assert_eq!(callers, vec![\n\n \"caller_with_loop\",\n\n \"conditional_caller\",\n\n \"recursive_and_normal_caller\",\n\n \"simple_caller\",\n\n \"twice_caller\",\n\n ]);\n\n let callees: Vec<&str> = callgraph.callees(\"simple_callee\").sorted().collect();\n\n assert!(callees.is_empty());\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"simple_caller\").sorted().collect();\n\n assert_eq!(callers, vec![\"nested_caller\"]);\n", "file_path": "tests/call.rs", "rank": 2, "score": 67640.57916540265 }, { "content": "fn init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n\n/// loop.c and loop.bc are taken from [`haybale`]'s test suite\n\n///\n\n/// [`haybale`]: https://crates.io/crates/haybale\n\nconst LOOP_BC_PATH: &'static str = 
\"tests/bcfiles/loop.bc\";\n\n\n", "file_path": "tests/loop.rs", "rank": 3, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn has_switch_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 2\n\n // ___ / | \\ ___\n\n // / / | | | \\ \\\n\n // | | | | | \\ \\\n\n // | | | | | | \\\n\n // 4 5 7 | 10 11 12\n\n // \\ \\ \\ | / / /\n\n // \\ \\ _ | __ / /\n\n // \\ ___ | _____ /\n\n // |\n\n // 14\n\n\n\n let bb2_name = Name::from(2);\n", "file_path": "tests/basic.rs", "rank": 4, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn has_switch_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 2\n\n // ___ / | \\ ___\n\n // / / | | | \\ \\\n\n // | | | | | \\ \\\n\n // | | | | | | \\\n\n // 4 5 7 | 10 11 12\n\n // \\ \\ \\ | / / /\n\n // \\ \\ _ | __ / /\n\n // \\ ___ | _____ /\n\n // |\n\n // 14\n\n\n\n let domtree = analysis.fn_analysis(\"has_switch\").dominator_tree();\n", "file_path": "tests/basic.rs", "rank": 5, "score": 46941.81035336772 }, { "content": "fn init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n\n/// basic.c and basic.bc are taken from [`haybale`]'s test suite\n\n///\n\n/// [`haybale`]: https://crates.io/crates/haybale\n\nconst BASIC_BC_PATH: &'static str = \"tests/bcfiles/basic.bc\";\n\n\n\n/// Function names in basic.bc\n\nconst FUNC_NAMES: &'static [&'static str] = &[\n\n \"no_args_zero\",\n\n \"no_args_nozero\",\n\n \"one_arg\",\n\n \"two_args\",\n\n \"three_args\",\n\n \"four_args\",\n\n \"five_args\",\n\n \"binops\",\n", "file_path": "tests/basic.rs", "rank": 6, "score": 46941.81035336772 }, { "content": "fn 
init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n\n/// call.c / call.bc, functionptr.c / functionptr.bc, and crossmod.c /\n\n/// crossmod.bc are all taken from [`haybale`]'s test suite\n\n///\n\n/// [`haybale`]: https://crates.io/crates/haybale\n\nconst CALL_BC_PATH: &'static str = \"tests/bcfiles/call.bc\";\n\nconst FUNCTIONPTR_BC_PATH: &'static str = \"tests/bcfiles/functionptr.bc\";\n\nconst CROSSMOD_BC_PATH: &'static str = \"tests/bcfiles/crossmod.bc\";\n\n\n", "file_path": "tests/call.rs", "rank": 7, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn trivial_domtrees() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n for func_name in &[\n\n \"no_args_zero\",\n\n \"no_args_nozero\",\n\n \"one_arg\",\n\n \"two_args\",\n\n \"three_args\",\n\n \"four_args\",\n\n \"five_args\",\n\n \"binops\",\n\n \"conditional_with_and\",\n\n \"int8t\",\n\n \"int16t\",\n\n \"int32t\",\n\n \"int64t\",\n", "file_path": "tests/basic.rs", "rank": 8, "score": 46941.81035336772 }, { "content": "fn init_logging() {\n\n // capture log messages with test harness\n\n let _ = env_logger::builder().is_test(true).try_init();\n\n}\n\n\n\n/// panic.c and panic.bc are taken from [`haybale`]'s test suite\n\n///\n\n/// [`haybale`]: https://crates.io/crates/haybale\n\nconst PANIC_BC_PATH: &'static str = \"tests/bcfiles/panic.bc\";\n\n\n", "file_path": "tests/rustpanic.rs", "rank": 9, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn while_loop_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"while_loop\").control_flow_graph();\n\n\n\n // CFG:\n\n // 1\n\n // | _\n\n // | / 
\\ (self-loop on 6)\n\n // 6 -- /\n\n // |\n\n // |\n\n // 12\n\n\n\n let bb1_name = Name::from(1);\n\n let _bb1_node = CFGNode::Block(&bb1_name);\n\n let bb6_name = Name::from(6);\n\n let bb6_node = CFGNode::Block(&bb6_name);\n", "file_path": "tests/loop.rs", "rank": 10, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn has_switch_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"has_switch\").control_flow_graph();\n\n\n\n // CFG:\n\n // 2\n\n // ___ / | \\ ___\n\n // / / | | | \\ \\\n\n // | | | | | \\ \\\n\n // | | | | | | \\\n\n // 4 5 7 | 10 11 12\n\n // \\ \\ \\ | / / /\n\n // \\ \\ _ | __ / /\n\n // \\ ___ | _____ /\n\n // |\n\n // 14\n\n\n", "file_path": "tests/basic.rs", "rank": 11, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn for_loop_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"for_loop\").control_flow_graph();\n\n\n\n // CFG:\n\n // 1 _\n\n // | \\ / \\\n\n // | 9 -- /\n\n // | /\n\n // 6\n\n\n\n let bb1_name = Name::from(1);\n\n let _bb1_node = CFGNode::Block(&bb1_name);\n\n let bb6_name = Name::from(6);\n\n let bb6_node = CFGNode::Block(&bb6_name);\n\n let bb9_name = Name::from(9);\n\n let bb9_node = CFGNode::Block(&bb9_name);\n", "file_path": "tests/loop.rs", "rank": 12, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn trivial_cfgs() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n for func_name in &[\n\n \"no_args_zero\",\n\n \"no_args_nozero\",\n\n \"one_arg\",\n\n \"two_args\",\n\n \"three_args\",\n\n 
\"four_args\",\n\n \"five_args\",\n\n \"binops\",\n\n \"conditional_with_and\",\n\n \"int8t\",\n\n \"int16t\",\n\n \"int32t\",\n\n \"int64t\",\n\n \"mixed_bitwidths\",\n\n ] {\n\n let cfg = analysis.fn_analysis(func_name).control_flow_graph();\n\n let entry = cfg.entry();\n\n assert_eq!(cfg.preds(entry).count(), 0);\n\n let succs = cfg.succs(entry).collect::<Vec<_>>();\n\n assert_eq!(succs, vec![CFGNode::Return]);\n\n }\n\n}\n\n\n", "file_path": "tests/basic.rs", "rank": 13, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn while_loop_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fn_analysis = analysis.fn_analysis(\"while_loop\");\n\n\n\n // CFG:\n\n // 1\n\n // | _\n\n // | / \\ (self-loop on 6)\n\n // 6 -- /\n\n // |\n\n // |\n\n // 12\n\n\n\n let domtree = fn_analysis.dominator_tree();\n\n assert_eq!(domtree.idom(&Name::from(1)), None);\n\n assert_eq!(domtree.idom(&Name::from(6)), Some(&Name::from(1)));\n\n assert_eq!(domtree.idom(&Name::from(12)), Some(&Name::from(6)));\n\n assert_eq!(domtree.idom_of_return(), &Name::from(12));\n\n\n\n let postdomtree = fn_analysis.postdominator_tree();\n\n assert_eq!(postdomtree.ipostdom(&Name::from(1)), Some(CFGNode::Block(&Name::from(6))));\n\n assert_eq!(postdomtree.ipostdom(&Name::from(6)), Some(CFGNode::Block(&Name::from(12))));\n\n assert_eq!(postdomtree.ipostdom(&Name::from(12)), Some(CFGNode::Return));\n\n}\n\n\n", "file_path": "tests/loop.rs", "rank": 14, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn for_loop_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 1 _\n\n // | \\ / \\\n\n // | 9 -- /\n\n // | /\n\n // 6\n\n\n\n let cdg = 
analysis.fn_analysis(\"for_loop\").control_dependence_graph();\n\n\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(1)).count(), 0);\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(6)).count(), 0);\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(9)).sorted().collect::<Vec<_>>(), vec![&Name::from(1), &Name::from(9)]);\n\n\n\n assert_eq!(cdg.get_control_dependencies(&Name::from(1)).count(), 0);\n\n assert_eq!(cdg.get_control_dependencies(&Name::from(6)).count(), 0);\n\n assert_eq!(cdg.get_control_dependencies(&Name::from(9)).sorted().collect::<Vec<_>>(), vec![&Name::from(1), &Name::from(9)]);\n\n}\n\n\n", "file_path": "tests/loop.rs", "rank": 15, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn for_loop_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fn_analysis = analysis.fn_analysis(\"for_loop\");\n\n\n\n // CFG:\n\n // 1 _\n\n // | \\ / \\\n\n // | 9 -- /\n\n // | /\n\n // 6\n\n\n\n let domtree = fn_analysis.dominator_tree();\n\n assert_eq!(domtree.idom(&Name::from(1)), None);\n\n assert_eq!(domtree.idom(&Name::from(6)), Some(&Name::from(1)));\n\n assert_eq!(domtree.idom(&Name::from(9)), Some(&Name::from(1)));\n\n assert_eq!(domtree.idom_of_return(), &Name::from(6));\n\n\n\n let postdomtree = fn_analysis.postdominator_tree();\n\n assert_eq!(postdomtree.ipostdom(&Name::from(1)), Some(CFGNode::Block(&Name::from(6))));\n\n assert_eq!(postdomtree.ipostdom(&Name::from(6)), Some(CFGNode::Return));\n\n assert_eq!(postdomtree.ipostdom(&Name::from(9)), Some(CFGNode::Block(&Name::from(6))));\n\n}\n\n\n", "file_path": "tests/loop.rs", "rank": 16, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn while_loop_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let 
analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 1\n\n // | _\n\n // | / \\ (self-loop on 6)\n\n // 6 -- /\n\n // |\n\n // |\n\n // 12\n\n\n\n let cdg = analysis.fn_analysis(\"while_loop\").control_dependence_graph();\n\n\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(1)).count(), 0);\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(6)).collect::<Vec<_>>(), vec![&Name::from(6)]);\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(12)).count(), 0);\n", "file_path": "tests/loop.rs", "rank": 17, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn functions_by_type() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fbt = analysis.functions_by_type();\n\n\n\n let functy = module.types.func_type(\n\n module.types.void(),\n\n vec![],\n\n false,\n\n );\n\n assert_eq!(fbt.functions_with_type(&functy).count(), 0);\n\n\n\n let functy = module.types.func_type(\n\n module.types.i32(),\n\n vec![],\n\n false,\n\n );\n\n let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n", "file_path": "tests/basic.rs", "rank": 18, "score": 46941.81035336772 }, { "content": "#[test]\n\nfn conditional_nozero_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"conditional_nozero\").control_flow_graph();\n\n\n\n // CFG:\n\n // 2\n\n // | \\\n\n // | 4\n\n // | | \\\n\n // | | 8\n\n // | 6 | \\\n\n // | | 10 12\n\n // | | | |\n\n // | | | /\n\n // \\ | / /\n\n // 14\n\n\n", "file_path": "tests/basic.rs", "rank": 19, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn conditional_false_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n 
.unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"conditional_false\").control_flow_graph();\n\n\n\n // CFG:\n\n // 2\n\n // / \\\n\n // 4 8\n\n // \\ /\n\n // 12\n\n\n\n let bb2_name = Name::from(2);\n\n let _bb2_node = CFGNode::Block(&bb2_name);\n\n let bb4_name = Name::from(4);\n\n let bb4_node = CFGNode::Block(&bb4_name);\n\n let bb8_name = Name::from(8);\n\n let bb8_node = CFGNode::Block(&bb8_name);\n", "file_path": "tests/basic.rs", "rank": 20, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn conditional_false_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 2\n\n // / \\\n\n // 4 8\n\n // \\ /\n\n // 12\n\n\n\n let bb2_name = Name::from(2);\n\n let _bb2_node = CFGNode::Block(&bb2_name);\n\n let bb4_name = Name::from(4);\n\n let bb4_node = CFGNode::Block(&bb4_name);\n\n let bb8_name = Name::from(8);\n\n let bb8_node = CFGNode::Block(&bb8_name);\n\n let bb12_name = Name::from(12);\n", "file_path": "tests/basic.rs", "rank": 21, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn search_array_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"search_array\").control_flow_graph();\n\n\n\n // CFG:\n\n // 1 _\n\n // | / \\\n\n // 4 -- /\n\n // |\n\n // 11 <---- \\\n\n // / \\ |\n\n // 19 16 --> /\n\n // \\ /\n\n // 21\n\n\n\n let bb1_name = Name::from(1);\n\n let _bb1_node = CFGNode::Block(&bb1_name);\n", "file_path": "tests/loop.rs", "rank": 22, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn loop_with_cond_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n 
.unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"loop_with_cond\").control_flow_graph();\n\n\n\n // CFG:\n\n // 1\n\n // |\n\n // 6 <---\n\n // | \\ \\\n\n // | 10 |\n\n // | / | |\n\n // 13 / |\n\n // | / /\n\n // 16 --->\n\n // |\n\n // 20\n\n\n", "file_path": "tests/loop.rs", "rank": 23, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn functionptr_call_graph() {\n\n init_logging();\n\n let module = Module::from_bc_path(FUNCTIONPTR_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fbt = analysis.functions_by_type();\n\n let callgraph = analysis.call_graph();\n\n\n\n let footype_functions: Vec<&str> = fbt.functions_with_type(&module.types.func_type(\n\n module.types.i32(),\n\n vec![module.types.i32(), module.types.i32()],\n\n false,\n\n )).sorted().collect();\n\n assert_eq!(footype_functions, vec![\"bar\", \"foo\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"foo\").sorted().collect();\n\n assert_eq!(callers, vec![\"calls_fptr\", \"calls_through_struct\"]);\n\n let callees: Vec<&str> = callgraph.callees(\"foo\").sorted().collect();\n\n assert!(callees.is_empty());\n\n\n", "file_path": "tests/call.rs", "rank": 24, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn nested_loop_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fn_analysis = analysis.fn_analysis(\"nested_loop\");\n\n\n\n // CFG:\n\n // 1\n\n // | \\\n\n // | 5 <----\n\n // | | _ \\\n\n // | | / | |\n\n // | 13 -- / |\n\n // | | /\n\n // | 10 ---->\n\n // | /\n\n // 7\n\n\n\n let domtree = fn_analysis.dominator_tree();\n", "file_path": "tests/loop.rs", "rank": 25, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn 
conditional_false_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 2\n\n // / \\\n\n // 4 8\n\n // \\ /\n\n // 12\n\n\n\n let bb2_name = Name::from(2);\n\n let bb4_name = Name::from(4);\n\n let bb8_name = Name::from(8);\n\n let bb12_name = Name::from(12);\n\n\n\n let cdg = analysis.fn_analysis(\"conditional_false\").control_dependence_graph();\n\n\n", "file_path": "tests/basic.rs", "rank": 26, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn conditional_true_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 2\n\n // / \\\n\n // 4 8\n\n // \\ /\n\n // 12\n\n\n\n let bb2_name = Name::from(2);\n\n let _bb2_node = CFGNode::Block(&bb2_name);\n\n let bb4_name = Name::from(4);\n\n let bb4_node = CFGNode::Block(&bb4_name);\n\n let bb8_name = Name::from(8);\n\n let bb8_node = CFGNode::Block(&bb8_name);\n\n let bb12_name = Name::from(12);\n", "file_path": "tests/basic.rs", "rank": 27, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn begin_panic_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(PANIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fn_analysis = analysis.fn_analysis(\"_ZN3std9panicking11begin_panic17h5ae0871c3ba84f98E\");\n\n\n\n // CFG:\n\n // start\n\n // / \\\n\n // cleanup bb2\n\n // | / \\\n\n // | / bb4\n\n // | | / \\\n\n // | | / \\\n\n // | cleanup1 unreachable\n\n // | / |\n\n // | bb3 (unreachable)\n\n // | /\n\n // bb6\n", "file_path": "tests/rustpanic.rs", "rank": 28, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn begin_panic_cfg() {\n\n init_logging();\n\n let module = 
Module::from_bc_path(PANIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"_ZN3std9panicking11begin_panic17h5ae0871c3ba84f98E\").control_flow_graph();\n\n\n\n // CFG:\n\n // start\n\n // / \\\n\n // cleanup bb2\n\n // | / \\\n\n // | / bb4\n\n // | | / \\\n\n // | | / \\\n\n // | cleanup1 unreachable\n\n // | / |\n\n // | bb3 (unreachable)\n\n // | /\n\n // bb6\n", "file_path": "tests/rustpanic.rs", "rank": 29, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn conditional_nozero_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 2\n\n // | \\\n\n // | 4\n\n // | | \\\n\n // | | 8\n\n // | 6 | \\\n\n // | | 10 12\n\n // | | | |\n\n // | | | /\n\n // \\ | / /\n\n // 14\n\n\n\n let domtree = analysis.fn_analysis(\"conditional_nozero\").dominator_tree();\n", "file_path": "tests/basic.rs", "rank": 30, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn nested_loop_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"nested_loop\").control_flow_graph();\n\n\n\n // CFG:\n\n // 1\n\n // | \\\n\n // | 5 <----\n\n // | | _ \\\n\n // | | / | |\n\n // | 13 -- / |\n\n // | | /\n\n // | 10 ---->\n\n // | /\n\n // 7\n\n\n\n let bb1_name = Name::from(1);\n", "file_path": "tests/loop.rs", "rank": 31, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn nested_loop_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 1\n\n // | \\\n\n // | 
5 <----\n\n // | | _ \\\n\n // | | / | |\n\n // | 13 -- / |\n\n // | | /\n\n // | 10 ---->\n\n // | /\n\n // 7\n\n\n\n let cdg = analysis.fn_analysis(\"nested_loop\").control_dependence_graph();\n\n\n", "file_path": "tests/loop.rs", "rank": 32, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn conditional_true_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"conditional_true\").control_flow_graph();\n\n\n\n // CFG:\n\n // 2\n\n // / \\\n\n // 4 8\n\n // \\ /\n\n // 12\n\n\n\n let bb2_name = Name::from(2);\n\n let _bb2_node = CFGNode::Block(&bb2_name);\n\n let bb4_name = Name::from(4);\n\n let bb4_node = CFGNode::Block(&bb4_name);\n\n let bb8_name = Name::from(8);\n\n let bb8_node = CFGNode::Block(&bb8_name);\n", "file_path": "tests/basic.rs", "rank": 33, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn loop_with_cond_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fn_analysis = analysis.fn_analysis(\"loop_with_cond\");\n\n\n\n // CFG:\n\n // 1\n\n // |\n\n // 6 <---\n\n // | \\ \\\n\n // | 10 |\n\n // | / | |\n\n // 13 / |\n\n // | / /\n\n // 16 --->\n\n // |\n\n // 20\n\n\n", "file_path": "tests/loop.rs", "rank": 34, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn search_array_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 1 _\n\n // | / \\\n\n // 4 -- /\n\n // |\n\n // 11 <---- \\\n\n // / \\ |\n\n // 19 16 --> /\n\n // \\ /\n\n // 21\n\n\n\n let cdg = analysis.fn_analysis(\"search_array\").control_dependence_graph();\n\n\n\n 
assert_eq!(cdg.get_imm_control_dependencies(&Name::from(1)).count(), 0);\n", "file_path": "tests/loop.rs", "rank": 35, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn begin_panic_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(PANIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // start\n\n // / \\\n\n // cleanup bb2\n\n // | / \\\n\n // | / bb4\n\n // | | / \\\n\n // | | / \\\n\n // | cleanup1 unreachable\n\n // | / |\n\n // | bb3 (unreachable)\n\n // | /\n\n // bb6\n\n // | \\\n", "file_path": "tests/rustpanic.rs", "rank": 36, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn trivial_control_deps() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n for func_name in &[\n\n \"no_args_zero\",\n\n \"no_args_nozero\",\n\n \"one_arg\",\n\n \"two_args\",\n\n \"three_args\",\n\n \"four_args\",\n\n \"five_args\",\n\n \"binops\",\n\n \"conditional_with_and\",\n\n \"int8t\",\n\n \"int16t\",\n\n \"int32t\",\n\n \"int64t\",\n\n \"mixed_bitwidths\",\n\n ] {\n\n let cdg = analysis.fn_analysis(func_name).control_dependence_graph();\n\n let entry = cdg.entry();\n\n assert_eq!(cdg.get_control_dependencies(entry).count(), 0);\n\n assert_eq!(cdg.get_control_dependents(entry).count(), 0);\n\n }\n\n}\n\n\n", "file_path": "tests/basic.rs", "rank": 37, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn conditional_nozero_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 2\n\n // | \\\n\n // | 4\n\n // | | \\\n\n // | | 8\n\n // | 6 | \\\n\n // | | 10 12\n\n // | | | |\n\n // | | | /\n\n // \\ | / /\n\n // 14\n\n\n\n let bb2_name = 
Name::from(2);\n", "file_path": "tests/basic.rs", "rank": 38, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn search_array_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fn_analysis = analysis.fn_analysis(\"search_array\");\n\n\n\n // CFG:\n\n // 1 _\n\n // | / \\\n\n // 4 -- /\n\n // |\n\n // 11 <---- \\\n\n // / \\ |\n\n // 19 16 --> /\n\n // \\ /\n\n // 21\n\n\n\n let domtree = fn_analysis.dominator_tree();\n\n assert_eq!(domtree.idom(&Name::from(1)), None);\n", "file_path": "tests/loop.rs", "rank": 39, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn loop_with_cond_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 1\n\n // |\n\n // 6 <---\n\n // | \\ \\\n\n // | 10 |\n\n // | / | |\n\n // 13 / |\n\n // | / /\n\n // 16 --->\n\n // |\n\n // 20\n\n\n\n let cdg = analysis.fn_analysis(\"loop_with_cond\").control_dependence_graph();\n", "file_path": "tests/loop.rs", "rank": 40, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn crossmod_call_graph() {\n\n init_logging();\n\n let call_module = Module::from_bc_path(CALL_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let crossmod_module = Module::from_bc_path(CROSSMOD_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let modules = [call_module, crossmod_module];\n\n let analysis = CrossModuleAnalysis::new(&modules);\n\n let callgraph = analysis.call_graph();\n\n\n\n // this function isn't involved in cross-module calls, it should still have the same results\n\n let callers: Vec<&str> = callgraph.callers(\"conditional_caller\").sorted().collect();\n\n assert!(callers.is_empty());\n\n let callees: Vec<&str> 
= callgraph.callees(\"conditional_caller\").sorted().collect();\n\n assert_eq!(callees, vec![\"simple_callee\"]);\n\n\n\n // this function also isn't involved in cross-module calls; it sits in the other module\n\n let callers: Vec<&str> = callgraph.callers(\"cross_module_nested_near_caller\").sorted().collect();\n\n assert!(callers.is_empty());\n\n let callees: Vec<&str> = callgraph.callees(\"cross_module_nested_near_caller\").sorted().collect();\n", "file_path": "tests/call.rs", "rank": 41, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn conditional_true_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(BASIC_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 2\n\n // / \\\n\n // 4 8\n\n // \\ /\n\n // 12\n\n\n\n let bb2_name = Name::from(2);\n\n let bb4_name = Name::from(4);\n\n let bb8_name = Name::from(8);\n\n let bb12_name = Name::from(12);\n\n\n\n let cdg = analysis.fn_analysis(\"conditional_true\").control_dependence_graph();\n\n\n", "file_path": "tests/basic.rs", "rank": 42, "score": 45608.18855370594 }, { "content": "#[test]\n\nfn loop_inside_cond_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fn_analysis = analysis.fn_analysis(\"loop_inside_cond\");\n\n\n\n // CFG:\n\n // 1 _\n\n // / \\ / \\\n\n // 11 5 -- /\n\n // \\ /\n\n // 12\n\n\n\n let domtree = fn_analysis.dominator_tree();\n\n assert_eq!(domtree.idom(&Name::from(1)), None);\n\n assert_eq!(domtree.idom(&Name::from(5)), Some(&Name::from(1)));\n\n assert_eq!(domtree.idom(&Name::from(11)), Some(&Name::from(1)));\n\n assert_eq!(domtree.idom(&Name::from(12)), Some(&Name::from(1)));\n\n assert_eq!(domtree.idom_of_return(), &Name::from(12));\n\n\n\n let postdomtree = fn_analysis.postdominator_tree();\n\n 
assert_eq!(postdomtree.ipostdom(&Name::from(1)), Some(CFGNode::Block(&Name::from(12))));\n\n assert_eq!(postdomtree.ipostdom(&Name::from(5)), Some(CFGNode::Block(&Name::from(12))));\n\n assert_eq!(postdomtree.ipostdom(&Name::from(11)), Some(CFGNode::Block(&Name::from(12))));\n\n assert_eq!(postdomtree.ipostdom(&Name::from(12)), Some(CFGNode::Return));\n\n}\n\n\n", "file_path": "tests/loop.rs", "rank": 44, "score": 44393.98558665465 }, { "content": "#[test]\n\nfn loop_inside_cond_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 1 _\n\n // / \\ / \\\n\n // 11 5 -- /\n\n // \\ /\n\n // 12\n\n\n\n let cdg = analysis.fn_analysis(\"loop_inside_cond\").control_dependence_graph();\n\n\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(1)).count(), 0);\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(5)).sorted().collect::<Vec<_>>(), vec![&Name::from(1), &Name::from(5)]);\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(11)).collect::<Vec<_>>(), vec![&Name::from(1)]);\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(12)).count(), 0);\n\n\n\n assert_eq!(cdg.get_control_dependencies(&Name::from(1)).count(), 0);\n\n assert_eq!(cdg.get_control_dependencies(&Name::from(5)).sorted().collect::<Vec<_>>(), vec![&Name::from(1), &Name::from(5)]);\n\n assert_eq!(cdg.get_control_dependencies(&Name::from(11)).collect::<Vec<_>>(), vec![&Name::from(1)]);\n\n assert_eq!(cdg.get_control_dependencies(&Name::from(12)).count(), 0);\n\n}\n\n\n", "file_path": "tests/loop.rs", "rank": 45, "score": 44393.98558665465 }, { "content": "#[test]\n\nfn loop_zero_iterations_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = 
analysis.fn_analysis(\"loop_zero_iterations\").control_flow_graph();\n\n\n\n // CFG:\n\n // 1\n\n // | \\\n\n // | 5 _\n\n // | | \\ / \\\n\n // | | 11 - /\n\n // | | /\n\n // | 8\n\n // | /\n\n // 18\n\n\n\n let bb1_name = Name::from(1);\n\n let _bb1_node = CFGNode::Block(&bb1_name);\n", "file_path": "tests/loop.rs", "rank": 46, "score": 44393.98558665465 }, { "content": "#[test]\n\nfn loop_inside_cond_cfg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let cfg = analysis.fn_analysis(\"loop_inside_cond\").control_flow_graph();\n\n\n\n // CFG:\n\n // 1 _\n\n // / \\ / \\\n\n // 11 5 -- /\n\n // \\ /\n\n // 12\n\n\n\n let bb1_name = Name::from(1);\n\n let _bb1_node = CFGNode::Block(&bb1_name);\n\n let bb5_name = Name::from(5);\n\n let bb5_node = CFGNode::Block(&bb5_name);\n\n let bb11_name = Name::from(11);\n\n let bb11_node = CFGNode::Block(&bb11_name);\n", "file_path": "tests/loop.rs", "rank": 47, "score": 44393.98558665465 }, { "content": "#[test]\n\nfn loop_zero_iterations_domtree() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n let fn_analysis = analysis.fn_analysis(\"loop_zero_iterations\");\n\n\n\n // CFG:\n\n // 1\n\n // | \\\n\n // | 5 _\n\n // | | \\ / \\\n\n // | | 11 - /\n\n // | | /\n\n // | 8\n\n // | /\n\n // 18\n\n\n\n let domtree = fn_analysis.dominator_tree();\n\n assert_eq!(domtree.idom(&Name::from(1)), None);\n", "file_path": "tests/loop.rs", "rank": 48, "score": 44393.98558665465 }, { "content": "#[test]\n\nfn loop_zero_iterations_cdg() {\n\n init_logging();\n\n let module = Module::from_bc_path(LOOP_BC_PATH)\n\n .unwrap_or_else(|e| panic!(\"Failed to parse module: {}\", e));\n\n let analysis = ModuleAnalysis::new(&module);\n\n\n\n // CFG:\n\n // 1\n\n 
// | \\\n\n // | 5 _\n\n // | | \\ / \\\n\n // | | 11 - /\n\n // | | /\n\n // | 8\n\n // | /\n\n // 18\n\n\n\n let cdg = analysis.fn_analysis(\"loop_zero_iterations\").control_dependence_graph();\n\n\n\n assert_eq!(cdg.get_imm_control_dependencies(&Name::from(1)).count(), 0);\n", "file_path": "tests/loop.rs", "rank": 49, "score": 44393.98558665465 }, { "content": "struct ControlDependenciesIterator<'m> {\n\n /// Currently implemented by computing all dependencies into a `HashSet` at\n\n /// the beginning and then iterating over that `HashSet`. But this may\n\n /// change, hence the opaque interface\n\n deps: std::collections::hash_set::IntoIter<&'m Name>,\n\n}\n\n\n\nimpl<'m> ControlDependenciesIterator<'m> {\n\n /// Get a new iterator which will iterate over the control dependencies of `block`\n\n fn new(cdg: &ControlDependenceGraph<'m>, block: &'m Name) -> Self {\n\n let mut worklist: Vec<&'m Name> = cdg.get_imm_control_dependencies(block).collect();\n\n let mut deps: HashSet<&'m Name> = HashSet::new();\n\n while let Some(block) = worklist.pop() {\n\n if deps.insert(block) {\n\n worklist.extend(cdg.get_imm_control_dependencies(block))\n\n }\n\n }\n\n Self {\n\n deps: deps.into_iter(),\n\n }\n", "file_path": "src/control_dep_graph.rs", "rank": 50, "score": 42669.41364483175 }, { "content": "struct ControlDependentsIterator<'m> {\n\n /// Currently implemented by computing all dependents into a `HashSet` at the\n\n /// beginning and then iterating over that `HashSet`. 
But this may change,\n\n /// hence the opaque interface\n\n deps: std::collections::hash_set::IntoIter<CFGNode<'m>>,\n\n}\n\n\n\nimpl<'m> ControlDependentsIterator<'m> {\n\n /// Get a new iterator which will iterate over the control dependents of `block`\n\n fn new(cdg: &ControlDependenceGraph<'m>, block: &'m Name) -> Self {\n\n let mut worklist: Vec<CFGNode<'m>> = cdg.get_imm_control_dependents(block).collect();\n\n let mut deps: HashSet<CFGNode<'m>> = HashSet::new();\n\n while let Some(node) = worklist.pop() {\n\n if deps.insert(node) {\n\n worklist.extend(cdg.get_imm_control_dependents_of_cfgnode(node))\n\n }\n\n }\n\n Self {\n\n deps: deps.into_iter(),\n\n }\n", "file_path": "src/control_dep_graph.rs", "rank": 51, "score": 42669.41364483175 }, { "content": "/// Contains state used when constructing the `DominatorTree` or `PostDominatorTree`\n\nstruct DomTreeBuilder<'m, 'a> {\n\n /// The `ControlFlowGraph` we're working from\n\n cfg: &'a ControlFlowGraph<'m>,\n\n\n\n /// Map from `CFGNode` to its rpo number.\n\n ///\n\n /// Unreachable blocks won't be in this map; all reachable blocks will have\n\n /// positive rpo numbers.\n\n rpo_numbers: HashMap<CFGNode<'m>, usize>,\n\n\n\n /// Map from `CFGNode` to the current estimate for its immediate dominator\n\n /// (the entry node maps to `None`).\n\n ///\n\n /// Unreachable blocks won't be in this map.\n\n idoms: HashMap<CFGNode<'m>, Option<CFGNode<'m>>>,\n\n}\n\n\n\nimpl<'m, 'a> DomTreeBuilder<'m, 'a> {\n\n /// Construct a new `DomTreeBuilder`.\n\n ///\n", "file_path": "src/dominator_tree.rs", "rank": 52, "score": 41254.279276737914 }, { "content": "int struct_driver() {\n\n volatile struct StructWithFuncPtr s = { 0 };\n\n s.fptr = get_function_ptr(true);\n\n s.anInt = 3;\n\n return calls_through_struct(&s);\n", "file_path": "tests/bcfiles/functionptr.c", "rank": 53, "score": 23708.027401059924 }, { "content": "__attribute((noinline)) int calls_through_struct(volatile struct StructWithFuncPtr *s) {\n\n return 
s->fptr(s->anInt, 2);\n", "file_path": "tests/bcfiles/functionptr.c", "rank": 54, "score": 23708.027401059924 }, { "content": "struct StructWithFuncPtr {\n\n int anInt;\n\n int (*fptr)(int, int);\n", "file_path": "tests/bcfiles/functionptr.c", "rank": 55, "score": 22807.78640313278 }, { "content": "__attribute__((noinline)) int cross_module_simple_caller(int x) {\n\n return simple_callee(x, 3);\n", "file_path": "tests/bcfiles/crossmod.c", "rank": 56, "score": 21959.34059866313 }, { "content": "int cross_module_twice_caller(int x) {\n\n return simple_callee(x, 5) + simple_callee(x, 1);\n", "file_path": "tests/bcfiles/crossmod.c", "rank": 57, "score": 21959.34059866313 }, { "content": "int cross_module_modify_global(int x) {\n\n global3 = x;\n\n return global3;\n", "file_path": "tests/bcfiles/crossmod.c", "rank": 58, "score": 21955.854680690067 }, { "content": "int cross_module_read_global() {\n\n return global1;\n", "file_path": "tests/bcfiles/crossmod.c", "rank": 59, "score": 21955.854680690067 }, { "content": "int cross_module_nested_far_caller(int x, int y) {\n\n return simple_caller(x + y);\n", "file_path": "tests/bcfiles/crossmod.c", "rank": 60, "score": 21180.993232671517 }, { "content": "int cross_module_nested_near_caller(int x, int y) {\n\n return cross_module_simple_caller(x + y);\n", "file_path": "tests/bcfiles/crossmod.c", "rank": 61, "score": 21180.993232671517 }, { "content": "int cross_module_modify_global_via_call(int x) {\n\n modify_global(x);\n\n return global3;\n", "file_path": "tests/bcfiles/crossmod.c", "rank": 62, "score": 20458.959883884472 }, { "content": "int cross_module_read_global_via_call() {\n\n return read_global();\n", "file_path": "tests/bcfiles/crossmod.c", "rank": 63, "score": 20458.959883884472 }, { "content": "use llvm_ir::{Module, TypeRef};\n\nuse std::collections::{HashMap, HashSet};\n\n\n\n/// Allows you to iterate over all the functions in the analyzed `Module`(s) that\n\n/// have a specified type.\n\n///\n\n/// To construct 
a `FunctionsByType`, use [`ModuleAnalysis`](struct.ModuleAnalysis.html)\n\n/// or [`CrossModuleAnalysis`](struct.CrossModuleAnalysis.html).\n\npub struct FunctionsByType<'m> {\n\n map: HashMap<TypeRef, HashSet<&'m str>>,\n\n}\n\n\n\nimpl<'m> FunctionsByType<'m> {\n\n pub(crate) fn new(modules: impl IntoIterator<Item = &'m Module>) -> Self {\n\n let mut map: HashMap<TypeRef, HashSet<&'m str>> = HashMap::new();\n\n for module in modules {\n\n for func in &module.functions {\n\n map.entry(module.type_of(func)).or_default().insert(&func.name);\n\n }\n\n }\n", "file_path": "src/functions_by_type.rs", "rank": 64, "score": 13.04920359262889 }, { "content": "use crate::functions_by_type::FunctionsByType;\n\nuse either::Either;\n\nuse llvm_ir::{Constant, Instruction, Module, Name, Operand, Type};\n\nuse petgraph::prelude::*;\n\n\n\n/// The call graph for the analyzed `Module`(s): which functions may call which\n\n/// other functions.\n\n///\n\n/// To construct a `CallGraph`, use [`ModuleAnalysis`](struct.ModuleAnalysis.html)\n\n/// or [`CrossModuleAnalysis`](struct.CrossModuleAnalysis.html).\n\npub struct CallGraph<'m> {\n\n /// the call graph itself. 
Nodes are function names, and an edge from F to G\n\n /// indicates F may call G\n\n graph: DiGraphMap<&'m str, ()>,\n\n}\n\n\n\nimpl<'m> CallGraph<'m> {\n\n pub(crate) fn new(\n\n modules: impl IntoIterator<Item = &'m Module>,\n\n functions_by_type: &FunctionsByType<'m>,\n", "file_path": "src/call_graph.rs", "rank": 69, "score": 10.009005604778423 }, { "content": " Self {\n\n map,\n\n }\n\n }\n\n\n\n /// Iterate over all of the functions in the analyzed `Module`(s) that have\n\n /// the specified type\n\n pub fn functions_with_type<'s>(&'s self, ty: &TypeRef) -> impl Iterator<Item = &'m str> + 's {\n\n self.map.get(ty).into_iter().map(|hs| hs.iter().copied()).flatten()\n\n }\n\n}\n", "file_path": "src/functions_by_type.rs", "rank": 71, "score": 9.176691048155295 }, { "content": " let callers: Vec<&str> = callgraph.callers(\"bar\").sorted().collect();\n\n assert_eq!(callers, vec![\"calls_fptr\", \"calls_through_struct\"]);\n\n let callees: Vec<&str> = callgraph.callees(\"bar\").sorted().collect();\n\n assert!(callees.is_empty());\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"calls_fptr\").sorted().collect();\n\n assert_eq!(callers, vec![\"fptr_driver\"]);\n\n let callees: Vec<&str> = callgraph.callees(\"calls_fptr\").sorted().collect();\n\n assert_eq!(callees, vec![\"bar\", \"foo\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"get_function_ptr\").sorted().collect();\n\n assert_eq!(callers, vec![\"fptr_driver\", \"struct_driver\"]);\n\n let callees: Vec<&str> = callgraph.callees(\"get_function_ptr\").sorted().collect();\n\n assert!(callees.is_empty());\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"calls_through_struct\").sorted().collect();\n\n assert_eq!(callers, vec![\"struct_driver\"]);\n\n let callees: Vec<&str> = callgraph.callees(\"calls_through_struct\").sorted().collect();\n\n assert_eq!(callees, vec![\"bar\", \"foo\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"struct_driver\").sorted().collect();\n\n 
assert!(callers.is_empty());\n\n let callees: Vec<&str> = callgraph.callees(\"struct_driver\").sorted().collect();\n\n assert_eq!(callees, vec![\"calls_through_struct\", \"get_function_ptr\", \"llvm.lifetime.end.p0i8\", \"llvm.lifetime.start.p0i8\", \"llvm.memset.p0i8.i64\"]);\n\n}\n\n\n", "file_path": "tests/call.rs", "rank": 72, "score": 8.594818102081387 }, { "content": " /// given function.\n\n ///\n\n /// This analysis conservatively assumes that function pointers may point to\n\n /// any function in the analyzed `Module`(s) that has the appropriate type.\n\n ///\n\n /// Panics if the given function is not found in the analyzed `Module`(s).\n\n pub fn callers<'s>(&'s self, func_name: &'m str) -> impl Iterator<Item = &'m str> + 's {\n\n if !self.graph.contains_node(func_name) {\n\n panic!(\"callers(): function named {:?} not found in the Module(s)\", func_name)\n\n }\n\n self.graph.neighbors_directed(func_name, Direction::Incoming)\n\n }\n\n\n\n /// Get the names of functions in the analyzed `Module`(s) which may be\n\n /// called by the given function.\n\n ///\n\n /// This analysis conservatively assumes that function pointers may point to\n\n /// any function in the analyzed `Module`(s) that has the appropriate type.\n\n ///\n\n /// Panics if the given function is not found in the analyzed `Module`(s).\n\n pub fn callees<'s>(&'s self, func_name: &'m str) -> impl Iterator<Item = &'m str> + 's {\n\n if !self.graph.contains_node(func_name) {\n\n panic!(\"callees(): function named {:?} not found in the Module(s)\", func_name)\n\n }\n\n self.graph.neighbors_directed(func_name, Direction::Outgoing)\n\n }\n\n}\n", "file_path": "src/call_graph.rs", "rank": 73, "score": 7.904960452205474 }, { "content": " assert_eq!(callees, vec![\"cross_module_simple_caller\"]);\n\n\n\n // this function is called cross-module\n\n let callers: Vec<&str> = callgraph.callers(\"simple_callee\").sorted().collect();\n\n assert_eq!(callers, vec![\n\n \"caller_with_loop\",\n\n 
\"conditional_caller\",\n\n \"cross_module_simple_caller\",\n\n \"cross_module_twice_caller\",\n\n \"recursive_and_normal_caller\",\n\n \"simple_caller\",\n\n \"twice_caller\",\n\n ]);\n\n let callees: Vec<&str> = callgraph.callees(\"simple_callee\").sorted().collect();\n\n assert!(callees.is_empty());\n\n}\n", "file_path": "tests/call.rs", "rank": 74, "score": 7.739189004731104 }, { "content": "use crate::control_flow_graph::{CFGNode, ControlFlowGraph};\n\nuse llvm_ir::Name;\n\nuse petgraph::prelude::{Dfs, DiGraphMap, Direction};\n\nuse petgraph::visit::Walker;\n\nuse std::cmp::Ordering;\n\nuse std::collections::HashMap;\n\n\n\n/// The dominator tree for a particular function.\n\n///\n\n/// To construct a `DominatorTree`, use\n\n/// [`FunctionAnalysis`](struct.FunctionAnalysis.html), which you can get\n\n/// from [`ModuleAnalysis`](struct.ModuleAnalysis.html).\n\npub struct DominatorTree<'m> {\n\n /// The graph itself. An edge from bbX to bbY indicates that bbX is the\n\n /// immediate dominator of bbY.\n\n ///\n\n /// That is:\n\n /// - bbX strictly dominates bbY, i.e., bbX appears on every control-flow\n\n /// path from the entry block to bbY (but bbX =/= bbY)\n\n /// - Of the blocks that strictly dominate bbY, bbX is the closest to bbY\n", "file_path": "src/dominator_tree.rs", "rank": 75, "score": 7.200884008228281 }, { "content": "use llvm_ir::{Function, Name, Terminator};\n\nuse petgraph::prelude::{DiGraphMap, Direction};\n\nuse std::fmt;\n\n\n\n/// The control flow graph for a particular function.\n\n///\n\n/// To construct a `ControlFlowGraph`, use\n\n/// [`FunctionAnalysis`](struct.FunctionAnalysis.html), which you can get\n\n/// from [`ModuleAnalysis`](struct.ModuleAnalysis.html).\n\npub struct ControlFlowGraph<'m> {\n\n /// The graph itself. 
Nodes are basic block names, and an edge from bbX to\n\n /// bbY indicates that control may (immediately) flow from bbX to bbY\n\n ///\n\n /// Or, an edge from bbX to `Return` indicates that the function may return\n\n /// from bbX\n\n pub(crate) graph: DiGraphMap<CFGNode<'m>, ()>,\n\n\n\n /// Entry node for the function\n\n pub(crate) entry_node: CFGNode<'m>,\n\n}\n", "file_path": "src/control_flow_graph.rs", "rank": 76, "score": 7.177045817519189 }, { "content": "use itertools::Itertools;\n\nuse llvm_ir::Module;\n\nuse llvm_ir_analysis::*;\n\n\n", "file_path": "tests/call.rs", "rank": 77, "score": 7.141013075374188 }, { "content": "use itertools::Itertools;\n\nuse llvm_ir::{Module, Name};\n\nuse llvm_ir_analysis::*;\n\n\n", "file_path": "tests/loop.rs", "rank": 78, "score": 7.046953404034628 }, { "content": "use itertools::Itertools;\n\nuse llvm_ir::{Module, Name};\n\nuse llvm_ir_analysis::*;\n\n\n", "file_path": "tests/basic.rs", "rank": 79, "score": 7.046953404034628 }, { "content": "use itertools::Itertools;\n\nuse llvm_ir::{Module, Name};\n\nuse llvm_ir_analysis::*;\n\n\n", "file_path": "tests/rustpanic.rs", "rank": 80, "score": 7.046953404034628 }, { "content": "use crate::control_flow_graph::{CFGNode, ControlFlowGraph};\n\nuse crate::dominator_tree::PostDominatorTree;\n\nuse llvm_ir::Name;\n\nuse petgraph::prelude::{DfsPostOrder, DiGraphMap, Direction};\n\nuse petgraph::visit::Walker;\n\nuse std::collections::HashSet;\n\n\n\n/// The control dependence graph for a particular function.\n\n/// https://en.wikipedia.org/wiki/Data_dependency#Control_Dependency\n\n///\n\n/// To construct a `ControlDependenceGraph`, use\n\n/// [`FunctionAnalysis`](struct.FunctionAnalysis.html), which you can get\n\n/// from [`ModuleAnalysis`](struct.ModuleAnalysis.html).\n\npub struct ControlDependenceGraph<'m> {\n\n /// The graph itself. An edge from bbX to bbY indicates that bbX has an\n\n /// immediate control dependence on bbY. 
A path from bbX to bbY indicates\n\n /// that bbX has a control dependence on bbY.\n\n graph: DiGraphMap<CFGNode<'m>, ()>,\n\n\n\n /// Entry node for the function\n", "file_path": "src/control_dep_graph.rs", "rank": 81, "score": 6.931078724233096 }, { "content": "## Getting started\n\n\n\n`llvm-ir-analysis` is on [crates.io](https://crates.io/crates/llvm-ir-analysis),\n\nso you can simply add it as a dependency in your `Cargo.toml`, selecting the\n\nfeature corresponding to the LLVM version you want:\n\n```toml\n\n[dependencies]\n\nllvm-ir-analysis = { version = \"0.3.1\", features = [\"llvm-12\"] }\n\n```\n\nCurrently, the supported LLVM versions are `llvm-8`, `llvm-9`, `llvm-10`,\n\n`llvm-11`, `llvm-12`, and `llvm-13`.\n\nThe corresponding LLVM library must be available on your system; see the\n\n[`llvm-sys`] README for more details and instructions.\n\n\n\nYou'll also need some LLVM IR to analyze, in the form of an [`llvm-ir`]\n\n[`Module`] or [`Function`].\n\nThis can be easily generated from an LLVM bitcode file; for more detailed\n\ninstructions, see [`llvm-ir`'s README](https://crates.io/crates/llvm-ir).\n\n\n\nOnce you have a `Module`, you can construct a [`ModuleAnalysis`] object:\n\n```rust\n\nlet module = Module::from_bc_path(...)?;\n\nlet analysis = ModuleAnalysis::new(&module);\n\n```\n\n\n\nYou can get `Module`-wide analyses such as `analysis.call_graph()`\n\ndirectly from the `ModuleAnalysis` object.\n\nYou can also get `Function`-level analyses such as the control-flow\n\ngraph using `analysis.fn_analysis(\"my_func\")`; or you can construct\n\na [`FunctionAnalysis`] directly with `FunctionAnalysis::new()`.\n\n\n\nFinally, you can get multi-module analyses such as a cross-module\n\ncall graph by starting with a [`CrossModuleAnalysis`] instead of just\n\na [`ModuleAnalysis`]. 
The [`CrossModuleAnalysis`] also provides a\n\n[`ModuleAnalysis`] for each of the included modules, again computed\n\nlazily on demand.\n\n\n\n[`llvm-ir`]: https://crates.io/crates/llvm-ir\n\n[`llvm-sys`]: https://crates.io/crates/llvm-sys\n\n[`Module`]: https://docs.rs/llvm-ir/0.8.1/llvm_ir/module/struct.Module.html\n\n[`Function`]: https://docs.rs/llvm-ir/0.8.1/llvm_ir/function/struct.Function.html\n\n[`ModuleAnalysis`]: https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.ModuleAnalysis.html\n\n[`FunctionAnalysis`]: https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.FunctionAnalysis.html\n\n[`CrossModuleAnalysis`]: https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.CrossModuleAnalysis.html\n", "file_path": "README.md", "rank": 84, "score": 6.600972737410652 }, { "content": " let callees: Vec<&str> = callgraph.callees(\"simple_caller\").sorted().collect();\n\n assert_eq!(callees, vec![\"simple_callee\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"conditional_caller\").sorted().collect();\n\n assert!(callers.is_empty());\n\n let callees: Vec<&str> = callgraph.callees(\"conditional_caller\").sorted().collect();\n\n assert_eq!(callees, vec![\"simple_callee\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"twice_caller\").sorted().collect();\n\n assert!(callers.is_empty());\n\n let callees: Vec<&str> = callgraph.callees(\"twice_caller\").sorted().collect();\n\n assert_eq!(callees, vec![\"simple_callee\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"nested_caller\").sorted().collect();\n\n assert!(callers.is_empty());\n\n let callees: Vec<&str> = callgraph.callees(\"nested_caller\").sorted().collect();\n\n assert_eq!(callees, vec![\"simple_caller\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"callee_with_loop\").sorted().collect();\n\n assert_eq!(callers, vec![\"caller_of_loop\"]);\n", "file_path": "tests/call.rs", "rank": 85, "score": 6.5855702429023735 }, { "content": " let callees: Vec<&str> 
= callgraph.callees(\"callee_with_loop\").sorted().collect();\n\n assert_eq!(callees, vec![\"llvm.lifetime.end.p0i8\", \"llvm.lifetime.start.p0i8\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"caller_of_loop\").sorted().collect();\n\n assert!(callers.is_empty());\n\n let callees: Vec<&str> = callgraph.callees(\"caller_of_loop\").sorted().collect();\n\n assert_eq!(callees, vec![\"callee_with_loop\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"caller_with_loop\").sorted().collect();\n\n assert!(callers.is_empty());\n\n let callees: Vec<&str> = callgraph.callees(\"caller_with_loop\").sorted().collect();\n\n assert_eq!(callees, vec![\"llvm.lifetime.end.p0i8\", \"llvm.lifetime.start.p0i8\", \"simple_callee\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"recursive_simple\").sorted().collect();\n\n assert_eq!(callers, vec![\"recursive_simple\"]);\n\n let callees: Vec<&str> = callgraph.callees(\"recursive_simple\").sorted().collect();\n\n assert_eq!(callees, vec![\"recursive_simple\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"recursive_double\").sorted().collect();\n\n assert_eq!(callers, vec![\"recursive_double\"]);\n", "file_path": "tests/call.rs", "rank": 86, "score": 6.326990732102602 }, { "content": " let callees: Vec<&str> = callgraph.callees(\"recursive_double\").sorted().collect();\n\n assert_eq!(callees, vec![\"recursive_double\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"recursive_and_normal_caller\").sorted().collect();\n\n assert_eq!(callers, vec![\"recursive_and_normal_caller\"]);\n\n let callees: Vec<&str> = callgraph.callees(\"recursive_and_normal_caller\").sorted().collect();\n\n assert_eq!(callees, vec![\"recursive_and_normal_caller\", \"simple_callee\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"mutually_recursive_a\").sorted().collect();\n\n assert_eq!(callers, vec![\"mutually_recursive_b\"]);\n\n let callees: Vec<&str> = 
callgraph.callees(\"mutually_recursive_a\").sorted().collect();\n\n assert_eq!(callees, vec![\"mutually_recursive_b\"]);\n\n\n\n let callers: Vec<&str> = callgraph.callers(\"mutually_recursive_b\").sorted().collect();\n\n assert_eq!(callers, vec![\"mutually_recursive_a\"]);\n\n let callees: Vec<&str> = callgraph.callees(\"mutually_recursive_b\").sorted().collect();\n\n assert_eq!(callees, vec![\"mutually_recursive_a\"]);\n\n}\n\n\n", "file_path": "tests/call.rs", "rank": 87, "score": 6.291105266377688 }, { "content": " CFGNode::Block(block) => block,\n\n CFGNode::Return => panic!(\"Return node should not be entry\"), // perhaps you tried to call this on a reversed CFG? In-crate users can use the `entry_node` field directly if they need to account for the possibility of a reversed CFG\n\n }\n\n }\n\n\n\n /// Get the reversed CFG; i.e., the CFG where all edges have been reversed\n\n pub(crate) fn reversed(&self) -> Self {\n\n Self {\n\n graph: DiGraphMap::from_edges(\n\n self.graph.all_edges().map(|(a, b, _)| (b, a, ()))\n\n ),\n\n entry_node: CFGNode::Return,\n\n }\n\n }\n\n}\n", "file_path": "src/control_flow_graph.rs", "rank": 88, "score": 5.75336083439353 }, { "content": " /// (farthest from entry) along paths from the entry block to bbY\n\n pub(crate) graph: DiGraphMap<CFGNode<'m>, ()>,\n\n\n\n /// Entry node for the function\n\n pub(crate) entry_node: CFGNode<'m>,\n\n}\n\n\n\n/// The postdominator tree for a particular function.\n\n///\n\n/// To construct a `PostDominatorTree`, use\n\n/// [`FunctionAnalysis`](struct.FunctionAnalysis.html), which you can get\n\n/// from [`ModuleAnalysis`](struct.ModuleAnalysis.html).\n\npub struct PostDominatorTree<'m> {\n\n /// The graph itself. 
An edge from bbX to bbY indicates that bbX is the\n\n /// immediate postdominator of bbY.\n\n ///\n\n /// That is:\n\n /// - bbX strictly postdominates bbY, i.e., bbX appears on every control-flow\n\n /// path from bbY to the function exit (but bbX =/= bbY)\n\n /// - Of the blocks that strictly postdominate bbY, bbX is the closest to bbY\n\n /// (farthest from exit) along paths from bbY to the function exit\n\n pub(crate) graph: DiGraphMap<CFGNode<'m>, ()>,\n\n}\n\n\n\n/// Contains state used when constructing the `DominatorTree` or `PostDominatorTree`\n", "file_path": "src/dominator_tree.rs", "rank": 89, "score": 5.656245516835986 }, { "content": " let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n\n assert_eq!(func_names, vec![\"four_args\"]);\n\n\n\n let functy = module.types.func_type(\n\n module.types.i32(),\n\n vec![module.types.i32(), module.types.i32(), module.types.i32(), module.types.i32(), module.types.i32()],\n\n false,\n\n );\n\n let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n\n assert_eq!(func_names, vec![\"five_args\"]);\n\n\n\n let functy = module.types.func_type(\n\n module.types.i8(),\n\n vec![module.types.i8(), module.types.i8()],\n\n false,\n\n );\n\n let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n\n assert_eq!(func_names, vec![\"int8t\"]);\n\n\n\n let functy = module.types.func_type(\n", "file_path": "tests/basic.rs", "rank": 90, "score": 5.500234453411782 }, { "content": " module.types.i16(),\n\n vec![module.types.i16(), module.types.i16()],\n\n false,\n\n );\n\n let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n\n assert_eq!(func_names, vec![\"int16t\"]);\n\n\n\n let functy = module.types.func_type(\n\n module.types.i64(),\n\n vec![module.types.i64(), module.types.i64()],\n\n false,\n\n );\n\n let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n\n assert_eq!(func_names, 
vec![\"int64t\"]);\n\n\n\n let functy = module.types.func_type(\n\n module.types.i64(),\n\n vec![module.types.i8(), module.types.i16(), module.types.i32(), module.types.i64()],\n\n false,\n\n );\n\n let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n\n assert_eq!(func_names, vec![\"mixed_bitwidths\"]);\n\n}\n\n\n", "file_path": "tests/basic.rs", "rank": 91, "score": 5.467059068802254 }, { "content": " assert_eq!(func_names, vec![\"no_args_nozero\", \"no_args_zero\"]);\n\n\n\n let functy = module.types.func_type(\n\n module.types.i32(),\n\n vec![module.types.i32()],\n\n false,\n\n );\n\n let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n\n assert_eq!(func_names, vec![\"one_arg\"]);\n\n\n\n let functy = module.types.func_type(\n\n module.types.i32(),\n\n vec![module.types.i32(), module.types.i32()],\n\n false,\n\n );\n\n let func_names: Vec<&str> = fbt.functions_with_type(&functy).sorted().collect();\n\n assert_eq!(func_names, vec![\n\n \"binops\",\n\n \"conditional_false\",\n\n \"conditional_nozero\",\n", "file_path": "tests/basic.rs", "rank": 94, "score": 4.836197813811237 }, { "content": " Ordering::Greater => {\n\n node_a = self.idoms[&node_a].expect(\"entry node should have the smallest rpo number\");\n\n },\n\n Ordering::Equal => break,\n\n }\n\n }\n\n\n\n node_a\n\n }\n\n}\n\n\n\nimpl<'m> DominatorTree<'m> {\n\n pub(crate) fn new(cfg: &ControlFlowGraph<'m>) -> Self {\n\n Self {\n\n graph: DomTreeBuilder::new(cfg).build(),\n\n entry_node: cfg.entry_node,\n\n }\n\n }\n\n\n\n /// Get the immediate dominator of the basic block with the given `Name`.\n", "file_path": "src/dominator_tree.rs", "rank": 95, "score": 4.42214882493252 }, { "content": "# `llvm-ir-analysis`: Static analysis of LLVM 
IR\n\n\n\n[![crates.io](https://img.shields.io/crates/v/llvm-ir-analysis.svg)](https://crates.io/crates/llvm-ir-analysis)\n\n[![License](https://img.shields.io/badge/license-MIT-blue.svg)](https://raw.githubusercontent.com/cdisselkoen/llvm-ir-analysis/main/LICENSE)\n\n\n\nThis crate provides several simple static analyses of LLVM IR.\n\nIn particular, this crate computes the following on an [`llvm-ir`] `Module` or `Function`:\n\n\n\n- [`CallGraph`](https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.CallGraph.html)\n\n- [`ControlFlowGraph`](https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.ControlFlowGraph.html)\n\n- [`DominatorTree`](https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.DominatorTree.html)\n\n- [`PostDominatorTree`](https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.PostDominatorTree.html)\n\n- [`ControlDependenceGraph`](https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.ControlDependenceGraph.html)\n\n- [`FunctionsByType`](https://docs.rs/llvm-ir-analysis/0.3.1/llvm_ir_analysis/struct.FunctionsByType.html)\n\n\n\nThe above analyses are provided by the [`FunctionAnalysis`],\n\n[`ModuleAnalysis`], and [`CrossModuleAnalysis`] objects, which lazily compute\n\neach of these structures on demand and cache the results.\n\n\n", "file_path": "README.md", "rank": 96, "score": 4.266375545093782 }, { "content": " }\n\n\n\n Self {\n\n graph,\n\n entry_node: CFGNode::Block(&function.basic_blocks[0].name),\n\n }\n\n }\n\n\n\n /// Get the predecessors of the basic block with the given `Name`\n\n pub fn preds<'s>(&'s self, block: &'m Name) -> impl Iterator<Item = &'m Name> + 's {\n\n self.preds_of_cfgnode(CFGNode::Block(block))\n\n }\n\n\n\n /// Get the predecessors of the special `Return` node, i.e., get all blocks\n\n /// which may directly return\n\n pub fn preds_of_return<'s>(&'s self) -> impl Iterator<Item = &'m Name> + 's {\n\n self.preds_of_cfgnode(CFGNode::Return)\n\n }\n\n\n\n pub(crate) fn 
preds_of_cfgnode<'s>(&'s self, node: CFGNode<'m>) -> impl Iterator<Item = &'m Name> + 's {\n", "file_path": "src/control_flow_graph.rs", "rank": 97, "score": 4.2632412548595 }, { "content": " /// This is the same as `dominates()`, except that if\n\n /// `node_a == node_b`, this returns `false`.\n\n pub fn strictly_dominates(&self, node_a: CFGNode<'m>, node_b: CFGNode<'m>) -> bool {\n\n node_a != node_b && self.dominates(node_a, node_b)\n\n }\n\n\n\n /// Get the `Name` of the entry block for the function\n\n pub fn entry(&self) -> &'m Name {\n\n match self.entry_node {\n\n CFGNode::Block(block) => block,\n\n CFGNode::Return => panic!(\"Return node should not be entry\"),\n\n }\n\n }\n\n}\n\n\n\nimpl<'m> PostDominatorTree<'m> {\n\n pub(crate) fn new(cfg: &ControlFlowGraph<'m>) -> Self {\n\n // The postdominator relation for `cfg` is the dominator relation on\n\n // the reversed `cfg` (Cytron et al, p. 477)\n\n\n", "file_path": "src/dominator_tree.rs", "rank": 98, "score": 4.201472942964745 }, { "content": " ty => panic!(\"Expected function pointer to have pointer type, but got {:?}\", ty),\n\n };\n\n for target in functions_by_type.functions_with_type(&func_ty) {\n\n graph.add_edge(&f.name, target, ());\n\n }\n\n },\n\n Either::Left(_) => {}, // ignore calls to inline assembly\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n Self {\n\n graph,\n\n }\n\n }\n\n\n\n /// Get the names of functions in the analyzed `Module`(s) which may call the\n", "file_path": "src/call_graph.rs", "rank": 99, "score": 4.179501817244827 } ]
Rust
thcon/src/app/konsole.rs
theme-controller/thcon
646b8dac38994acff31cd90aae6fafa80bad168a
use crate::config::Config as ThconConfig; use crate::operation::Operation; use crate::AppConfig; use crate::Disableable; use crate::{ themeable::{ConfigError, ConfigState}, Themeable, }; use std::time::Duration; use anyhow::{Context, Result}; use dbus::blocking::Connection; use log::{debug, trace}; use serde::Deserialize; use xml::reader::{EventReader, XmlEvent}; #[derive(Debug, Deserialize, Disableable, AppConfig)] pub struct _Config { light: String, dark: String, #[serde(default)] disabled: bool, } pub struct Konsole { dbus: Connection, } impl Default for Konsole { fn default() -> Self { Self { dbus: Connection::new_session().unwrap(), } } } impl Konsole { fn get_services(&self) -> Result<Vec<String>> { let proxy = self .dbus .with_proxy("org.freedesktop.DBus", "/", Duration::from_millis(2500)); let (names,): (Vec<String>,) = proxy .method_call("org.freedesktop.DBus", "ListNames", ()) .context("Unable to retrieve konsole windows from DBus")?; let konsoles: Vec<String> = names .into_iter() .filter(|name| name.as_str().starts_with("org.kde.konsole-")) .collect(); trace!( "Found {} {}", konsoles.len(), if konsoles.len() == 1 { "service" } else { "services" }, ); Ok(konsoles) } fn get_session_ids(&self, service_id: &str) -> Result<Vec<String>> { let proxy = self .dbus .with_proxy(service_id, "/Sessions", Duration::from_millis(2500)); let (xml,): (String,) = proxy .method_call("org.freedesktop.DBus.Introspectable", "Introspect", ()) .with_context(|| { format!( "Unable to get konsole session ids for DBus service '{}'", service_id ) })?; let parser = EventReader::from_str(&xml); let mut depth = 0; let mut session_ids: Vec<String> = vec![]; for e in parser { match e { Ok(XmlEvent::StartElement { name, attributes, .. }) => { if depth == 1 && name.local_name == "node" { session_ids.extend(attributes.into_iter().filter_map(|attr| { if attr.name.local_name == "name" { Some(attr.value) } else { None } })); } depth += 1; } Ok(XmlEvent::EndElement { .. 
}) => depth -= 1, Err(e) => { return Err(e.into()); } _ => {} } } trace!( "Found {} {} in service {}", session_ids.len(), if session_ids.len() == 1 { "session" } else { "sessions" }, service_id ); Ok(session_ids) } fn set_profile_name( &self, service_id: &str, session_id: &str, profile_name: &str, ) -> Result<()> { let proxy = self.dbus.with_proxy( service_id, format!("/Sessions/{}", session_id), Duration::from_millis(2500), ); let _: () = proxy.method_call("org.kde.konsole.Session", "setProfile", (profile_name,))?; Ok(()) } fn set_default_profile(&self, service_id: &str, profile_name: &str) -> Result<()> { let proxy = self .dbus .with_proxy(service_id, "/Windows", Duration::from_millis(2500)); let (xml,): (String,) = proxy .method_call("org.freedesktop.DBus.Introspectable", "Introspect", ()) .with_context(|| { format!("Unable to retreive window for DBus service '{}", service_id) })?; let parser = EventReader::from_str(&xml); let mut depth = 0; let mut window_id: Option<String> = None; for e in parser { match e { Ok(XmlEvent::StartElement { name, attributes, .. }) => { if depth == 1 && name.local_name == "node" { window_id = attributes.into_iter().find_map(|attr| { if attr.name.local_name == "name" { Some(attr.value) } else { None } }); } depth += 1; } Ok(XmlEvent::EndElement { .. 
}) => depth -= 1, Err(e) => { return Err(e.into()); } _ => {} } } if let Some(window_id) = window_id { trace!("Found first window ID {}", window_id); let proxy = self.dbus.with_proxy( service_id, format!("/Windows/{}", window_id), Duration::from_millis(2500), ); proxy .method_call( "org.kde.konsole.Window", "setDefaultProfile", (profile_name,), ) .context("asdfasdf")?; } else { trace!("Found no Konsole windows; can't set default profile."); } Ok(()) } } impl Themeable for Konsole { fn config_state(&self, config: &ThconConfig) -> ConfigState { ConfigState::with_manual_config(config.konsole.as_ref().map(|c| c.inner.as_ref())) } fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> { let config = match self.config_state(config) { ConfigState::NoDefault => { return Err(ConfigError::RequiresManualConfig("konsole").into()) } ConfigState::Default => unreachable!(), ConfigState::Disabled => return Ok(()), ConfigState::Enabled => config.konsole.as_ref().unwrap().unwrap_inner_left(), }; let mut total_sessions = 0; let services: Vec<(String, Vec<String>)> = self .get_services()? .into_iter() .map(|service| { let session_ids = self.get_session_ids(&service).unwrap(); total_sessions += session_ids.len(); (service, session_ids) }) .collect(); let theme = match operation { Operation::Darken => &config.dark, Operation::Lighten => &config.light, }; if services.len() == 1 { debug!( "Found {} {}", total_sessions, if total_sessions == 1 { "session" } else { "sessions" }, ); } else { debug!( "Found {} {} across {} services", total_sessions, if total_sessions == 1 { "session" } else { "sessions" }, services.len(), ); } for (service_id, session_ids) in services.iter() { for session_id in session_ids.iter() { self.set_profile_name(service_id, session_id, theme)?; } } if let Some((session, _)) = services.get(0) { self.set_default_profile(session, theme)?; } Ok(()) } }
use crate::config::Config as ThconConfig; use crate::operation::Operation; use crate::AppConfig; use crate::Disableable; use crate::{ themeable::{ConfigError, ConfigState}, Themeable, }; use std::time::Duration; use anyhow::{Context, Result}; use dbus::blocking::Connection; use log::{debug, trace}; use serde::Deserialize; use xml::reader::{EventReader, XmlEvent}; #[derive(Debug, Deserialize, Disableable, AppConfig)] pub struct _Config { light: String, dark: String, #[serde(default)] disabled: bool, } pub struct Konsole { dbus: Connection, } impl Default for Konsole { fn default() -> Self { Self { dbus: Connection::new_session().unwrap(), } } } impl Konsole { fn get_services(&self) -> Result<Vec<String>> { let proxy = self .dbus .with_proxy("org.freedesktop.DBus", "/", Duration::from_millis(2500)); let (names,): (Vec<String>,) = proxy .method_call("org.freedesktop.DBus", "ListNames", ()) .context("Unable to retrieve konsole windows from DBus")?; let konsoles: Vec<String> = names .into_iter() .filter(|name| name.as_str().starts_with("org.kde.konsole-")) .collect(); trace!( "Found {} {}", konsoles.len(), if konsoles.len() == 1 { "service" } else { "services" }, ); Ok(konsoles) }
fn set_profile_name( &self, service_id: &str, session_id: &str, profile_name: &str, ) -> Result<()> { let proxy = self.dbus.with_proxy( service_id, format!("/Sessions/{}", session_id), Duration::from_millis(2500), ); let _: () = proxy.method_call("org.kde.konsole.Session", "setProfile", (profile_name,))?; Ok(()) } fn set_default_profile(&self, service_id: &str, profile_name: &str) -> Result<()> { let proxy = self .dbus .with_proxy(service_id, "/Windows", Duration::from_millis(2500)); let (xml,): (String,) = proxy .method_call("org.freedesktop.DBus.Introspectable", "Introspect", ()) .with_context(|| { format!("Unable to retreive window for DBus service '{}", service_id) })?; let parser = EventReader::from_str(&xml); let mut depth = 0; let mut window_id: Option<String> = None; for e in parser { match e { Ok(XmlEvent::StartElement { name, attributes, .. }) => { if depth == 1 && name.local_name == "node" { window_id = attributes.into_iter().find_map(|attr| { if attr.name.local_name == "name" { Some(attr.value) } else { None } }); } depth += 1; } Ok(XmlEvent::EndElement { .. 
}) => depth -= 1, Err(e) => { return Err(e.into()); } _ => {} } } if let Some(window_id) = window_id { trace!("Found first window ID {}", window_id); let proxy = self.dbus.with_proxy( service_id, format!("/Windows/{}", window_id), Duration::from_millis(2500), ); proxy .method_call( "org.kde.konsole.Window", "setDefaultProfile", (profile_name,), ) .context("asdfasdf")?; } else { trace!("Found no Konsole windows; can't set default profile."); } Ok(()) } } impl Themeable for Konsole { fn config_state(&self, config: &ThconConfig) -> ConfigState { ConfigState::with_manual_config(config.konsole.as_ref().map(|c| c.inner.as_ref())) } fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> { let config = match self.config_state(config) { ConfigState::NoDefault => { return Err(ConfigError::RequiresManualConfig("konsole").into()) } ConfigState::Default => unreachable!(), ConfigState::Disabled => return Ok(()), ConfigState::Enabled => config.konsole.as_ref().unwrap().unwrap_inner_left(), }; let mut total_sessions = 0; let services: Vec<(String, Vec<String>)> = self .get_services()? .into_iter() .map(|service| { let session_ids = self.get_session_ids(&service).unwrap(); total_sessions += session_ids.len(); (service, session_ids) }) .collect(); let theme = match operation { Operation::Darken => &config.dark, Operation::Lighten => &config.light, }; if services.len() == 1 { debug!( "Found {} {}", total_sessions, if total_sessions == 1 { "session" } else { "sessions" }, ); } else { debug!( "Found {} {} across {} services", total_sessions, if total_sessions == 1 { "session" } else { "sessions" }, services.len(), ); } for (service_id, session_ids) in services.iter() { for session_id in session_ids.iter() { self.set_profile_name(service_id, session_id, theme)?; } } if let Some((session, _)) = services.get(0) { self.set_default_profile(session, theme)?; } Ok(()) } }
fn get_session_ids(&self, service_id: &str) -> Result<Vec<String>> { let proxy = self .dbus .with_proxy(service_id, "/Sessions", Duration::from_millis(2500)); let (xml,): (String,) = proxy .method_call("org.freedesktop.DBus.Introspectable", "Introspect", ()) .with_context(|| { format!( "Unable to get konsole session ids for DBus service '{}'", service_id ) })?; let parser = EventReader::from_str(&xml); let mut depth = 0; let mut session_ids: Vec<String> = vec![]; for e in parser { match e { Ok(XmlEvent::StartElement { name, attributes, .. }) => { if depth == 1 && name.local_name == "node" { session_ids.extend(attributes.into_iter().filter_map(|attr| { if attr.name.local_name == "name" { Some(attr.value) } else { None } })); } depth += 1; } Ok(XmlEvent::EndElement { .. }) => depth -= 1, Err(e) => { return Err(e.into()); } _ => {} } } trace!( "Found {} {} in service {}", session_ids.len(), if session_ids.len() == 1 { "session" } else { "sessions" }, service_id ); Ok(session_ids) }
function_block-full_function
[ { "content": "pub fn get(name: &str) -> Option<Box<dyn Themeable>> {\n\n match name {\n\n #[cfg(dbus)]\n\n \"konsole\" => Some(Box::new(Konsole::default())),\n\n #[cfg(dbus)]\n\n \"gnome-shell\" => Some(Box::new(GnomeShell {})),\n\n #[cfg(dbus)]\n\n \"gnome-terminal\" => Some(Box::new(GnomeTerminal::default())),\n\n #[cfg(dbus)]\n\n \"gtk\" => Some(Box::new(Gtk {})),\n\n #[cfg(dbus)]\n\n \"plasma\" => Some(Box::new(Plasma {})),\n\n #[cfg(mac)]\n\n \"macos\" => Some(Box::new(MacOS {})),\n\n #[cfg(mac)]\n\n \"terminal-app\" => Some(Box::new(TerminalDotApp {})),\n\n #[cfg(mac)]\n\n \"iterm2\" => Some(Box::new(Iterm2 {})),\n\n \"vscode\" => Some(Box::new(VSCode {})),\n\n \"alacritty\" => Some(Box::new(Alacritty {})),\n\n \"vim\" => Some(Box::new(Vim {})),\n\n \"nvim\" => Some(Box::new(Neovim {})),\n\n \"sublime-text\" => Some(Box::new(SublimeText {})),\n\n \"atom\" => Some(Box::new(Atom {})),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "thcon/src/app.rs", "rank": 0, "score": 131826.10760730514 }, { "content": "/// Returns a PathBuf for an app-specific Unix domain socket, optionally including a process ID for\n\n/// apps that need separate sockets per-instance.\n\n///\n\n/// # Examples\n\n///\n\n/// App `foo` shares an instance of `thcon-listen` across all instances:\n\n///\n\n/// ```no_run\n\n/// # use thcon::sockets::socket_addr;\n\n/// # use std::path::PathBuf;\n\n/// assert_eq!(\n\n/// socket_addr(\"foo\", false),\n\n/// PathBuf::from(\"/tmp/thcon/foo.sock\"),\n\n/// )\n\n/// ```\n\n///\n\n/// App `bar` requires a new instance of `thcon-listen` for each instance, since it can't share one:\n\n///\n\n/// ```no_run\n\n/// # use thcon::sockets::socket_addr;\n\n/// # use std::path::PathBuf;\n\n/// let pid = std::process::id().to_string();\n\n/// assert_eq!(\n\n/// socket_addr(\"bar\", true),\n\n/// PathBuf::from(format!(\"/tmp/thcon/bar/{}.sock\", pid)),\n\n/// )\n\n/// ```\n\npub fn socket_addr(app_name: &str, include_pid: bool) -> PathBuf {\n\n let mut addr = 
crate::dirs::temp().join(\"thcon\").join(app_name);\n\n\n\n if include_pid {\n\n addr.push(process::id().to_string() + \".sock\");\n\n } else {\n\n addr.set_extension(\"sock\");\n\n }\n\n\n\n addr\n\n}\n\n\n", "file_path": "thcon/src/sockets.rs", "rank": 1, "score": 126254.25586745483 }, { "content": "fn is_disabled() -> bool {\n\n true\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct ConfigSection {\n\n color_scheme: Option<String>,\n\n theme: Option<String>,\n\n}\n\n\n\nimpl Default for _Config {\n\n fn default() -> Self {\n\n Self {\n\n light: ConfigSection {\n\n color_scheme: Some(\n\n \"Packages/Color Scheme - Default/Celeste.sublime-color-scheme\".to_string(),\n\n ),\n\n theme: Some(\"Adaptive.sublime-theme\".to_string()),\n\n },\n\n dark: ConfigSection {\n", "file_path": "thcon/src/app/sublime_text.rs", "rank": 2, "score": 117298.85593350849 }, { "content": "pub fn all_names() -> Vec<&'static str> {\n\n vec![\n\n \"alacritty\",\n\n \"atom\",\n\n #[cfg(dbus)]\n\n \"gnome-shell\",\n\n #[cfg(dbus)]\n\n \"gnome-terminal\",\n\n #[cfg(dbus)]\n\n \"gtk\",\n\n #[cfg(mac)]\n\n \"iterm2\",\n\n #[cfg(dbus)]\n\n \"konsole\",\n\n #[cfg(mac)]\n\n \"macos\",\n\n #[cfg(mac)]\n\n \"terminal-app\",\n\n \"nvim\",\n\n #[cfg(dbus)]\n\n \"plasma\",\n\n \"sublime-text\",\n\n \"vim\",\n\n \"vscode\",\n\n ]\n\n}\n", "file_path": "thcon/src/app.rs", "rank": 3, "score": 105230.47902586128 }, { "content": "#[proc_macro_derive(AppConfig)]\n\npub fn appconfig_macro_derive(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n let name = &input.ident;\n\n let gen = quote! 
{\n\n #[derive(Deserialize, Debug)]\n\n #[serde(transparent)]\n\n pub struct Config {\n\n #[serde(with = \"either::serde_untagged\")]\n\n inner: either::Either<#name, thcon_trait::Disabled>,\n\n }\n\n\n\n impl Config {\n\n /// Convenience function to access the enabled configuration variant\n\n fn unwrap_inner_left(&self) -> &(#name) {\n\n self.inner.as_ref().unwrap_left()\n\n }\n\n\n\n /// Convenience function to access the disabled configuration variant\n\n fn unwrap_inner_right(&self) -> &thcon_trait::Disabled {\n\n self.inner.as_ref().unwrap_right()\n\n }\n\n }\n\n };\n\n\n\n gen.into()\n\n}\n", "file_path": "thcon_macro/src/lib.rs", "rank": 4, "score": 94919.57943800114 }, { "content": "#[proc_macro_derive(Disableable)]\n\npub fn disableable_macro_derive(input: TokenStream) -> TokenStream {\n\n let input = parse_macro_input!(input as DeriveInput);\n\n\n\n let name = &input.ident;\n\n let gen = quote! {\n\n impl Disableable for #name {\n\n fn disabled(&self) -> bool {\n\n self.disabled == true\n\n }\n\n }\n\n };\n\n\n\n gen.into()\n\n}\n\n\n\n/// Creates a `Config` struct wrapping either the marked struct or an instance of\n\n/// `thcon_trait::Disabled`.\n", "file_path": "thcon_macro/src/lib.rs", "rank": 5, "score": 94861.8579953004 }, { "content": "pub fn switch(\n\n config: &Config,\n\n name: &str,\n\n was_requested: bool,\n\n operation: &Operation,\n\n) -> Result<()> {\n\n let start = Instant::now();\n\n let app = match app::get(name) {\n\n None => {\n\n return Ok(());\n\n }\n\n Some(app) => app,\n\n };\n\n\n\n match app.config_state(config) {\n\n ConfigState::NoDefault => {\n\n if was_requested {\n\n error!(target: name, \"skipping (needs manual configuration)\");\n\n trace!(\n\n target: name,\n", "file_path": "thcon/src/switch.rs", "rank": 6, "score": 88560.67012790164 }, { "content": "pub trait Themeable {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState;\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> 
Result<()>;\n\n}\n\n\n\n#[derive(PartialEq, Eq, Debug)]\n\npub enum ConfigState {\n\n NoDefault,\n\n Default,\n\n Disabled,\n\n Enabled,\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum ConfigError {\n\n #[error(\"{0} requires manual configuration\")]\n\n RequiresManualConfig(&'static str),\n\n}\n\n\n\nimpl ConfigState {\n", "file_path": "thcon/src/themeable.rs", "rank": 7, "score": 85030.3211548902 }, { "content": "pub fn temp() -> PathBuf {\n\n #[cfg(not(windows))]\n\n return PathBuf::from(\"/tmp\");\n\n\n\n #[cfg(windows)]\n\n todo!();\n\n}\n", "file_path": "thcon/src/dirs.rs", "rank": 8, "score": 82340.4578696209 }, { "content": "#[cfg(not(mac))]\n\npub fn config() -> Option<PathBuf> {\n\n ::dirs::config_dir()\n\n}\n\n\n", "file_path": "thcon/src/dirs.rs", "rank": 9, "score": 78511.87686191966 }, { "content": "#[cfg(not(mac))]\n\npub fn data() -> Option<PathBuf> {\n\n ::dirs::data_dir()\n\n}\n\n\n", "file_path": "thcon/src/dirs.rs", "rank": 10, "score": 78511.87686191966 }, { "content": "/// An app that can be disabled via configuration.\n\npub trait Disableable {\n\n /// Returns `true` if the configured app is disabled.\n\n fn disabled(&self) -> bool;\n\n}\n\n\n\n#[derive(Deserialize, Debug)]\n\npub struct Disabled {\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\nimpl Disableable for Disabled {\n\n fn disabled(&self) -> bool {\n\n self.disabled\n\n }\n\n}\n\n\n", "file_path": "thcon_trait/src/lib.rs", "rank": 11, "score": 75616.12538684813 }, { "content": "fn replace_color_theme(settings_json: &str, new_theme: &str) -> String {\n\n let theme_regex = Regex::new(\n\n r#\"^(?P<prefix>\\s*\"workbench.colorTheme\"\\s*:\\s*)\"(?P<v>.+)\"(?P<suffix>,?\\s*//\\s*thcon:replace-line)\"#,\n\n );\n\n match theme_regex {\n\n Err(_) => settings_json.to_owned(),\n\n Ok(theme_regex) => {\n\n let modified_lines: Vec<String> = settings_json\n\n .lines()\n\n .map(|line| {\n\n theme_regex\n\n .replace(line, |caps: &Captures| {\n\n format!(r#\"{}\"{}\"{}\"#, 
&caps[\"prefix\"], new_theme, &caps[\"suffix\"])\n\n })\n\n .into_owned()\n\n })\n\n .collect();\n\n\n\n modified_lines.join(\"\\n\")\n\n }\n\n }\n\n}\n\n\n", "file_path": "thcon/src/app/vscode.rs", "rank": 12, "score": 69245.72013860407 }, { "content": "/// Unused marker trait to enable #[derive(AppConfig)].\n\npub trait AppConfig {}\n", "file_path": "thcon_trait/src/lib.rs", "rank": 13, "score": 62601.18259197999 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct WirePayload {\n\n rc_file: String,\n\n}\n\n\n", "file_path": "thcon/src/app/vim.rs", "rank": 14, "score": 50671.478955217695 }, { "content": "fn main() {\n\n cfg_aliases! {\n\n mac: { target_os=\"macos\" },\n\n dbus: { any(\n\n target_os=\"linux\",\n\n target_os=\"freebsd\",\n\n target_os=\"dragonfly\",\n\n target_os=\"openbsd\",\n\n target_os=\"netbsd\"\n\n ) },\n\n }\n\n}\n", "file_path": "thcon/build.rs", "rank": 15, "score": 45736.90181031516 }, { "content": "#[cfg(not(windows))]\n\n#[test]\n\nfn app_with_pid() {\n\n let pid = process::id().to_string();\n\n assert_eq!(\n\n PathBuf::from(format!(\"/tmp/thcon/some_app/{}.sock\", pid)),\n\n socket_addr(\"some_app\", true),\n\n )\n\n}\n", "file_path": "thcon/src/sockets.rs", "rank": 16, "score": 43766.31540175353 }, { "content": "#[cfg(not(windows))]\n\n#[test]\n\nfn app_without_pid() {\n\n assert_eq!(\n\n PathBuf::from(\"/tmp/thcon/some_app.sock\"),\n\n socket_addr(\"some_app\", false),\n\n )\n\n}\n\n\n", "file_path": "thcon/src/sockets.rs", "rank": 17, "score": 42866.827157280124 }, { "content": "#[test]\n\nfn to_vimrc_all_sections() {\n\n use serde_json::json;\n\n\n\n let config = ConfigSection {\n\n set: Some(\n\n [\n\n (\"background\".to_string(), json!(\"dark\")),\n\n (\"number\".to_string(), json!(true)),\n\n ]\n\n .iter()\n\n .cloned()\n\n .collect(),\n\n ),\n\n setglobal: Some(\n\n [\n\n (\"tw\".to_string(), json!(100)),\n\n (\"relnum\".to_string(), json!(false)),\n\n ]\n\n .iter()\n\n .cloned()\n", "file_path": "thcon/src/app/vim.rs", 
"rank": 18, "score": 42863.17596534291 }, { "content": "#[test]\n\nfn to_vimrc_empty_input() {\n\n let config = ConfigSection::default();\n\n assert_eq!(config.to_vimrc(), \"\",);\n\n}\n\n\n", "file_path": "thcon/src/app/vim.rs", "rank": 19, "score": 42015.391880907664 }, { "content": "#[test]\n\nfn replace_color_trailing_comma() {\n\n let settings_json = r#\"\n\n {\n\n \"workbench.colorTheme\": \"Default Dark+\", // thcon:replace-line\n\n \"editor.minimap.enabled\": false,\n\n }\n\n \"#;\n\n let res = replace_color_theme(settings_json, \"Default Light+\");\n\n assert_eq!(\n\n res,\n\n r#\"\n\n {\n\n \"workbench.colorTheme\": \"Default Light+\", // thcon:replace-line\n\n \"editor.minimap.enabled\": false,\n\n }\n\n \"#\n\n );\n\n}\n\n\n", "file_path": "thcon/src/app/vscode.rs", "rank": 20, "score": 41214.978437110476 }, { "content": "#[test]\n\nfn replace_color_no_trailing_comma() {\n\n let settings_json = r#\"\n\n {\n\n \"editor.minimap.enabled\": false,\n\n \"workbench.colorTheme\" :\"Default Dark+\" // thcon:replace-line\n\n }\n\n \"#;\n\n let res = replace_color_theme(settings_json, \"Default Light+\");\n\n assert_eq!(\n\n res,\n\n r#\"\n\n {\n\n \"editor.minimap.enabled\": false,\n\n \"workbench.colorTheme\" :\"Default Light+\" // thcon:replace-line\n\n }\n\n \"#\n\n );\n\n}\n", "file_path": "thcon/src/app/vscode.rs", "rank": 21, "score": 41214.978437110476 }, { "content": "fn preferences_path() -> PathBuf {\n\n [\n\n dirs::config_dir().unwrap().to_str().unwrap(),\n\n #[cfg(mac)]\n\n \"Sublime Text 3\",\n\n #[cfg(not(mac))]\n\n \"sublime-text-3\",\n\n \"Packages\",\n\n \"User\",\n\n \"Preferences.sublime-settings\",\n\n ]\n\n .iter()\n\n .collect()\n\n}\n\npub struct SublimeText;\n\n\n\nimpl Themeable for SublimeText {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n let config_state = ConfigState::with_default_config(\n\n config.sublime_text.as_ref().map(|c| c.inner.as_ref()),\n", "file_path": "thcon/src/app/sublime_text.rs", "rank": 22, 
"score": 38993.81474394996 }, { "content": "#[cfg(windows)]\n\nfn alacritty_config() -> Option<PathBuf> {\n\n dirs::config_dir()\n\n .map(|path| path.join(\"alacritty\\\\alacritty.yml\"))\n\n .filter(|new| new.exists())\n\n}\n", "file_path": "thcon/src/app/alacritty.rs", "rank": 23, "score": 37822.39008690841 }, { "content": "/// Switches settings and colorscheme in a `vim`-agnostic way.\n\n/// Returns unit result if successful, otherwise the causing error.\n\nfn anyvim_switch<V: ControlledVim>(\n\n config: &ThconConfig,\n\n config_state: ConfigState,\n\n operation: &Operation,\n\n) -> Result<()> {\n\n let config = match config_state {\n\n ConfigState::NoDefault => {\n\n return Err(ConfigError::RequiresManualConfig(V::SECTION_NAME).into())\n\n }\n\n ConfigState::Default => unreachable!(),\n\n ConfigState::Disabled => return Ok(()),\n\n ConfigState::Enabled => V::extract_config(config)\n\n .as_ref()\n\n .unwrap()\n\n .unwrap_inner_left(),\n\n };\n\n\n\n let payload = match operation {\n\n Operation::Darken => &config.dark,\n\n Operation::Lighten => &config.light,\n", "file_path": "thcon/src/app/vim.rs", "rank": 24, "score": 37822.05832420655 }, { "content": "use anyhow::Result;\n\nuse either::Either;\n\nuse thiserror::Error;\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse thcon_trait::{Disableable, Disabled};\n\n\n", "file_path": "thcon/src/themeable.rs", "rank": 25, "score": 33147.57529604087 }, { "content": " pub fn with_manual_config<T>(section: Option<Either<&T, &Disabled>>) -> Self\n\n where\n\n T: Disableable,\n\n {\n\n match section.as_ref() {\n\n None => ConfigState::NoDefault,\n\n Some(c) => match c {\n\n Either::Left(t) => match t.disabled() {\n\n true => ConfigState::Disabled,\n\n false => ConfigState::Enabled,\n\n },\n\n Either::Right(d) => match d.disabled() {\n\n true => ConfigState::Disabled,\n\n false => ConfigState::Default,\n\n },\n\n },\n\n }\n\n }\n\n\n\n pub fn with_default_config<T>(section: 
Option<Either<&T, &Disabled>>) -> Self\n", "file_path": "thcon/src/themeable.rs", "rank": 26, "score": 33147.21244401823 }, { "content": " where\n\n T: Disableable,\n\n {\n\n match section.as_ref() {\n\n None => ConfigState::Default,\n\n Some(c) => match c {\n\n Either::Left(t) => match t.disabled() {\n\n true => ConfigState::Disabled,\n\n false => ConfigState::Enabled,\n\n },\n\n Either::Right(d) => match d.disabled() {\n\n true => ConfigState::Disabled,\n\n false => ConfigState::Default,\n\n },\n\n },\n\n }\n\n }\n\n}\n", "file_path": "thcon/src/themeable.rs", "rank": 27, "score": 33140.19146203115 }, { "content": "# Konsole\n\n\n\n## Usage: Linux & BSD\n\nKonsole instances can be discovered and controlled via DBus, but it's a cumbersome process to perform in a one-liner. `thcon` simplifies that - just list the name of the Konsole profiles you prefer in light mode and in dark mode in your `thcon.toml`, e.g.:\n\n\n\n```toml\n\n[plasma]\n\ndark = \"Some dark profile\"\n\nlight = \"A light profile\"\n\n```\n\n\n\n## Usage: Windows & macOS\n\nKonsole is not available on Windows or macOS.\n\n\n\n## Config Schema\n\nSection: `konsole`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The name of the profile (case-sensitive) to use in dark mode | (none) |\n\n| `light` | string | The name of the profile (case-sensitive) to use in light mode | (none) |\n", "file_path": "docs-site/docs/app/konsole.md", "rank": 43, "score": 22509.633844417953 }, { "content": " light: String,\n\n dark: String,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\npub struct GnomeTerminal {\n\n dbus: Connection,\n\n}\n\n\n\nimpl Default for GnomeTerminal {\n\n fn default() -> Self {\n\n Self {\n\n dbus: Connection::new_session().unwrap(),\n\n }\n\n }\n\n}\n\n\n\nimpl GnomeTerminal {\n\n fn get_window_ids(&self) -> Result<Vec<String>> 
{\n", "file_path": "thcon/src/app/gnome_terminal.rs", "rank": 44, "score": 37.732057201816076 }, { "content": "use serde::Deserialize;\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n\n light: String,\n\n dark: String,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\nimpl Default for _Config {\n\n fn default() -> Self {\n\n Self {\n\n light: \"Adwaita\".to_string(),\n\n dark: \"Adwaita-dark\".to_string(),\n\n disabled: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "thcon/src/app/gtk.rs", "rank": 45, "score": 36.820431596625575 }, { "content": "//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | ------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n//! | `dark` | string | The name of the theme (case-sensitive) to apply in dark mode | (none) |\n\n//! | `light` | string | The name of the theme (case-sensitive) to apply in light mode | (none) |\n\n//!\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::themeable::{ConfigError, ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n\nuse anyhow::{Context, Result};\n\nuse gio::SettingsExt;\n\nuse serde::Deserialize;\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n", "file_path": "thcon/src/app/gnome_shell.rs", "rank": 46, "score": 36.47141615553894 }, { "content": "pub struct Atom {}\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n\n dark: Vec<String>,\n\n light: Vec<String>,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\nimpl Default for _Config {\n\n fn default() -> Self {\n\n Self {\n\n dark: vec![\"one-dark-ui\", \"one-dark-syntax\"]\n\n .iter()\n\n .map(|s| s.to_string())\n\n .collect(),\n\n light: vec![\"one-light-ui\", \"one-light-syntax\"]\n\n .iter()\n\n .map(|s| s.to_string())\n", "file_path": "thcon/src/app/atom.rs", 
"rank": 47, "score": 35.008740705101935 }, { "content": "//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n//! | `dark` | string | The name of the profile to use in dark mode | Pro |\n\n//! | `light` | string | The name of the profile to use in light mode | Basic |\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::themeable::{ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n\nuse std::process::Command;\n\n\n\nuse anyhow::anyhow;\n\nuse anyhow::{Context, Result};\n\nuse serde::Deserialize;\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n", "file_path": "thcon/src/app/terminal_dot_app.rs", "rank": 48, "score": 34.86056650645347 }, { "content": "//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n//! | `dark` | string | The name of the profile to use in dark mode | (none) |\n\n//! 
| `light` | string | The name of the profile to use in light mode | (none) |\n\n\n\nuse std::io::Write;\n\nuse std::os::unix::net::UnixStream;\n\n\n\nuse anyhow::{Context, Result};\n\nuse log::trace;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::sockets;\n\nuse crate::themeable::{ConfigError, ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n", "file_path": "thcon/src/app/iterm2.rs", "rank": 49, "score": 34.302783355940264 }, { "content": "#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n\n light: String,\n\n dark: String,\n\n config: Option<String>,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\nimpl Default for _Config {\n\n fn default() -> Self {\n\n Self {\n\n light: \"Default Light+\".to_string(),\n\n dark: \"Default Dark+\".to_string(),\n\n config: None,\n\n disabled: false,\n\n }\n\n }\n\n}\n\n\n", "file_path": "thcon/src/app/vscode.rs", "rank": 50, "score": 34.2557942169758 }, { "content": "pub struct _Config {\n\n light: String,\n\n dark: String,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\nimpl Default for _Config {\n\n fn default() -> Self {\n\n Self {\n\n light: \"Basic\".to_string(),\n\n dark: \"Pro\".to_string(),\n\n disabled: false,\n\n }\n\n }\n\n}\n\n\n\npub struct TerminalDotApp;\n\n\n\nimpl Themeable for TerminalDotApp {\n", "file_path": "thcon/src/app/terminal_dot_app.rs", "rank": 51, "score": 34.23085029911448 }, { "content": "//! Section: `plasma`\n\n//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n//! | `dark` | string | The theme package name to use in dark mode | `org.kde.breezedark.desktop` |\n\n//! 
| `light` | string | The theme package name to use in light mode | `org.kde.breeze.desktop` |\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\nuse crate::{themeable::ConfigState, Themeable};\n\n\n\nuse std::process::{Command, Stdio};\n\n\n\nuse anyhow::anyhow;\n\nuse anyhow::{Context, Result};\n\nuse serde::Deserialize;\n\n\n", "file_path": "thcon/src/app/plasma.rs", "rank": 52, "score": 33.4495809607924 }, { "content": "#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n\n light: String,\n\n dark: String,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\nimpl Default for _Config {\n\n fn default() -> Self {\n\n Self {\n\n light: \"org.kde.breeze.desktop\".to_string(),\n\n dark: \"org.kde.breezedark.desktop\".to_string(),\n\n disabled: false,\n\n }\n\n }\n\n}\n\n\n\npub struct Plasma;\n\n\n", "file_path": "thcon/src/app/plasma.rs", "rank": 53, "score": 33.06653903013912 }, { "content": "//! | `light.theme` | string | The `theme` to use in dark mode | `Default.sublime-theme` |\n\n//! 
| `preferences` | string | Absolute path to your `Preferences.sublime-settings` file | Default Sublime Text 3 locations: <ul><li>Linux/BSD: `~/.config/sublime-text-3/Packages/User/Preferences.sublime-settings`</li><li>macOS: `~/Library/Application Support/Sublime Text 3/Packages/User/Preferences.sublime-settings`</li></ul> |\n\n\n\nuse std::fs::{self, OpenOptions};\n\nuse std::path::PathBuf;\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::themeable::{ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n\nuse anyhow::Result;\n\nuse log::{debug, warn};\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::ser::{PrettyFormatter, Serializer};\n\nuse serde_json::Value as JsonValue;\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n\n light: ConfigSection,\n\n dark: ConfigSection,\n\n #[serde(rename = \"preferences\")]\n\n preferences_file: Option<String>,\n\n #[serde(default = \"is_disabled\")]\n\n disabled: bool,\n\n}\n\n\n", "file_path": "thcon/src/app/sublime_text.rs", "rank": 54, "score": 31.80026294409619 }, { "content": "//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n//! | `dark` | string | The theme to use in dark mode | Default Dark+ |\n\n//! | `light` | string | The theme to use in light mode | Default Light+ |\n\n//! 
| `config` | string | Absolute path to your `settings.json` file | `~/.config/Code/User/settings.json` |\n\n\n\nuse std::path::PathBuf;\n\nuse std::{fs, io};\n\n\n\nuse anyhow::{Context, Result};\n\nuse log::debug;\n\nuse regex::{Captures, Regex};\n\nuse serde::Deserialize;\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::themeable::{ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n", "file_path": "thcon/src/app/vscode.rs", "rank": 55, "score": 31.71180284525739 }, { "content": "//! Currently unsupported.\n\n//!\n\n//! ## `thcon.toml` Schema\n\n//! Section: `gtk`\n\n//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | ------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n//! | `dark` | string | The name of the theme (case-sensitive) to apply in dark mode | `Adwaita-dark` |\n\n//! | `light` | string | The name of the theme (case-sensitive) to apply in light mode | `Adwaita` |\n\n//!\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::themeable::{ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n\nuse anyhow::{Context, Result};\n\nuse gio::SettingsExt;\n", "file_path": "thcon/src/app/gtk.rs", "rank": 56, "score": 31.583774219903248 }, { "content": "//! | `dark` | array of strings | The themes to apply in dark mode, as shown in `config.cson` | `[\"one-dark-ui\", \"one-dark-syntax\"]` |\n\n//! 
| `light` | array of strings | The themes to apply in dark mode, as shown in `config.cson` | `[\"one-light-ui\", \"one-light-syntax\"]` |\n\n\n\nuse std::fs;\n\nuse std::io;\n\nuse std::io::Write;\n\nuse std::os::unix::net::UnixStream;\n\n\n\nuse anyhow::{Context, Result};\n\nuse log::trace;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::operation::Operation;\n\nuse crate::sockets;\n\nuse crate::themeable::Themeable;\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\nuse crate::{config::Config as ThconConfig, themeable::ConfigState};\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "thcon/src/app/atom.rs", "rank": 57, "score": 30.75044119853113 }, { "content": "//! 4. `$HOME/.alacritty.yml`\n\n\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::themeable::{ConfigError, ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n\nuse anyhow::{Context, Result};\n\nuse log::{debug, error};\n\nuse regex::{Captures, Regex};\n\nuse serde::Deserialize;\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n\n light: String,\n\n dark: String,\n", "file_path": "thcon/src/app/alacritty.rs", "rank": 58, "score": 30.707112633706494 }, { "content": "\n\nuse anyhow::anyhow;\n\nuse anyhow::{Context, Result};\n\nuse serde::Deserialize;\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\npub struct MacOS;\n\n\n\nimpl Themeable for MacOS {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n ConfigState::with_default_config(config.macos.as_ref().map(|c| c.inner.as_ref()))\n\n }\n\n\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> {\n\n match self.config_state(config) {\n", "file_path": "thcon/src/app/macos.rs", "rank": 59, "score": 26.92088353094223 }, { "content": "#[derive(Debug, 
Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n\n dark: ConfigSection,\n\n light: ConfigSection,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\n#[derive(Debug, Default, Serialize, Deserialize)]\n\npub struct ConfigSection {\n\n colorscheme: Option<String>,\n\n r#let: Option<Map<String, JsonValue>>,\n\n set: Option<Map<String, JsonValue>>,\n\n setglobal: Option<Map<String, JsonValue>>,\n\n}\n\n\n\nimpl ConfigSection {\n\n /// Renders a `ConfigSection` instance as a valid `vimrc` file, using vim-standard syntax.\n\n /// This works mostly because single-line JSON representations of non-booleans seem to be valid\n\n /// vimscript.\n", "file_path": "thcon/src/app/vim.rs", "rank": 60, "score": 26.699458397178446 }, { "content": " light: String,\n\n dark: String,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\npub struct GnomeShell {}\n\n\n\nimpl Themeable for GnomeShell {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n ConfigState::with_manual_config(config.gnome_shell.as_ref().map(|c| c.inner.as_ref()))\n\n }\n\n\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> {\n\n let config = match self.config_state(config) {\n\n ConfigState::NoDefault => {\n\n return Err(ConfigError::RequiresManualConfig(\"gnome_shell\").into())\n\n }\n\n ConfigState::Default => unreachable!(),\n\n ConfigState::Disabled => return Ok(()),\n", "file_path": "thcon/src/app/gnome_shell.rs", "rank": 61, "score": 26.0372044211359 }, { "content": "pub struct _Config {\n\n dark: String,\n\n light: String,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct WireConfig {\n\n profile: String,\n\n}\n\n\n\npub struct Iterm2;\n\nimpl Themeable for Iterm2 {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n ConfigState::with_manual_config(config.iterm2.as_ref().map(|c| c.inner.as_ref()))\n\n }\n\n\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> 
Result<()> {\n\n let config = match self.config_state(config) {\n", "file_path": "thcon/src/app/iterm2.rs", "rank": 62, "score": 25.26069592158152 }, { "content": "use crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::themeable::{ConfigError, ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n\nuse std::time::Duration;\n\nuse std::vec::Vec;\n\n\n\nuse anyhow::{Context, Result};\n\nuse dbus::arg::Dict;\n\nuse dbus::arg::Variant;\n\nuse dbus::blocking::Connection;\n\nuse gio::SettingsExt;\n\nuse log::debug;\n\nuse serde::Deserialize;\n\nuse xml::reader::{EventReader, XmlEvent};\n\n\n\n#[derive(Debug, Deserialize, Disableable, AppConfig)]\n\npub struct _Config {\n", "file_path": "thcon/src/app/gnome_terminal.rs", "rank": 63, "score": 24.761798592786118 }, { "content": "//! | dark.let | table | Set of key/value pairs to apply with `:let` in dark mode | (none) |\n\n//!\n\n\n\nuse std::fs;\n\nuse std::io;\n\nuse std::io::prelude::*;\n\nuse std::path::PathBuf;\n\n\n\nuse anyhow::{Context, Result};\n\nuse log::{debug, trace};\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::{Map, Value as JsonValue};\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::sockets;\n\nuse crate::themeable::{ConfigError, ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n", "file_path": "thcon/src/app/vim.rs", "rank": 64, "score": 23.958390480165747 }, { "content": " config: Option<String>,\n\n #[serde(default)]\n\n disabled: bool,\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Alacritty {}\n\n\n\nimpl Themeable for Alacritty {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n ConfigState::with_manual_config(config.alacritty.as_ref().map(|c| c.inner.as_ref()))\n\n }\n\n\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> {\n\n let config = match self.config_state(config) {\n\n 
ConfigState::NoDefault => {\n\n return Err(ConfigError::RequiresManualConfig(\"alacritty\").into())\n\n }\n\n ConfigState::Default => unreachable!(),\n\n ConfigState::Disabled => return Ok(()),\n", "file_path": "thcon/src/app/alacritty.rs", "rank": 65, "score": 23.443808787168877 }, { "content": "//! Switches between Light and Dark [appearances](https://support.apple.com/en-us/HT208976) in macOS.\n\n//!\n\n//! ## Usage\n\n//! There's no configuration required! `thcon dark` will enable dark mode on macOS, and\n\n//! `thcon light` will disable it, but this behavior can be disabled with `disabled = true`.\n\n//!\n\n//! ## `thcon.toml` Schema\n\n//! Section: `macos`\n\n//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n\n\nuse crate::config::Config as ThconConfig;\n\nuse crate::operation::Operation;\n\nuse crate::themeable::{ConfigState, Themeable};\n\nuse crate::AppConfig;\n\nuse crate::Disableable;\n\n\n\nuse std::process::Command;\n", "file_path": "thcon/src/app/macos.rs", "rank": 66, "score": 23.236824012134363 }, { "content": "# KDE Plasma\n\n\n\n::: tip NO CONFIGURATION REQUIRED\n\n`thcon` can switch between the default KDE Plasma light and dark themes immediately after installation with no\n\nconfiguration.\n\n:::\n\n\n\n## Usage: Linux & BSD\n\nKDE Plasma already ships with a commandline tool to switch global UI themes: `lookandfeeltool`. `thcon` simply shells out to that command, so configuring `thcon` requires a brief interaction with it if you don't like the default themes.\n\n\n\nRun `lookandfeeltool --list` to show all available theme packages. 
Choose the theme packages you want for light and dark mode, then list those in your `thcon.toml`, e.g.:\n\n\n\n```toml\n\n[plasma]\n\ndark = \"org.kde.breezedark.desktop\" # the default dark theme\n\nlight = \"org.kde.breeze.desktop\" # the default light theme\n\n```\n\n\n\n## Usage: Windows & macOS\n\nKDE Plasma is not supported on these platforms.\n\n\n\n## Config Schema\n\nSection: `plasma`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The theme package name to use in dark mode | `org.kde.breezedark.desktop` |\n\n| `light` | string | The theme package name to use in light mode | `org.kde.breeze.desktop` |\n", "file_path": "docs-site/docs/app/plasma.md", "rank": 67, "score": 22.400994475645582 }, { "content": "# Terminal.app\n\n\n\n## Usage\n\n[Terminal.app](https://support.apple.com/guide/terminal/welcome/mac)'s default \"Basic\" profile has is aware of the macOS dark mode setting, and will react accordingly. No other themes appear to behave that way, however. 
For manually-added (or imported) profiles, simply list the names of the desired light mode and dark mode profiles in your `thcon.toml`.\n\n\n\n```toml\n\n[terminal-app]\n\ndark = \"Pro\"\n\nlight = \"Silver Aerogel\"\n\n```\n\n\n\n## Config Schema\n\nSection: `terminal-app`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The name of the profile to use in dark mode | Pro |\n\n| `light` | string | The name of the profile to use in light mode | Basic |\n", "file_path": "docs-site/docs/app/terminal.app.md", "rank": 68, "score": 21.85894071318938 }, { "content": " .collect(),\n\n disabled: false,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Serialize)]\n\npub struct WireConfig {\n\n #[serde(rename = \"core.themes\")]\n\n themes: Vec<String>,\n\n}\n\n\n\nimpl Themeable for Atom {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n ConfigState::with_default_config(config.atom.as_ref().map(|c| c.inner.as_ref()))\n\n }\n\n\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> {\n\n let default_config = _Config::default();\n\n\n", "file_path": "thcon/src/app/atom.rs", "rank": 69, "score": 21.1973826714222 }, { "content": "# iTerm2\n\n\n\n::: warning PLUGIN REQUIRED\n\n[iTerm2](https://iterm2.com) includes light and dark colorschemes by default, but requires [a plugin](https://github.com/theme-controller/thcon-iterm2) and manually-created profiles to interact with `thcon`.\n\n:::\n\n\n\n## Usage\n\n1. Install [thcon-iterm2](https://github.com/theme-controller/thcon-iterm2) by downloading its source and running `make install`:\n\n\n\n```sh:no-line-numbers\n\ngit clone https://github.com/theme-controller/thcon-iterm2.git\n\ncd thcon-iterm2\n\nmake install\n\n```\n\n\n\n2. 
If you haven't already, create an iTerm2 profile for light mode and another for dark mode via Preferences > Profiles.\n\n\n\n3. In your `thcon.toml`, list the name of the profiles to use in dark mode and light mode:\n\n\n\n```toml\n\n[iterm2]\n\ndark = \"dark and brooding\"\n\nlight = \"light and jovial\"\n\n```\n\n\n\n## Config Schema\n\nSection: `iterm2`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The name of the profile to use in dark mode | (none) |\n\n| `light` | string | The name of the profile to use in light mode | (none) |\n", "file_path": "docs-site/docs/app/iterm2.md", "rank": 70, "score": 20.962894058280316 }, { "content": "## Using `thcon` for Older Versions\n\nIf you're stuck on an older version of VSCode --- or want more granular control over VSCode themes than synchronizing to the OS would provide --- `thcon` can still help!\n\n\n\n\n\nVisual Studio Code monitors its `settings.json` file for changes while it's running. Because that `settings.json` file can include comments, the simplest way to preserve existing whitespace and comments is by looking for a magic comment annotating the `workbench.colorTheme` setting.\n\n\n\nIn your `settings.json`, mark the `workspace.colorTheme` line so `thcon` can find it, and be sure to disable `window.autoDetectColorScheme`:\n\n\n\n```json\n\n{\n\n // ... 
other settings\n\n\n\n \"window.autoDetectColorScheme\": false,\n\n \"workbench.colorTheme\": \"\" // thcon:replace-line\n\n}\n\n```\n\n\n\nIn your `thcon.toml`, define light and dark themes:\n\n\n\n```toml\n\n[vscode]\n\ndark = \"Solarized Dark\"\n\nlight = \"Solarized Light\"\n\n\n\n# optionally, tell thcon where your settings.json is stored\n\nconfig = \"/path/to/settings.json\"\n\n```\n\n\n\n### Config Schema\n\nSection: `vscode`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The theme to use in dark mode | Default Dark+ |\n\n| `light` | string | The theme to use in light mode | Default Light+ |\n\n| `config` | string | Absolute path to your `settings.json` file | `~/.config/Code/User/settings.json` |\n", "file_path": "docs-site/docs/app/vscode.md", "rank": 71, "score": 20.478653224449516 }, { "content": "//! # thcon\n\n//! 
Switches multiple apps between light and dark mode\n\n\n\n#![deny(clippy::all)]\n\n\n\nmod config;\n\nmod operation;\n\nmod themeable;\n\n\n\npub mod app;\n\npub mod dirs;\n\npub mod sockets;\n\npub use config::Config;\n\npub use operation::Operation;\n\npub use thcon_macro::AppConfig;\n\npub use thcon_macro::Disableable;\n\npub use thcon_trait::Disableable;\n\npub use themeable::{ConfigState, Themeable};\n\n\n\nmod switch;\n\npub use switch::switch;\n", "file_path": "thcon/src/lib.rs", "rank": 72, "score": 20.110536613241766 }, { "content": "---\n\ntitle: Alacritty\n\n---\n\n\n\n# Alacritty\n\n\n\n::: warning MANUAL CONFIGURATION\n\n[Alacritty](https://github.com/alacritty/alacritty) requires manual setup to be controlled by `thcon`.\n\n:::\n\n\n\nSince alacritty is configured via [yaml](https://yaml.org/), using anchors and aliases is the simplest way of managing color schemes.\n\n\n\n## Usage\n\nIn your `alacritty.yml`, define your colors\n\n\n\n```yaml\n\n# define your color themes:\n\n\n\nsolarized: &solarized_dark\n\n # ^^^^^^^^^^^^^^ - use this name in thcon.toml\n\n primary:\n\n background: '0x002b36'\n\n foreground: '0x839496'\n\n # ... 
the normal contents of a `colors` object\n\n\n\nlight_solarized: &solarized_light:\n\n # ^^^^^^^^^^^^^^^ - use this name in thcon.toml\n\n primary:\n\n background: '0xfdf6e3'\n\n foreground: '0x586e75'\n\n\n\n# then choose your color scheme one last time:\n\ncolors: *solarized_light # thcon:replace-line\n\n\n\n# thcon will manage the line ending in `thcon:replace-line`\n\n# to swap alacritty color schemes\n\n```\n\n\n\nIn your `thcon.toml`, define light and dark themes based on the `&anchor`s defined above:\n\n\n\n```toml\n\n[alacritty]\n\ndark = \"solarized_dark\"\n\nlight = \"solarized_light\"\n\n\n\n# optionally, tell thcon where your alacritty config is stored\n\nconfig = \"/path/to/alacritty.yml\"\n\n```\n\n\n\n## Config Schema\n\nSection: `alacritty`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The YAML anchor (declared in `alacritty.yml`) used for dark mode | (none) |\n\n| `light` | string | The YAML anchor (declared in `alacritty.yml`) used for light mode | (none) |\n\n| `config` | string | Absolute path to your `alacritty.yml` file | (see below) |\n\n\n\n### Default value for `config`\n\nThcon checks all default locations that `alacritty` [defines for alacritty.yml](https://github.com/alacritty/alacritty#configuration):\n\n\n\n* Windows: `%APPDATA%\\alacritty\\alacritty.yml`\n\n* Other platforms:\n\n 1. `$XDG_CONFIG_HOME/alacritty/alacritty.yml`\n\n 2. `$XDG_CONFIG_HOME/alacritty.yml`\n\n 3. `$HOME/.config/alacritty/alacritty.yml`\n\n 4. 
`$HOME/.alacritty.yml`\n", "file_path": "docs-site/docs/app/alacritty.md", "rank": 73, "score": 20.006580418983738 }, { "content": "## Config Schema\n\nSection: `sublime-text`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `true` |\n\n| `light` | table | Settings to apply in light mode | |\n\n| `light.color_scheme` | string | The `color_scheme` to use in light mode | `Packages/Color Scheme - Default/Celeste.sublime-color-scheme` |\n\n| `light.theme` | string | The `theme` to use in light mode | `Adaptive.sublime-theme` |\n\n| `dark` | table | Settings to apply in dark mode | |\n\n| `light.color_scheme` | string | The `color_scheme` to use in dark mode | `Packages/Color Scheme - Default/Monokai.sublime-color-scheme` |\n\n| `light.theme` | string | The `theme` to use in dark mode | `Default.sublime-theme` |\n\n| `preferences` | string | Absolute path to your `Preferences.sublime-settings` file | Default Sublime Text 3 locations: <ul><li>Linux/BSD: `~/.config/sublime-text-3/Packages/User/Preferences.sublime-settings`</li><li>macOS: `~/Library/Application Support/Sublime Text 3/Packages/User/Preferences.sublime-settings`</li></ul> |\n", "file_path": "docs-site/docs/app/sublime-text-3.md", "rank": 74, "score": 19.749384575644708 }, { "content": "//! [sublime-text.dark]\n\n//! color_scheme = \"Packages/Color Scheme - Default/Monokai.sublime-color-scheme\"\n\n//! theme = \"Default.sublime-theme\"\n\n//!\n\n//! [sublime-text.light]\n\n//! color_scheme = \"Packages/Color Scheme - Default/Celeste.sublime-color-scheme\"\n\n//! theme = \"Adaptive.sublime-theme\"\n\n//! ```\n\n//!\n\n//! ## `thcon.toml` Schema\n\n//! Section: `sublime-text`\n\n//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `true` |\n\n//! 
| `light` | table | Settings to apply in light mode | |\n\n//! | `light.color_scheme` | string | The `color_scheme` to use in light mode | `Packages/Color Scheme - Default/Celeste.sublime-color-scheme` |\n\n//! | `light.theme` | string | The `theme` to use in light mode | `Adaptive.sublime-theme` |\n\n//! | `dark` | table | Settings to apply in dark mode | |\n\n//! | `light.color_scheme` | string | The `color_scheme` to use in dark mode | `Packages/Color Scheme - Default/Monokai.sublime-color-scheme` |\n", "file_path": "thcon/src/app/sublime_text.rs", "rank": 75, "score": 19.684187255218816 }, { "content": "//! ```\n\n//!\n\n//! ## `thcon.toml` Schema\n\n//! Section: `alacritty`\n\n//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n//! | `dark` | string | The YAML anchor (declared in `alacritty.yml`) used for dark mode | (none) |\n\n//! | `light` | string | The YAML anchor (declared in `alacritty.yml`) used for light mode | (none) |\n\n//! | `config` | string | Absolute path to your `alacritty.yml` file | (see below) |\n\n//!\n\n//! ### Default value for `config`\n\n//! Thcon checks all default locations that `alacritty` [defines for alacritty.yml](https://github.com/alacritty/alacritty#configuration):\n\n//!\n\n//! * Windows: `%APPDATA%\\alacritty\\alacritty.yml`\n\n//! * Other platforms:\n\n//! 1. `$XDG_CONFIG_HOME/alacritty/alacritty.yml`\n\n//! 2. `$XDG_CONFIG_HOME/alacritty.yml`\n\n//! 3. `$HOME/.config/alacritty/alacritty.yml`\n", "file_path": "thcon/src/app/alacritty.rs", "rank": 76, "score": 19.54635271091874 }, { "content": "//! Switches between [iTerm2](https://iterm2.com) profiles in all windows, tabs, and sessions.\n\n//!\n\n//! ## Usage: macOS\n\n//! Install [thcon-iterm2](https://github.com/sjbarag/thcon-iterm2) by downloading its source and\n\n//! running `make install`. 
In your `thcon.toml`, list the name of the profiles to use in dark\n\n//! mode and light mode:\n\n//!\n\n//! ```toml\n\n//! [iterm2]\n\n//! dark = \"dark and brooding\"\n\n//! light = \"light and jovial\"\n\n//! ```\n\n//!\n\n//! ## Usage: Windows, Linux & BSD\n\n//! iTerm2 is only available on macOS, so this module is only usable on macOS as a result.\n\n//!\n\n//! ## `thcon.toml` Schema\n\n//! Section: `iterm2`\n\n//!\n\n//! | Key | Type | Description | Default |\n", "file_path": "thcon/src/app/iterm2.rs", "rank": 77, "score": 19.489023493739168 }, { "content": "pub struct Gtk {}\n\n\n\nimpl Themeable for Gtk {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n ConfigState::with_default_config(config.gtk.as_ref().map(|c| c.inner.as_ref()))\n\n }\n\n\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> {\n\n let default_config = _Config::default();\n\n\n\n let config = match self.config_state(config) {\n\n ConfigState::NoDefault => unreachable!(),\n\n ConfigState::Disabled => return Ok(()),\n\n ConfigState::Default => &default_config,\n\n ConfigState::Enabled => config.gtk.as_ref().unwrap().unwrap_inner_left(),\n\n };\n\n\n\n let theme = match operation {\n\n Operation::Darken => &config.dark,\n\n Operation::Lighten => &config.light,\n", "file_path": "thcon/src/app/gtk.rs", "rank": 78, "score": 19.276747369252682 }, { "content": "# GNOME Terminal\n\n\n\n## Usage\n\nGNOME Terminal instances can be discovered and controlled via DBus, but it's a cumbersome process to perform in a one-liner. 
`thcon` simplifies that - just list the IDs of the GNOME Terminal profiles you prefer in light mode and in dark mode in your `thcon.toml`, e.g.:\n\n\n\n```toml\n\n[gnome-terminal]\n\ndark = \"f25b5812-61d9-4469-8009-142721cfd35c\"\n\nlight = \"7f6e0978-dee1-48e8-8212-50dc1810a720\"\n\n```\n\n\n\nProfile IDs are easily copied from the bottom corner of the GNOME Terminal Preferences window:\n\n\n\n![gnome terminal preferences](./gnome-terminal-profile.png)\n\n\n\n## Config Schema\n\nSection: `gnome-terminal`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The ID of the profile (case-sensitive) to use in dark mode | (none) |\n\n| `light` | string | The ID of the profile (case-sensitive) to use in light mode | (none) |\n\n\n", "file_path": "docs-site/docs/app/gnome-terminal.md", "rank": 79, "score": 18.938560466639125 }, { "content": " ConfigState::NoDefault => {\n\n return Err(ConfigError::RequiresManualConfig(\"iterm2\").into())\n\n }\n\n ConfigState::Default => unreachable!(),\n\n ConfigState::Disabled => return Ok(()),\n\n ConfigState::Enabled => config.iterm2.as_ref().unwrap().unwrap_inner_left(),\n\n };\n\n\n\n let profile_name = match operation {\n\n Operation::Darken => &config.dark,\n\n Operation::Lighten => &config.light,\n\n };\n\n let wire_format = WireConfig {\n\n profile: profile_name.to_string(),\n\n };\n\n let payload = serde_json::to_vec(&wire_format).unwrap_or_default();\n\n\n\n let addr = sockets::socket_addr(\"iterm2\", false);\n\n if let Ok(mut stream) = UnixStream::connect(&addr) {\n\n trace!(\"Writing to socket at {}\", &addr.display());\n\n stream\n\n .write_all(&payload)\n\n .with_context(|| format!(\"Unable to write to scoket at {}\", addr.display()))?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "thcon/src/app/iterm2.rs", "rank": 80, "score": 18.90909364714039 }, { "content": "# 
GNOME Shell\n\n\n\n::: warning GNOME EXTENSION REQUIRED\n\nGNOME Shell user themes require the [User Themes extension](https://extensions.gnome.org/extension/19/user-themes/) to be enabled.\n\n:::\n\n\n\n## Usage: Linux & BSD\n\nWith the [User Themes extension](https://extensions.gnome.org/extension/19/user-themes/) installed and enabled, simply provide the name of the theme as displayed in the User Themes extension config (either via GNOME Extensions or GNOME Tweaks), e.g.:\n\n\n\n```toml\n\n[gnome-shell]\n\nlight = \"Arc\"\n\ndark = \"Arc-Dark-solid\"\n\n```\n\n\n\n## Usage: Windows & macOS\n\nCurrently unsupported.\n\n\n\n## Config Schema\n\nSection: `gnome-shell`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | ------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The name of the theme (case-sensitive) to apply in dark mode | (none) |\n\n| `light` | string | The name of the theme (case-sensitive) to apply in light mode | (none) |\n", "file_path": "docs-site/docs/app/gnome-shell.md", "rank": 81, "score": 18.811772565791042 }, { "content": "# GTK\n\n\n\n## Usage: Linux & BSD\n\nThe active GTK application theme is managed by [dconf](https://developer.gnome.org/dconf/unstable/dconf-overview.html), and is typically accessed as a user via something like [Gnome Tweaks](https://wiki.gnome.org/Apps/Tweaks) or [KDE GTK Configurator](https://invent.kde.org/plasma/kde-gtk-config). `thcon` can manage GTK theme switching quite simply by reading the desired theme names from `thcon.toml`, e.g.:\n\n\n\n```toml\n\n[gtk]\n\ndark = HighContrastInverse\n\nlight = HighContrast\n\n```\n\nThe value should be the name of the desired theme as reported in Gnome Tweaks, or its filename in `/usr/themes/`, `/usr/local/themes/` or `~/.themes/` if you don't have Gnome Tweaks installed. 
Invalid values default to `Adwaita` (light mode).\n\n\n\n## Usage: Windows & macOS\n\nCurrently unsupported.\n\n\n\n## Config Schema\n\nSection: `gtk`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | ------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | string | The name of the theme (case-sensitive) to apply in dark mode | `Adwaita-dark` |\n\n| `light` | string | The name of the theme (case-sensitive) to apply in light mode | `Adwaita` |\n\n\n", "file_path": "docs-site/docs/app/gtk.md", "rank": 82, "score": 18.643523308642376 }, { "content": "## `thcon.toml` Schema\n\nSection: `vim` or `nvim`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `light` | table | Settings to apply in light mode | (none) |\n\n| `light.colorscheme` | string | The colorscheme to apply in light mode | (none) |\n\n| `light.set` | table | Set of key/value pairs to apply with `:set` in light mode | (none) |\n\n| `light.setglobal` | table | Set of key/value pairs to apply with `:setglobal` in light mode | (none) |\n\n| `light.let` | table | Set of key/value pairs to apply with `:let` in light mode | (none) |\n\n| `dark` | table | Settings to apply in dark mode | (none) |\n\n| `dark.colorscheme` | string | The colorscheme to apply in dark mode | (none) |\n\n| `dark.set` | table | Set of key/value pairs to apply with `:set` in dark mode | (none) |\n\n| `dark.setglobal` | table | Set of key/value pairs to apply with `:setglobal` in dark mode | (none) |\n\n| `dark.let` | table | Set of key/value pairs to apply with `:let` in dark mode | (none) |\n\n\n", "file_path": "docs-site/docs/app/vim.md", "rank": 83, "score": 18.260164446980813 }, { "content": "impl Themeable for Plasma {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n 
ConfigState::with_default_config(config.plasma.as_ref().map(|c| c.inner.as_ref()))\n\n }\n\n\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> {\n\n let default_config = _Config::default();\n\n\n\n let config = match self.config_state(config) {\n\n ConfigState::NoDefault => unreachable!(),\n\n ConfigState::Disabled => return Ok(()),\n\n ConfigState::Default => &default_config,\n\n ConfigState::Enabled => config.plasma.as_ref().unwrap().unwrap_inner_left(),\n\n };\n\n\n\n let theme = match operation {\n\n Operation::Lighten => &config.light,\n\n Operation::Darken => &config.dark,\n\n };\n\n\n", "file_path": "thcon/src/app/plasma.rs", "rank": 84, "score": 17.088676694563603 }, { "content": " let proxy = self.dbus.with_proxy(\n\n \"org.gnome.Terminal\",\n\n \"/org/gnome/Terminal/window\",\n\n Duration::from_millis(2500),\n\n );\n\n let (xml,): (String,) = proxy\n\n .method_call(\"org.freedesktop.DBus.Introspectable\", \"Introspect\", ())\n\n .context(\"Unable to retrieve gnome-terminal windows from DBus\")?;\n\n\n\n let parser = EventReader::from_str(&xml);\n\n let mut depth = 0;\n\n\n\n let mut window_ids: Vec<String> = vec![];\n\n\n\n for e in parser {\n\n match e {\n\n Ok(XmlEvent::StartElement {\n\n name, attributes, ..\n\n }) => {\n\n if depth == 1 && name.local_name == \"node\" {\n", "file_path": "thcon/src/app/gnome_terminal.rs", "rank": 85, "score": 17.00710076296311 }, { "content": "---\n\ntitle: Vim & Neovim\n\n---\n\n\n\n# Vim & Neovim\n\n\n\n::: warning PLUGIN REQUIRED\n\nMost Vim and Neovim distributions include a light and dark theme by default, but all distributions require [a plugin](https://github.com/theme-controller/thcon-vim) to interact with `thcon`.\n\n:::\n\n\n\n## Windows\n\nWindows is not yet supported by `thon`, but `vim`/`nvim` under WSL should work just fine.\n\n\n\n## macOS & Linux\n\nInstall [thcon.vim](https://github.com/theme-controller/thcon.vim) via your `.vimrc` or `init.vim`\n\nusing your preferred 
plugin manager ([dein.vim](https://github.com/Shougo/dein.vim) and [vim-plug](https://github.com/junegunn/vim-plug) are popular options). Lower in your init script, add these lines to load and detect changes from `thcon`:\n\n\n\n```vim\n\ncall thcon#load() \" load previously-applied settings as defaults\n\ncall thcon#listen() \" listen for new settings to be applied with `thcon`\n\n```\n\n\n\nIn your `thcon.toml`, define light and dark themes. All values within 'dark' and 'light' are\n\noptional (blank values cause no changes):\n\n\n\n```toml\n\n[vim]\n\nlight.colorscheme = \"shine\"\n\ndark.colorscheme = \"blue\"\n\n```\n\n\n\nor:\n\n\n\n```toml\n\n[vim.light]\n\ncolorscheme = \"shine\"\n\n\n\n[vim.dark]\n\ncolorscheme = \"blue\"\n\n```\n\n\n\nor:\n\n\n\n```toml\n\n[neovim]\n\ndark.colorscheme = \"default\"\n\ndark.set.background = \"dark\"\n\ndark.let.\"g:lightline\" = { colorscheme = \"ayu_dark\" }\n\nlight.colorscheme = \"shine\"\n\nlight.set.background = \"light\"\n\nlight.let.\"g:lightline\" = { colorscheme = \"ayu_light\" }\n\n```\n\n\n\nFeel free to use whichever syntax you prefer &mdash; or any other &mdash; as long as it's valid TOML.\n\n\n", "file_path": "docs-site/docs/app/vim.md", "rank": 86, "score": 17.004054896921655 }, { "content": "\n\n#[cfg(test)]\n\nmod test {\n\n use super::SublimeText;\n\n use crate::themeable::Themeable;\n\n use crate::{Config as ThconConfig, ConfigState};\n\n\n\n #[test]\n\n fn disabled_by_default() {\n\n let st = SublimeText {};\n\n let config: ThconConfig = serde_json::from_str(\"{}\").unwrap();\n\n\n\n assert_eq!(st.config_state(&config), ConfigState::Disabled);\n\n }\n\n}\n", "file_path": "thcon/src/app/sublime_text.rs", "rank": 87, "score": 16.92394403617956 }, { "content": "//! light.let.\"g:lightline\" = { colorscheme = \"ayu_light\" }\n\n//! ```\n\n//!\n\n//! Feel free to use whichever syntax you prefer (or any other), as long as it's valid TOML.\n\n//!\n\n//! ## `thcon.toml` Schema\n\n//! 
Section: `vim` or `nvim`\n\n//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n//! | light | table | Settings to apply in light mode | (none) |\n\n//! | light.colorscheme | string | The colorscheme to apply in light mode | (none) |\n\n//! | light.set | table | Set of key/value pairs to apply with `:set` in light mode | (none) |\n\n//! | light.setglobal | table | Set of key/value pairs to apply with `:setglobal` in light mode | (none) |\n\n//! | light.let | table | Set of key/value pairs to apply with `:let` in light mode | (none) |\n\n//! | dark | table | Settings to apply in dark mode | (none) |\n\n//! | dark.colorscheme | string | The colorscheme to apply in dark mode | (none) |\n\n//! | dark.set | table | Set of key/value pairs to apply with `:set` in dark mode | (none) |\n\n//! | dark.setglobal | table | Set of key/value pairs to apply with `:setglobal` in dark mode | (none) |\n", "file_path": "thcon/src/app/vim.rs", "rank": 88, "score": 16.40408630039992 }, { "content": "use crate::app::vscode;\n\nuse serde::Deserialize;\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Config {\n\n #[cfg(dbus)]\n\n pub plasma: Option<plasma::Config>,\n\n #[cfg(dbus)]\n\n pub konsole: Option<konsole::Config>,\n\n #[cfg(dbus)]\n\n #[serde(rename = \"gnome-shell\")]\n\n pub gnome_shell: Option<gnome_shell::Config>,\n\n #[cfg(dbus)]\n\n #[serde(rename = \"gnome-terminal\")]\n\n pub gnome_terminal: Option<gnome_terminal::Config>,\n\n #[cfg(dbus)]\n\n pub gtk: Option<gtk::Config>,\n\n pub vscode: Option<vscode::Config>,\n\n pub alacritty: Option<alacritty::Config>,\n\n pub vim: Option<vim::Config>,\n", "file_path": "thcon/src/config.rs", "rank": 89, "score": 16.176572230682737 }, { "content": "use std::fmt;\n\n\n\n#[derive(Debug)]\n\npub enum Operation {\n\n Darken,\n\n Lighten,\n\n}\n\n\n\nimpl fmt::Display for Operation {\n\n fn 
fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let as_str = match &self {\n\n Self::Darken => \"darken\",\n\n Self::Lighten => \"lighten\",\n\n };\n\n\n\n write!(f, \"{}\", as_str)\n\n }\n\n}\n", "file_path": "thcon/src/operation.rs", "rank": 90, "score": 15.867818985036827 }, { "content": "---\n\ntitle: Atom\n\n---\n\n\n\n# Atom\n\n\n\n::: warning PLUGIN REQUIRED\n\n[Atom](https://atom.io) includes a light and dark theme by default, but requires [a plugin](https://github.com/theme-controller/thcon-atom) to interact with `thcon`.\n\n:::\n\n\n\n## Usage\n\nFirst, ensure you've installed [thcon-atom](https://github.com/theme-controller/thcon-atom):\n\n\n\n```sh:no-line-numbers\n\napm install thcon\n\n```\n\n\n\nIf you like the default light and dark UI and syntax themes, you're done! If you prefer other color schemes, you'll need to add those themes in `thcon.toml`. These can be copy-pasted from the `core.themes` property in your `config.cson`. Simply get Atom looking right in dark mode, copy those themes into `thcon.toml`, then repeat for light mode.\n\n\n\n```toml\n\n[atom]\n\ndark = [ \"one-dark-ui\", \"one-dark-syntax\" ]\n\nlight = [ \"one-light-ui\", \"one-light-syntax\" ]\n\n```\n\n\n\n## Config Schema\n\nSection: `atom`\n\n\n\n| Key | Type | Description | Default |\n\n| --- | ---- | ----------- | -------- |\n\n| `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n\n| `dark` | array of strings | The themes to apply in dark mode, as shown in `config.cson` | `[\"one-dark-ui\", \"one-dark-syntax\"]` |\n\n| `light` | array of strings | The themes to apply in dark mode, as shown in `config.cson` | `[\"one-light-ui\", \"one-light-syntax\"]` |\n", "file_path": "docs-site/docs/app/atom.md", "rank": 91, "score": 15.029031669465859 }, { "content": "#[cfg(mac)]\n\npub use iterm2::Iterm2;\n\n#[cfg(mac)]\n\npub use macos::MacOS;\n\n#[cfg(mac)]\n\npub use terminal_dot_app::TerminalDotApp;\n\n\n\nuse 
crate::themeable::Themeable;\n\nuse std::option::Option;\n\n\n", "file_path": "thcon/src/app.rs", "rank": 92, "score": 14.990512644658478 }, { "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\n\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\nAlso I copied this intro verbatim from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\n\n\n\n## [Unreleased]\n\n\n\n## [v0.13.2 - 2021-11-06]\n\n### Changed:\n\n* Sublime Text is now disabled by default, since ST4 is able to sync with system dark-mode\n\n\n\n## [v0.13.1 - 2021-11-05]\n\n### Fixed:\n\n* Systems without a VSCode `settings.json` no longer report errors when switching themes\n\n\n\n## [v0.13.0 - 2021-09-18]\n\n### Added:\n\n* Per-application timing is printed when run with `-vvv` (trace-level verbosity)\n\n* Web-based documentation hosted at https://thcon.vercel.app\n\n\n\n### Fixed:\n\n* No longer requires `thcon.toml` to exist on-disk for `thcon` to run\n\n\n\n### Changed:\n\n* `dark` and `light` are now positional arguments instead of subcommands, which better represents how `thcon` should be\n\n used.\n\n* Verbose output (`-v` through `-vvv`) is a bit more pretty\n\n\n\n## [v0.12.0 - 2021-06-29]\n\n### Added:\n\n* Terminal.app profile switching\n\n\n\n### Fixed:\n\n* Documented `disabled` property in app schemas (supported since v0.10.0)\n\n* Corrected documented KDE Konsole section name in `thcon.toml`. 
`konsole` was always supported, but the documentation\n\n was wrong\n\n\n\n## [v0.11.0 - 2021-06-28]\n\n### Added:\n\n* GTK theme switching\n\n* GNOME Shell user theme switching\n\n\n\n## [v0.10.0 - 2021-06-20]\n\n### Added:\n\n* Allow apps to be disabled with `disabled = true` in their config section\n\n* Establish default values for Atom, KDE Plasma, Sublime Text, and Visual Studio Code\n\n\n\n## [v0.9.0 - 2021-03-30]\n\n### Added:\n\n* Support for [Atom](https://atom.io) via an [app-side plugin](https://github.com/theme-controller/thcon-atom)\n\n* Writes vim/nvim rc file to disk so new instances use previously-applied settings. Requires\n\n [thcon.vim](https://github.com/theme-controller/thcon.vim) v0.4.0 (at least commit 39b6d82 (v0.4.0, 2021-03-24))\n\n\n", "file_path": "CHANGELOG.md", "rank": 93, "score": 14.884265395946972 }, { "content": "use std::time::Instant;\n\n\n\nuse anyhow::{anyhow, Result};\n\nuse log::{error, info, trace};\n\n\n\nuse crate::app;\n\nuse crate::{Config, ConfigState, Operation};\n\n\n", "file_path": "thcon/src/switch.rs", "rank": 94, "score": 14.55363567914149 }, { "content": "pub struct VSCode;\n\n\n\nimpl VSCode {\n\n fn settings_json_path(&self) -> PathBuf {\n\n [\n\n dirs::config_dir().unwrap().to_str().unwrap(),\n\n \"Code\",\n\n \"User\",\n\n \"settings.json\",\n\n ]\n\n .iter()\n\n .collect()\n\n }\n\n}\n\n\n\nimpl Themeable for VSCode {\n\n fn config_state(&self, config: &ThconConfig) -> ConfigState {\n\n ConfigState::with_default_config(config.vscode.as_ref().map(|c| c.inner.as_ref()))\n\n }\n\n\n", "file_path": "thcon/src/app/vscode.rs", "rank": 95, "score": 14.485624801496467 }, { "content": "//! Switches between [Atom](https://atom.io) UI and editor themes in all windows and tabs.\n\n//!\n\n//! ## Usage: All Platforms\n\n//! Install [thcon-atom](https://github.com/theme-controller/thcon-atom) with `apm install thcon`.\n\n//! In `thcon.toml`, define a list of themes to apply in dark mode and light mode. These can be\n\n//! 
copy-pasted from the `core.themes` property in your `config.cson`. You can easily get Atom\n\n//! looking right in dark mode, copy those themes into `thcon.toml`, then repeat for light mode.\n\n//!\n\n//! ```toml\n\n//! [atom]\n\n//! dark = [ \"one-dark-ui\", \"one-dark-syntax\" ]\n\n//! light = [ \"one-light-ui\", \"one-light-syntax\" ]\n\n//! ```\n\n//!\n\n//! ## `thcon.toml` Schema\n\n//! Section: `atom`\n\n//!\n\n//! | Key | Type | Description | Default |\n\n//! | --- | ---- | ----------- | -------- |\n\n//! | `disabled` | boolean | `true` to disable theming of this app, otherwise `false` | `false` |\n", "file_path": "thcon/src/app/atom.rs", "rank": 96, "score": 14.061151089913738 }, { "content": "### Fixed:\n\n* Stop logging errors for unconfigured applications (silently skips unconfigured\n\n apps instead)\n\n* Don't require apps to be listed individually on CLI (try all apps by default\n\n with `thcon dark` / `thcon light`)\n\n* Suppress spurious error messages when switching `plasma` themes\n\n* Explicitly listed `thcon.toml` section to use for `vim`/`nvim`\n\n* Lots of lint failures\n\n\n\n### Changed:\n\n* The `plasma` app (KDE Plasma) is now only available on non-macOS, non-Windows\n\n platforms\n\n\n\n## [v0.3.0] - 2021-01-18\n\n\n\n### Removed:\n\n* `Themeable::toggle()`, which was previously used to implicitly switch\n\n from light to dark (or vice-versa, depending on the current state) has\n\n been removed. 
It may return in a future release, but improving app\n\n support is more important right now.\n\n\n\n### Added:\n\n* `vim` & `neovim` theme switching via [thcon.vim](https://github.com/sjbarag/thcon.vim)\n\n* [This changelog](./CHANGELOG.md)\n\n\n\n### Changed:\n\n* Started documenting per-app configuration direction in crates to\n\n enable serving via https://docs.rs\n\n\n\n\n\n## [v0.2.0] - 2020-10-27\n\n \n\n### Added:\n\n* macOS global theme switching\n\n* alacritty color switching\n\n* First set of per-app documentation (may not be the correct place\n\n for those to live long-term though?)\n\n \n\n### Fixes:\n\n* Allow installation on systems that don't support DBus\n\n* Don't crash if ~/.config/thcon/thcon.toml doesn't exist (helpful on\n\n macOS, where there's no configuration required to switch between light\n\n and dark modes)\n", "file_path": "CHANGELOG.md", "rank": 97, "score": 14.052862733971534 }, { "content": "\n\n let theme = match operation {\n\n Operation::Darken => &config.dark,\n\n Operation::Lighten => &config.light,\n\n };\n\n\n\n if let Ok(windows) = self.get_window_ids() {\n\n debug!(\n\n \"Found {} {}\",\n\n windows.len(),\n\n if windows.len() == 1 {\n\n \"window\"\n\n } else {\n\n \"windows\"\n\n },\n\n );\n\n for window_id in windows.iter() {\n\n self.set_profile(window_id, theme)?;\n\n }\n\n }\n\n\n\n let gsettings = gio::Settings::new(\"org.gnome.Terminal.ProfilesList\");\n\n gsettings\n\n .set_string(\"default\", theme)\n\n .map(|_| gio::Settings::sync())\n\n .with_context(|| format!(\"Unable to set default gnome-terminal profile '{}'\", theme))\n\n }\n\n}\n", "file_path": "thcon/src/app/gnome_terminal.rs", "rank": 98, "score": 13.973429290839654 }, { "content": " );\n\n if config_state == ConfigState::Default {\n\n return ConfigState::Disabled;\n\n }\n\n config_state\n\n }\n\n\n\n fn switch(&self, config: &ThconConfig, operation: &Operation) -> Result<()> {\n\n let config = match self.config_state(config) {\n\n ConfigState::NoDefault 
=> unreachable!(),\n\n ConfigState::Disabled => return Ok(()),\n\n ConfigState::Default => unreachable!(),\n\n ConfigState::Enabled => config.sublime_text.as_ref().unwrap().unwrap_inner_left(),\n\n };\n\n\n\n let section = match operation {\n\n Operation::Darken => &config.dark,\n\n Operation::Lighten => &config.light,\n\n };\n\n\n", "file_path": "thcon/src/app/sublime_text.rs", "rank": 99, "score": 13.933368540417378 } ]
Rust
src/kvm.rs
yodalee/rrxv6
bf3b076b28e8fbb5de00e63748b5e27a829d6634
use lazy_static::lazy_static; use spin::Mutex; use rv64::csr::satp::{Satp, SatpMode}; use rv64::asm::sfence_vma; use crate::vm::page_table::{PageTable, PageTableLevel}; use crate::vm::addr::{VirtAddr, PhysAddr}; use crate::vm::page_flag::PteFlag; use crate::riscv::{PAGESIZE, MAXVA}; use crate::memorylayout::{UART0, PLIC_BASE, TRAMPOLINE, KERNELBASE, PHYSTOP}; use crate::kalloc::kalloc; lazy_static! { static ref KERNELPAGE: Mutex<u64> = Mutex::new(0); } pub fn init_kvm() { extern "C" { static _trampoline: usize; static _etext: usize; } let ptrampoline: u64 = unsafe { &_trampoline as *const usize as u64 }; let petext: u64 = unsafe { &_etext as *const usize as u64 }; let root_page: &mut PageTable = unsafe { &mut *(kalloc() as *mut PageTable) }; let mut root_page_lock = KERNELPAGE.lock(); *root_page_lock = root_page as *const _ as u64; drop(root_page_lock); kvmmap(VirtAddr::new(UART0), PhysAddr::new(UART0), PAGESIZE, PteFlag::PTE_READ | PteFlag::PTE_WRITE); kvmmap(VirtAddr::new(PLIC_BASE), PhysAddr::new(PLIC_BASE), 0x400000, PteFlag::PTE_READ | PteFlag::PTE_WRITE); kvmmap(VirtAddr::new(KERNELBASE), PhysAddr::new(KERNELBASE), petext - KERNELBASE, PteFlag::PTE_READ | PteFlag::PTE_EXEC); kvmmap(VirtAddr::new(petext), PhysAddr::new(petext), PHYSTOP as u64 - petext, PteFlag::PTE_READ | PteFlag::PTE_WRITE); kvmmap(VirtAddr::new(TRAMPOLINE), PhysAddr::new(ptrampoline), PAGESIZE, PteFlag::PTE_READ | PteFlag::PTE_EXEC); } pub fn init_page() { let mut satp = Satp::from_bits(0); let ptr = unsafe { get_root_page() }; satp.set_mode(SatpMode::ModeSv39); satp.set_addr(ptr as *const _ as u64); satp.write(); sfence_vma(); } pub unsafe fn get_root_page() -> &'static mut PageTable { let addr = *KERNELPAGE.lock(); let ptr: *mut PageTable = addr as *mut PageTable; &mut *ptr } fn kvmmap(va: VirtAddr, pa: PhysAddr, size: u64, perm: PteFlag) { match map_pages(va, pa, size, perm) { Ok(_) => {}, Err(e) => panic!("mappages error: {}", e), } } fn map_pages(va: VirtAddr, mut pa: PhysAddr, size: 
u64, perm: PteFlag) -> Result<(), &'static str> { let page_table = unsafe { get_root_page() }; let va_start = va.align_down(); let va_end = VirtAddr::new_truncate(va.as_u64() + size - 1).align_down(); let mut page_addr = va_start; loop { map_page(page_table, page_addr, pa, perm, PageTableLevel::Two)?; if page_addr == va_end { break; } page_addr += PAGESIZE; pa += PAGESIZE; } Ok(()) } fn map_page(page_table: &mut PageTable, va: VirtAddr, pa: PhysAddr, perm: PteFlag, level: PageTableLevel) -> Result<(), &'static str> { if va >= VirtAddr::new(MAXVA) { return Err("map_page: virtual address over MAX address") } let index = va.get_index(level); let pte = &mut page_table[index]; match level.next_level() { None => { if pte.is_unused() { pte.set_addr(pa.as_pte(), perm | PteFlag::PTE_VALID); Ok(()) } else { Err("map_page: remap") } }, Some(next_level) => { if pte.is_unused() { let ptr = kalloc(); if ptr == 0 as *mut u8 { return Err("kalloc failed in map_page"); } let addr = PhysAddr::new(ptr as *const _ as u64); pte.set_addr(addr.as_pte(), PteFlag::PTE_VALID); } let next_table = unsafe { &mut *(pte.addr() as *mut PageTable) }; map_page(next_table, va, pa, perm, next_level) } } }
use lazy_static::lazy_static; use spin::Mutex; use rv64::csr::satp::{Satp, SatpMode}; use rv64::asm::sfence_vma; use crate::vm::page_table::{PageTable, PageTableLevel}; use crate::vm::addr::{VirtAddr, PhysAddr}; use crate::vm::page_flag::PteFlag; use crate::riscv::{PAGESIZE, MAXVA}; use crate::memorylayout::{UART0, PLIC_BASE, TRAMPOLINE, KERNELBASE, PHYSTOP}; use crate::kalloc::kalloc; lazy_static! { static ref KERNELPAGE: Mutex<u64> = Mutex::new(0); } pub fn init_kvm() { extern "C" { static _trampoline: usize; static _etext: usize; } let ptrampoline: u64 = unsafe { &_trampoline as *const usize as u64 }; let petext: u64 = unsafe { &_etext as *const usize as u64 }; let root_page: &mut PageTable = unsafe { &mut *(kalloc() as *mut PageTable) }; let mut root_page_lock = KERNELPAGE.lock(); *root_page_lock = root_page as *const _ as u64; drop(root_page_lock); kvmmap(VirtAddr::new(UART0), PhysAddr::new(UART0), PAGESIZE, PteFlag::PTE_READ | PteFlag::PTE_WRITE); kvmmap(VirtAddr::new(PLIC_BASE), PhysAddr::new(PLIC_BASE), 0x400000, PteFlag::PTE_READ | PteFlag::PTE_WRITE); kvmmap(VirtAddr::new(KERNELBASE), PhysAddr::new(KERNELBASE), petext - KERNELBASE, PteFlag::PTE_READ | PteFlag::PTE_EXEC); kvmmap(VirtAddr::new(petext), PhysAddr::new(petext), PHYSTOP as u64 - petext, PteFlag::PTE_READ | PteFlag::PTE_WRITE); kvmmap(VirtAddr::new(TRAMPOLINE), PhysAddr::new(ptrampoline), PAGESIZE, PteFlag::PTE_READ | PteFlag::PTE_EXEC); } pub fn init_page() { let mut satp = Satp::from_bits(0); let ptr = unsafe { get_root_page() }; satp.set_mode(SatpMode::ModeSv39); satp.set_addr(ptr as *const _ as u64); satp.write(); sfence_vma(); } pub unsafe fn get_root_page() -> &'static mut PageTable { let addr = *KERNELPAGE.lock(); let ptr: *mut PageTable = addr as *mut PageTable; &mut *ptr } fn kvmmap(va: VirtAddr, pa: PhysAddr, size: u64, perm: PteFlag) { match map_pages(va, pa, size, perm) { Ok(_) => {}, Err(e) => panic!("mappages error: {}", e), } } fn map_pages(va: VirtAddr, mut pa: PhysAddr, size: 
u64, perm: PteFlag) -> Result<(), &'static str> { let page_table = unsafe { get_root_page() }; let va_start = va.align_down(); let va_end = VirtAddr::new_truncate(va.as_u64() + size - 1).align_down(); let mut page_addr = va_start; loop { map_page(page_table, page_addr, pa, perm, PageTableLevel::Two)?; if page_addr == va_end { break; } page_addr += PAGESIZE; pa += PAGESIZE; } Ok(()) } fn map_page(page_table: &mut PageTable, va: VirtAddr, pa: PhysAddr, perm: PteFlag, level: PageTableLevel) -> Result<(), &'static str> { if va >= VirtAddr::new(MAXVA) { return Err("map_page: virtual address over MAX address") } let index = va.get_index(level); let pte = &mut page_table[index]; match level.next_level() { None => { if pte.is_unused() { pte.set_addr(pa.as_pte(), perm | PteFlag::PTE_VALID); Ok(()) } else { Err("map_page: remap") } }, Some(next_level) => { if pte.is_unused() { let ptr = kalloc();
if ptr == 0 as *mut u8 { return Err("kalloc failed in map_page"); } let addr = PhysAddr::new(ptr as *const _ as u64); pte.set_addr(addr.as_pte(), PteFlag::PTE_VALID); } let next_table = unsafe { &mut *(pte.addr() as *mut PageTable) }; map_page(next_table, va, pa, perm, next_level) } } }
function_block-function_prefix_line
[ { "content": "/// Allocate one 4096-byte page of physical memory.\n\n/// Returns a pointer that the kernel can use.\n\n/// Returns 0 if the memory cannot be allocated.\n\npub fn kalloc() -> *mut u8 {\n\n unsafe {\n\n let layout = Layout::from_size_align(PAGESIZE as usize, 4096).unwrap();\n\n let ptr = alloc(layout);\n\n write_bytes(ptr, 0x0, PAGESIZE as usize);\n\n return ptr;\n\n }\n\n}\n", "file_path": "src/kalloc.rs", "rank": 3, "score": 134747.9555613553 }, { "content": "// Must be called with interrupts disabled,\n\n// to prevent race with process being moved\n\n// to a different CPU.\n\npub fn get_cpuid() -> u64 {\n\n tp::read()\n\n}\n", "file_path": "src/cpu.rs", "rank": 4, "score": 94142.75676507037 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n // build directory for this crate\n\n let out_dir = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n\n\n // extend the library search path\n\n println!(\"cargo:rustc-link-search={}\", out_dir.display());\n\n\n\n // put `linker.ld` in the build directory\n\n File::create(out_dir.join(\"linker.ld\"))?.write_all(include_bytes!(\"linker.ld\"))?;\n\n\n\n // assemble the assembly file\n\n Build::new()\n\n .file(\"src/asm/entry.S\")\n\n .file(\"src/asm/kernelvec.S\")\n\n .file(\"src/asm/trampoline.S\")\n\n .compile(\"asm\");\n\n\n\n // rebuild if `entry.s` changed\n\n println!(\"cargo:rerun-if-changed=src/entry.S\");\n\n // rebuild if `kernelvec.s` changed\n\n println!(\"cargo:rerun-if-changed=src/kernelvec.S\");\n\n\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 5, "score": 92540.71178298804 }, { "content": "#[inline]\n\npub fn clint_mtimecmp(hart: u64) -> u64 { CLINT + 0x4000 + 8 * hart }\n\n\n\n// qemu puts platform-level interrupt controller (PLIC) here.\n\npub const PLIC_BASE : u64 = 0x0c000000;\n\npub const PLIC_PRIORITY : u64 = PLIC_BASE + 0x0;\n\npub const PLIC_PENDING : u64 = PLIC_BASE + 0x1_000;\n\npub const PLIC_ENABLE : u64 = PLIC_BASE + 0x2_000;\n\npub const PLIC_THRESHOLD : u64 = 
PLIC_BASE + 0x200_000;\n\npub const PLIC_CLAIM : u64 = PLIC_BASE + 0x200_004;\n\n\n\n// RAM from physical address 0x8000_0000 to PHYSTOP\n\n// 128 MB available\n\npub const KERNELBASE : u64 = 0x8000_0000;\n\npub const PHYSTOP : u64 = KERNELBASE + 128 * 1024 * 1024;\n\n\n\n// map the trampoline page to the highest address in both user and kernel space\n\npub const TRAMPOLINE : u64 = riscv::MAXVA - riscv::PAGESIZE;\n\n\n\n// map kernel stacks beneath the trampoline,\n\n// each surrounded by invalid guard pages.\n", "file_path": "src/memorylayout.rs", "rank": 6, "score": 83154.98277984897 }, { "content": "pub fn init_heap() {\n\n extern \"C\" {\n\n // _END defined in linker.ld\n\n static _END: usize;\n\n }\n\n\n\n let heap_start: usize = unsafe {\n\n &_END as *const usize as usize\n\n };\n\n let heap_end = memorylayout::PHYSTOP as usize;\n\n let heap_size = heap_end - heap_start;\n\n unsafe {\n\n ALLOCATOR\n\n .lock()\n\n .init(heap_start, heap_size)\n\n }\n\n}\n\n\n", "file_path": "src/kalloc.rs", "rank": 7, "score": 83098.52855957815 }, { "content": "/// disable device interrupt\n\npub fn intr_off() {\n\n let mut sstatus = Sstatus::from_read();\n\n sstatus.disable_interrupt(Mode::SupervisedMode);\n\n sstatus.write();\n\n}\n\n\n", "file_path": "src/trap.rs", "rank": 10, "score": 63731.73263362495 }, { "content": "#[no_mangle]\n\npub fn kerneltrap() {\n\n let sepc = Sepc::from_read();\n\n let sstatus = Sstatus::from_read();\n\n\n\n if sstatus.get_spp() != Mode::SupervisedMode {\n\n panic!(\"kerneltrap: not from supervised mode\");\n\n }\n\n if sstatus.get_sie() {\n\n panic!(\"kerneltrap: interrupts enabled\");\n\n }\n\n\n\n interrupt_handler();\n\n\n\n sepc.write();\n\n sstatus.write();\n\n}\n", "file_path": "src/trap.rs", "rank": 11, "score": 63731.73263362495 }, { "content": "#[no_mangle]\n\npub fn main() -> ! 
{\n\n if get_cpuid() == 0 {\n\n let mut m_uart = UART.lock();\n\n m_uart.puts(\"rrxv6 start\\n\");\n\n drop(m_uart);\n\n\n\n init_heap(); // initialize physical memory allocator\n\n init_kvm(); // initialize kernel page table\n\n init_page(); // initialize virtual memory\n\n init_proc(); // initialize process table\n\n init_harttrap(); // install kernel trap vector\n\n init_plic(); // initialize PLIC interrupt controller\n\n init_hartplic(); // ask PLIC for device interrupt\n\n\n\n let mut m_uart = UART.lock();\n\n m_uart.puts(\"OS started\\n\");\n\n drop(m_uart);\n\n }\n\n intr_on();\n\n\n\n loop {}\n\n}\n\n\n\n#[global_allocator]\n\nstatic ALLOCATOR: LockedHeap = LockedHeap::empty();\n\n\n", "file_path": "src/main.rs", "rank": 12, "score": 63731.73263362495 }, { "content": "/// enable device interrupt\n\npub fn intr_on() {\n\n let mut sstatus = Sstatus::from_read();\n\n sstatus.enable_interrupt(Mode::SupervisedMode);\n\n sstatus.write();\n\n}\n\n\n", "file_path": "src/trap.rs", "rank": 13, "score": 63731.73263362495 }, { "content": "pub fn init_proc() {\n\n}\n", "file_path": "src/proc.rs", "rank": 14, "score": 61521.7689802002 }, { "content": "// setup to take exceptions and traps in supervisor mode\n\npub fn init_harttrap() {\n\n let mut stvec = Stvec::from_bits(0);\n\n stvec.set_addr(kernelvec as u64);\n\n stvec.write();\n\n}\n\n\n", "file_path": "src/trap.rs", "rank": 15, "score": 61521.7689802002 }, { "content": "pub fn init_plic() {\n\n let plic = Plic::new();\n\n plic.set_priority(UART0_IRQ, 1);\n\n plic.set_priority(VIRTIO0_IRQ, 1);\n\n}\n\n\n", "file_path": "src/plic.rs", "rank": 16, "score": 61521.7689802002 }, { "content": "pub fn init_hartplic() {\n\n let hart = get_cpuid();\n\n let plic = Plic::new();\n\n plic.set_enable(hart, PlicContext::Supervisor, UART0_IRQ);\n\n plic.set_threshold(hart, PlicContext::Supervisor, 0);\n\n}\n", "file_path": "src/plic.rs", "rank": 17, "score": 61521.7689802002 }, { "content": "fn handle_external_interrupt() {\n\n let 
plic = Plic::new();\n\n let hart = get_cpuid();\n\n let irq = plic.get_claim(hart, PlicContext::Supervisor);\n\n\n\n match irq {\n\n UART0_IRQ => {\n\n let mut uart = UART.lock();\n\n uart.handle_interrupt();\n\n }\n\n }\n\n\n\n if irq != 0 {\n\n plic.set_complete(hart, PlicContext::Supervisor, irq);\n\n }\n\n}\n\n\n", "file_path": "src/trap.rs", "rank": 18, "score": 48276.84672188288 }, { "content": "#[alloc_error_handler]\n\nfn alloc_error_handler(layout: Layout) -> ! {\n\n panic!(\"allocation error {:?}\", layout);\n\n}\n", "file_path": "src/main.rs", "rank": 19, "score": 43153.09730675684 }, { "content": "#[panic_handler]\n\nfn panic(panic_info: &PanicInfo<'_>) -> ! {\n\n let mut m_uart = UART.lock();\n\n m_uart.puts(&format!(\"{}\", panic_info));\n\n loop {}\n\n}\n", "file_path": "src/start.rs", "rank": 20, "score": 38325.6344143992 }, { "content": "#[no_mangle]\n\nfn start() -> ! {\n\n extern \"Rust\" {\n\n fn main() -> !;\n\n }\n\n\n\n /* Set M Previous Privilege mode to SupervisedMode\n\n * so mret will switch to supervise mode\n\n */\n\n let mut mstatus = mstatus::Mstatus::from_read();\n\n mstatus.set_mpp(mstatus::Mode::SupervisedMode);\n\n mstatus.write();\n\n\n\n // Setup M exception program counter for mret\n\n Mepc::from_bits(main as u64).write();\n\n\n\n // Disable paging for now\n\n Satp::from_bits(0).write();\n\n\n\n // Delegate all interrupts and exceptions to supervisor mode\n\n Medeleg::from_bits(0xffff).write();\n", "file_path": "src/start.rs", "rank": 21, "score": 28850.09607552461 }, { "content": "fn tick() {\n\n let mut tick = TICK.lock();\n\n *tick += 1;\n\n}\n\n\n", "file_path": "src/trap.rs", "rank": 22, "score": 28850.09607552461 }, { "content": "// setup timer and timer interrupt\n\nfn init_timer() {\n\n let mhartid = Mhartid::from_read().bits();\n\n\n\n let mtimecmpaddr = memorylayout::clint_mtimecmp(mhartid);\n\n unsafe {\n\n let val = core::ptr::read_volatile(memorylayout::CLINT_MTIME as *mut u64);\n\n 
core::ptr::write_volatile(mtimecmpaddr as *mut u64, val + INTERVAL);\n\n }\n\n unsafe {\n\n let arr = &mut TIMER_SCRATCH[mhartid as usize];\n\n arr[3] = mtimecmpaddr;\n\n arr[4] = INTERVAL;\n\n Mscratch::from_bits(arr.as_ptr() as u64).write();\n\n }\n\n\n\n // set the machine mode trap handler\n\n let mtvec = Mtvec::from_bits(timervec as u64);\n\n mtvec.write();\n\n\n\n // Enable machine interrupt in mstatus\n\n let mut mstatus = mstatus::Mstatus::from_read();\n\n mstatus.enable_interrupt(mstatus::Mode::MachineMode);\n\n mstatus.write();\n\n\n\n let mut mie = Mie::from_read();\n\n mie.set_machine_enable(Interrupt::TimerInterrupt);\n\n mie.write();\n\n}\n\n\n", "file_path": "src/start.rs", "rank": 23, "score": 27646.82260372527 }, { "content": "fn interrupt_handler() {\n\n let scause = Scause::from_read();\n\n let code = scause.get_code();\n\n\n\n if scause.is_interrupt() {\n\n match code {\n\n x if x == Interrupt::SupervisorExternal as u64 => handle_external_interrupt(),\n\n x if x == Interrupt::SupervisorSoftware as u64 => handle_software_interrupt(),\n\n _ => panic!(\"Illegal interrupt code\"),\n\n }\n\n }\n\n}\n\n\n\n/// interrupts and exceptions from kernel code go here via kernelvec,\n\n/// on whatever the current kernel stack is.\n", "file_path": "src/trap.rs", "rank": 24, "score": 27646.82260372527 }, { "content": "fn handle_software_interrupt() {\n\n if get_cpuid() == 0 {\n\n tick();\n\n }\n\n\n\n let mut sip = Sip::from_read();\n\n sip.clear_pending(1);\n\n sip.write();\n\n}\n\n\n", "file_path": "src/trap.rs", "rank": 25, "score": 26567.36617350261 }, { "content": "\n\nuse alloc::alloc::alloc;\n\nuse alloc::alloc::Layout;\n\nuse crate::memorylayout;\n\nuse crate::riscv::PAGESIZE;\n\nuse crate::ALLOCATOR;\n\n\n\nuse core::ptr::write_bytes;\n\n\n", "file_path": "src/kalloc.rs", "rank": 26, "score": 25520.124976222796 }, { "content": "\n\nuse super::addr::VirtAddr;\n\n\n\npub struct Page {\n\n start_address: VirtAddr,\n\n}\n", "file_path": "src/vm/page.rs", 
"rank": 27, "score": 23402.16864349332 }, { "content": " pub fn new(addr: u64) -> Self {\n\n Self::try_new(addr).expect(&format!(\"Virtual address in riscv should have bit 39-63 copied from bit 38 {}\", addr))\n\n }\n\n\n\n /// Try to create a new virtual address.\n\n #[inline]\n\n pub fn try_new(addr: u64) -> Result<VirtAddr, InvalidVirtAddr> {\n\n match addr.get_bits(38..64) {\n\n 0 | 0x3ffffff => Ok(VirtAddr(addr)), // valid address\n\n 1 => Ok(VirtAddr::new_truncate(addr)), // address need sign extend\n\n _ => Err(InvalidVirtAddr{}),\n\n }\n\n }\n\n\n\n /// Create a VirtAddr with signed extension\n\n #[inline]\n\n pub fn new_truncate(addr: u64) -> Self {\n\n Self(((addr << 25) as i64 >> 25) as u64)\n\n }\n\n\n", "file_path": "src/vm/addr.rs", "rank": 28, "score": 23003.995012473195 }, { "content": " pub const fn p3_index(self) -> PageTableIndex {\n\n PageTableIndex::new_truncate((self.0 >> 9 >> 9 >> 9 >> 12) as u16)\n\n }\n\n\n\n /// Return the 9 bits page table index according to level\n\n #[inline]\n\n pub const fn get_index(self, level: PageTableLevel) -> PageTableIndex {\n\n match level {\n\n PageTableLevel::Zero => self.p0_index(),\n\n PageTableLevel::One => self.p1_index(),\n\n PageTableLevel::Two => self.p2_index(),\n\n PageTableLevel::Three => self.p3_index(),\n\n }\n\n }\n\n}\n\n\n\nimpl Add<u64> for VirtAddr {\n\n type Output = Self;\n\n\n\n #[inline]\n", "file_path": "src/vm/addr.rs", "rank": 29, "score": 23003.448666340835 }, { "content": "#[derive(Debug)]\n\npub struct InvalidPhysAddr;\n\n\n\nimpl PhysAddr {\n\n #[inline]\n\n pub fn new(addr: u64) -> Self {\n\n Self::try_new(addr).expect(&format!(\"Physical address in riscv should have bit 56-63 zeroed {}\", addr))\n\n }\n\n\n\n /// Try to create a new physical address.\n\n #[inline]\n\n pub fn try_new(addr: u64) -> Result<PhysAddr, InvalidPhysAddr> {\n\n match addr.get_bits(56..64) {\n\n 0 => Ok(PhysAddr(addr)), // valid address\n\n _ => Err(InvalidPhysAddr{}),\n\n }\n\n }\n\n\n\n /// Create a 
PhysAddr with zeroed bit 56-64\n\n #[inline]\n", "file_path": "src/vm/addr.rs", "rank": 30, "score": 23001.856260476598 }, { "content": " #[inline]\n\n pub const fn p0_index(self) -> PageTableIndex {\n\n PageTableIndex::new_truncate((self.0 >> 12) as u16)\n\n }\n\n\n\n /// Return the 9 bits level 1 page table index from offset [21,29]\n\n #[inline]\n\n pub const fn p1_index(self) -> PageTableIndex {\n\n PageTableIndex::new_truncate((self.0 >> 9 >> 12) as u16)\n\n }\n\n\n\n /// Return the 9 bits level 2 page table index from offset [30,38]\n\n #[inline]\n\n pub const fn p2_index(self) -> PageTableIndex {\n\n PageTableIndex::new_truncate((self.0 >> 9 >> 9 >> 12) as u16)\n\n }\n\n\n\n /// Return the 9 bits level 3 page table index from offset [39,47]\n\n /// Only valid with sv47 mode\n\n #[inline]\n", "file_path": "src/vm/addr.rs", "rank": 31, "score": 23001.080891874808 }, { "content": " #[inline]\n\n pub fn as_u64(self) -> u64 {\n\n self.0\n\n }\n\n\n\n #[inline]\n\n pub fn align_down(self) -> Self {\n\n Self(\n\n align_down(self.0, 4096)\n\n )\n\n }\n\n\n\n #[inline]\n\n pub fn align_up(self) -> Self {\n\n Self(\n\n align_up(self.0, 4096)\n\n )\n\n }\n\n\n\n /// Return the 9 bits level 0 page table index from offset [12,20]\n", "file_path": "src/vm/addr.rs", "rank": 32, "score": 23000.965577042003 }, { "content": "use bit_field::BitField;\n\nuse super::page_table::{PageTableIndex, PageTableLevel};\n\n\n\nuse core::ops::{Add, AddAssign, Sub, SubAssign};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct InvalidVirtAddr;\n\n\n\n#[derive(Clone,Copy,PartialEq,Eq,PartialOrd,Ord)]\n\npub struct VirtAddr(u64);\n\n\n\n/// A 64-bits physical memory address.\n\n///\n\n/// A wrapper type around `u64`\n\n/// On riscv, only lower 56 bits can be used, top 8 bits must be zeroed.\n\n#[derive(Clone,Copy,PartialEq,Eq,PartialOrd,Ord)]\n\npub struct PhysAddr(u64);\n\n\n\nimpl VirtAddr {\n\n #[inline]\n", "file_path": "src/vm/addr.rs", "rank": 33, "score": 23000.94854423739 }, { "content": " 
#[inline]\n\n fn sub(self, rhs: usize) -> Self::Output {\n\n self - rhs as u64\n\n }\n\n}\n\n\n\nimpl SubAssign<usize> for PhysAddr {\n\n #[inline]\n\n fn sub_assign(&mut self, rhs: usize) {\n\n self.sub_assign(rhs as u64);\n\n }\n\n}\n\n\n\n#[inline]\n\npub const fn align_down(addr: u64, align: u64) -> u64 {\n\n assert!(align.is_power_of_two());\n\n addr & !(align -1)\n\n}\n\n\n\n#[inline]\n\npub const fn align_up(addr: u64, align: u64) -> u64 {\n\n assert!(align.is_power_of_two());\n\n align_down(addr + align - 1, align)\n\n}\n", "file_path": "src/vm/addr.rs", "rank": 34, "score": 22999.206245314497 }, { "content": " pub fn new_truncate(addr: u64) -> Self {\n\n Self(addr & ((1 << 56) - 1))\n\n }\n\n\n\n #[inline]\n\n pub fn as_u64(self) -> u64 {\n\n self.0\n\n }\n\n\n\n #[inline]\n\n pub fn as_pte(self) -> u64 {\n\n (self.0 >> 12) << 10\n\n }\n\n\n\n #[inline]\n\n pub fn align_down(self) -> Self {\n\n Self(\n\n align_down(self.0, 4096)\n\n )\n\n }\n", "file_path": "src/vm/addr.rs", "rank": 35, "score": 22998.860935352848 }, { "content": " fn add_assign(&mut self, rhs: u64) {\n\n *self = *self + rhs;\n\n }\n\n}\n\n\n\nimpl Add<usize> for PhysAddr {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn add(self, rhs: usize) -> Self::Output {\n\n self + rhs as u64\n\n }\n\n}\n\n\n\nimpl AddAssign<usize> for PhysAddr {\n\n #[inline]\n\n fn add_assign(&mut self, rhs: usize) {\n\n self.add_assign(rhs as u64)\n\n }\n\n}\n", "file_path": "src/vm/addr.rs", "rank": 36, "score": 22998.020502441468 }, { "content": "\n\nimpl AddAssign<usize> for VirtAddr {\n\n #[inline]\n\n fn add_assign(&mut self, rhs: usize) {\n\n self.add_assign(rhs as u64)\n\n }\n\n}\n\n\n\nimpl Sub<u64> for VirtAddr {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn sub(self, rhs: u64) -> Self::Output {\n\n VirtAddr::new(self.0 - rhs)\n\n }\n\n}\n\n\n\nimpl SubAssign<u64> for VirtAddr {\n\n #[inline]\n\n fn sub_assign(&mut self, rhs: u64) {\n", "file_path": "src/vm/addr.rs", "rank": 37, "score": 
22997.79741420531 }, { "content": " *self = *self - rhs;\n\n }\n\n}\n\n\n\nimpl Sub<usize> for VirtAddr {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn sub(self, rhs: usize) -> Self::Output {\n\n self - rhs as u64\n\n }\n\n}\n\n\n\nimpl SubAssign<usize> for VirtAddr {\n\n #[inline]\n\n fn sub_assign(&mut self, rhs: usize) {\n\n self.sub_assign(rhs as u64);\n\n }\n\n}\n\n\n", "file_path": "src/vm/addr.rs", "rank": 38, "score": 22997.43003765248 }, { "content": " fn add(self, rhs: u64) -> Self::Output {\n\n VirtAddr::new(self.0 + rhs)\n\n }\n\n}\n\n\n\nimpl AddAssign<u64> for VirtAddr {\n\n #[inline]\n\n fn add_assign(&mut self, rhs: u64) {\n\n *self = *self + rhs;\n\n }\n\n}\n\n\n\nimpl Add<usize> for VirtAddr {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn add(self, rhs: usize) -> Self::Output {\n\n self + rhs as u64\n\n }\n\n}\n", "file_path": "src/vm/addr.rs", "rank": 39, "score": 22997.316194767856 }, { "content": "\n\nimpl Sub<u64> for PhysAddr {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn sub(self, rhs: u64) -> Self::Output {\n\n PhysAddr::new(self.0 - rhs)\n\n }\n\n}\n\n\n\nimpl SubAssign<u64> for PhysAddr {\n\n #[inline]\n\n fn sub_assign(&mut self, rhs: u64) {\n\n *self = *self - rhs;\n\n }\n\n}\n\n\n\nimpl Sub<usize> for PhysAddr {\n\n type Output = Self;\n\n\n", "file_path": "src/vm/addr.rs", "rank": 40, "score": 22996.876550655143 }, { "content": "\n\n #[inline]\n\n pub fn align_up(self) -> Self {\n\n Self(\n\n align_up(self.0, 4096)\n\n )\n\n }\n\n}\n\n\n\nimpl Add<u64> for PhysAddr {\n\n type Output = Self;\n\n\n\n #[inline]\n\n fn add(self, rhs: u64) -> Self::Output {\n\n PhysAddr::new(self.0 + rhs)\n\n }\n\n}\n\n\n\nimpl AddAssign<u64> for PhysAddr {\n\n #[inline]\n", "file_path": "src/vm/addr.rs", "rank": 41, "score": 22995.327563018625 }, { "content": "//! 
riscv page table\n\n\n\nuse core::ops::{Index, IndexMut};\n\nuse super::page_flag::PteFlag;\n\n\n\n// 4096 bytes / 8 bytes per entry = 512 entries\n\nconst ENTRY_COUNT: usize = 512;\n\n\n\n/// A 9-bits index for page table\n\npub struct PageTableIndex(u16);\n\n\n\nimpl PageTableIndex {\n\n /// Create a PageTableIndex from u16\n\n /// Will crash if the input > 512\n\n pub fn new(index: u16) -> Self {\n\n assert!((index as usize) < ENTRY_COUNT);\n\n Self (index)\n\n }\n\n\n\n /// Create a PageTableIndex from u16\n", "file_path": "src/vm/page_table.rs", "rank": 42, "score": 22134.408783485444 }, { "content": " pub fn set_addr(&mut self, addr: u64, perm: PteFlag) {\n\n // TODO: check aligned here\n\n self.entry = addr | perm.bits();\n\n }\n\n}\n\n\n\npub struct PageTable {\n\n entries: [PageTableEntry;ENTRY_COUNT]\n\n}\n\n\n\nimpl PageTable {\n\n /// Create empty PageTable\n\n #[inline]\n\n pub const fn new() -> Self {\n\n const EMPTY: PageTableEntry = PageTableEntry::new();\n\n Self {\n\n entries: [EMPTY;ENTRY_COUNT]\n\n }\n\n }\n\n}\n", "file_path": "src/vm/page_table.rs", "rank": 43, "score": 22134.014480936996 }, { "content": "\n\nimpl Index<usize> for PageTable {\n\n type Output = PageTableEntry;\n\n\n\n #[inline]\n\n fn index(&self, index: usize) -> &Self::Output {\n\n &self.entries[index]\n\n }\n\n}\n\n\n\nimpl IndexMut<usize> for PageTable {\n\n #[inline]\n\n fn index_mut(&mut self, index: usize) -> &mut Self::Output {\n\n &mut self.entries[index]\n\n }\n\n}\n\n\n\nimpl Index<PageTableIndex> for PageTable {\n\n type Output = PageTableEntry;\n\n\n", "file_path": "src/vm/page_table.rs", "rank": 44, "score": 22133.037167725077 }, { "content": " #[inline]\n\n fn index(&self, index: PageTableIndex) -> &Self::Output {\n\n &self.entries[usize::from(index.0)]\n\n }\n\n}\n\n\n\nimpl IndexMut<PageTableIndex> for PageTable {\n\n #[inline]\n\n fn index_mut(&mut self, index: PageTableIndex) -> &mut Self::Output {\n\n &mut self.entries[usize::from(index.0)]\n\n }\n\n}\n", 
"file_path": "src/vm/page_table.rs", "rank": 45, "score": 22132.833877908444 }, { "content": " pub const fn next_level(self) -> Option<Self> {\n\n match self {\n\n PageTableLevel::Three => Some(PageTableLevel::Two),\n\n PageTableLevel::Two => Some(PageTableLevel::One),\n\n PageTableLevel::One => Some(PageTableLevel::Zero),\n\n PageTableLevel::Zero => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone,Default)]\n\npub struct PageTableEntry {\n\n entry: u64\n\n}\n\n\n\nimpl PageTableEntry {\n\n // Create empty page table entry\n\n #[inline]\n\n pub const fn new() -> Self {\n\n Self {\n", "file_path": "src/vm/page_table.rs", "rank": 46, "score": 22132.821870507418 }, { "content": " /// Truncate the input if > 512\n\n pub const fn new_truncate(index: u16) -> Self {\n\n Self(index % ENTRY_COUNT as u16)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum PageTableLevel {\n\n /// Level 0, table of page\n\n Zero = 0,\n\n /// Level 1, table of page table\n\n One,\n\n /// Level 2, table of level 1 page table\n\n Two,\n\n /// Level 3, table of level 2 page table, only valid in sv48 mode\n\n Three,\n\n}\n\n\n\nimpl PageTableLevel {\n\n /// Return the next level\n", "file_path": "src/vm/page_table.rs", "rank": 47, "score": 22131.75961592749 }, { "content": " entry: 0\n\n }\n\n }\n\n\n\n // true if page is zero (unused)\n\n #[inline]\n\n pub const fn is_unused(&self) -> bool {\n\n self.entry == 0\n\n }\n\n\n\n #[inline]\n\n pub fn set_unused(&mut self) {\n\n self.entry = 0;\n\n }\n\n\n\n #[inline]\n\n pub fn addr(&self) -> u64 {\n\n (self.entry >> 10) << 12\n\n }\n\n\n", "file_path": "src/vm/page_table.rs", "rank": 48, "score": 22131.248492786985 }, { "content": "use bitflags::bitflags;\n\n\n\n// bit flag for page permission\n\nbitflags! 
{\n\n pub struct PteFlag: u64 {\n\n const PTE_VALID = 0x01;\n\n const PTE_READ = 0x02;\n\n const PTE_WRITE = 0x04;\n\n const PTE_EXEC = 0x08;\n\n const PTE_USER = 0x10;\n\n const PTE_GLOB = 0x20;\n\n const PTE_ACCES = 0x40;\n\n const PTE_DIRTY = 0x80;\n\n }\n\n}\n", "file_path": "src/vm/page_flag.rs", "rank": 49, "score": 22130.40582934901 }, { "content": "// Byte per page and big offset within a page\n\npub const PAGESIZE : u64 = 4096;\n\npub const PAGESHIFT : u64 = 12;\n\n\n\n// MAXVA marks the virtual address limitation\n\npub const MAXVA : u64 = 1 << (9 + 9 + 9 + 12 - 1);\n\n\n\n// Maximum Interrupt Count\n\npub const MAX_INTERRUPT : u64 = 1024;\n\n\n\npub enum Interrupt {\n\n SupervisorSoftware = 1,\n\n SupervisorTimer = 5,\n\n SupervisorExternal = 9,\n\n}\n\n\n\npub enum Exception {\n\n InstructionAddressMisaligned = 0,\n\n InstructionAccessFault = 1,\n\n IllegalInstruction = 2,\n", "file_path": "src/riscv.rs", "rank": 52, "score": 14.808596927146162 }, { "content": "pub const KSTACK : u64 = TRAMPOLINE - 2 * riscv::PAGESIZE;\n\n\n\n// User memory layout.\n\n// Address zero first:\n\n// text\n\n// original data and bss\n\n// fixed-size stack\n\n// expandable heap\n\n// ...\n\n// TRAPFRAME (p->trapframe, used by the trampoline)\n\n// TRAMPOLINE (the same page as in the kernel)\n\npub const TRAPFRAME : u64 = TRAMPOLINE - riscv::PAGESIZE;\n", "file_path": "src/memorylayout.rs", "rank": 54, "score": 12.8898388128764 }, { "content": " core::ptr::write_volatile(addr, threshold);\n\n }\n\n }\n\n\n\n /// Get PLIC current interupt id\n\n pub fn get_claim(&self, hart: u64, context: PlicContext) -> u32 {\n\n let addr = (PLIC_CLAIM +\n\n hart * 0x2000 +\n\n (context as u64) * 0x1000) as *mut u32;\n\n unsafe {\n\n core::ptr::read_volatile(addr)\n\n }\n\n }\n\n\n\n /// Mark irq complete\n\n pub fn set_complete(&self, hart: u64, context: PlicContext, id: u32) {\n\n assert!((id as u64) < MAX_INTERRUPT);\n\n let addr = (PLIC_CLAIM +\n\n hart * 0x2000 +\n\n (context as u64) * 
0x1000) as *mut u32;\n\n unsafe {\n\n core::ptr::write_volatile(addr, id);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/plic.rs", "rank": 55, "score": 12.090188239065984 }, { "content": " /// set id interrupt priority, zero is disabled\n\n pub fn set_priority(&self, id: u64, priority: u32) {\n\n let addr = (PLIC_BASE + 4 * id) as *mut u32;\n\n unsafe {\n\n core::ptr::write_volatile(addr, priority);\n\n }\n\n }\n\n\n\n /// Set interrupt enable\n\n pub fn set_enable(&self, hart: u64, context: PlicContext, id: u64) {\n\n assert!(id < MAX_INTERRUPT);\n\n let addr = (PLIC_ENABLE +\n\n hart * 0x100 +\n\n (context as u64) * 0x80 +\n\n (id / 32)) as *mut u32;\n\n unsafe {\n\n let val = core::ptr::read_volatile(addr);\n\n core::ptr::write_volatile(addr, val | (1u32 << (id % 32)));\n\n }\n\n }\n", "file_path": "src/plic.rs", "rank": 56, "score": 12.052681745399092 }, { "content": "\n\n /// Set interrupt enable\n\n pub fn set_disable(&self, hart: u64, context: PlicContext, id: u64) {\n\n assert!(id < MAX_INTERRUPT);\n\n let addr = (PLIC_ENABLE +\n\n hart * 0x100 +\n\n (context as u64) * 0x80 +\n\n (id / 32)) as *mut u32;\n\n unsafe {\n\n let val = core::ptr::read_volatile(addr);\n\n core::ptr::write_volatile(addr, val & !(1u32 << (id % 32)));\n\n }\n\n }\n\n\n\n /// Set threshold of interrupt of (hart, context)\n\n pub fn set_threshold(&self, hart: u64, context: PlicContext, threshold: u32) {\n\n let addr = (PLIC_THRESHOLD +\n\n hart * 0x2000 +\n\n (context as u64) * 0x1000) as *mut u32;\n\n unsafe {\n", "file_path": "src/plic.rs", "rank": 57, "score": 11.629391029328936 }, { "content": "#[no_mangle]\n\nstatic STACK0: [u8;param::OS_STACK_SIZE * param::NCPU] = [0;param::OS_STACK_SIZE * param::NCPU];\n\n\n\n#[no_mangle]\n\nstatic mut TIMER_SCRATCH: [[u64;5];param::NCPU] = [[0u64;5];param::NCPU];\n\n\n\nconst INTERVAL : u64 = 1000000;\n\n\n\nextern \"C\" {\n\n fn timervec();\n\n}\n\n\n\n// setup timer and timer interrupt\n", "file_path": "src/start.rs", "rank": 58, "score": 
11.147834439585868 }, { "content": " Some(self.p.thr.read())\n\n }\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn set_interrupt(&mut self, flag: IerFlag) {\n\n unsafe {\n\n self.p.ier.write(flag.bits());\n\n }\n\n }\n\n\n\n /// Handle an uart interrupt\n\n /// can be RX interrupt or TX interrupt\n\n pub fn handle_interrupt(&mut self) {\n\n // read input character\n\n while true {\n\n match self.readc() {\n\n Some(c) => {},\n\n None => break,\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/uart.rs", "rank": 59, "score": 10.870368642731915 }, { "content": "pub const NPROC: usize = 2;\n\npub const NCPU: usize = 8;\n\npub const STACK_SIZE: usize = 4096;\n\npub const OS_STACK_SIZE: usize = 8192;\n", "file_path": "src/param.rs", "rank": 60, "score": 9.245881430478924 }, { "content": "use rv64::csr::stvec::Stvec;\n\nuse rv64::csr::sstatus::{Sstatus, Mode};\n\nuse rv64::csr::scause::Scause;\n\nuse rv64::csr::sepc::Sepc;\n\nuse rv64::csr::sip::Sip;\n\n\n\nuse lazy_static::lazy_static;\n\nuse spin::Mutex;\n\nuse bit_field::BitField;\n\n\n\nuse crate::cpu::get_cpuid;\n\nuse crate::riscv::Interrupt;\n\nuse crate::uart::UART;\n\nuse crate::plic::{Plic, PlicContext};\n\n\n\nlazy_static! {\n\n static ref TICK: Mutex<u64> = Mutex::new(0);\n\n}\n\n\n\nextern \"C\" {\n\n fn kernelvec();\n\n}\n\n\n\n// setup to take exceptions and traps in supervisor mode\n", "file_path": "src/trap.rs", "rank": 61, "score": 8.96187998930925 }, { "content": "use volatile_register::RW;\n\nuse crate::memorylayout;\n\nuse bitflags::bitflags;\n\nuse lazy_static::lazy_static;\n\nuse spin::Mutex;\n\n\n\nlazy_static! {\n\n pub static ref UART: Mutex<Uart> = Mutex::new(Uart::new());\n\n}\n\n\n\nbitflags! 
{\n\n struct IerFlag: u8 {\n\n const DISABLE = 0;\n\n const RX_ENABLE = 1 << 0;\n\n const TX_ENABLE = 1 << 1;\n\n }\n\n\n\n struct LcrFlag: u8 {\n\n const LENGTH_5 = 0;\n\n const LENGTH_6 = 1;\n", "file_path": "src/uart.rs", "rank": 62, "score": 8.031520876461572 }, { "content": "pub mod addr;\n\npub mod page_flag;\n\npub mod page_table;\n", "file_path": "src/vm/mod.rs", "rank": 63, "score": 7.9838131547035545 }, { "content": " // enable transmit and receive interruit\n\n self.set_interrupt(IerFlag::RX_ENABLE | IerFlag::TX_ENABLE);\n\n }\n\n\n\n pub fn putc(&mut self, c: char) {\n\n while (self.p.lsr.read() & 0x40) == 0 {}\n\n unsafe {\n\n self.p.thr.write(c as u8);\n\n }\n\n }\n\n\n\n pub fn puts(&mut self, s: &str) {\n\n for c in s.chars() {\n\n self.putc(c);\n\n }\n\n }\n\n\n\n fn readc(&mut self) -> Option<u8> {\n\n if (self.p.lsr.read() & 0x01) != 0 {\n\n unsafe {\n", "file_path": "src/uart.rs", "rank": 64, "score": 7.840766174475716 }, { "content": "#![feature(asm)]\n\n#![feature(default_free_fn)]\n\n#![feature(alloc_error_handler)]\n\n#![feature(const_panic)]\n\n#![no_main]\n\n#![no_std]\n\n\n\n#[macro_use]\n\nextern crate alloc;\n\nextern crate rv64;\n\n\n\nmod cpu;\n\nmod kalloc;\n\nmod kvm;\n\nmod memorylayout;\n\nmod param;\n\nmod plic;\n\nmod proc;\n\nmod riscv;\n\nmod start;\n", "file_path": "src/main.rs", "rank": 65, "score": 7.703681151135265 }, { "content": " const LENGTH_7 = 2;\n\n const LENGTH_8 = 3;\n\n const DLAB = 1 << 7;\n\n }\n\n\n\n struct FcrFlag: u8 {\n\n const FIFO_ENABLE = 1 << 0;\n\n const FIFO_CLEAR_RX = 1 << 1;\n\n const FIFO_CLEAR_TX = 1 << 2;\n\n }\n\n}\n\n\n\npub struct Uart {\n\n p: &'static mut UartRegister\n\n}\n\n\n\n#[repr(C)]\n", "file_path": "src/uart.rs", "rank": 66, "score": 7.227764980596419 }, { "content": "//! 
the riscv Platform Level Interrupt Controller (PLIC).\n\n\n\nuse crate::cpu::get_cpuid;\n\nuse crate::memorylayout::{PLIC_BASE, PLIC_ENABLE, PLIC_THRESHOLD, PLIC_CLAIM, UART0_IRQ, VIRTIO0_IRQ};\n\nuse crate::riscv::MAX_INTERRUPT;\n\n\n\npub enum PlicContext {\n\n Machine = 0,\n\n Supervisor = 1,\n\n}\n\n\n\npub struct Plic {\n\n}\n\n\n\nimpl Plic {\n\n pub fn new() -> Self {\n\n Plic {\n\n }\n\n }\n\n\n", "file_path": "src/plic.rs", "rank": 67, "score": 7.136489095622572 }, { "content": "use crate::riscv;\n\n\n\n// qemu virt UART registers.\n\npub const UART0 : u64 = 0x1000_0000;\n\npub const UART0_IRQ : u64 = 10;\n\n\n\n// virtio mmio interface\n\npub const VIRTIO0 : u64 = 0x10001000;\n\npub const VIRTIO0_IRQ : u64 = 1;\n\n\n\n// core local interruptor (CLINT), which contains the timer\n\npub const CLINT : u64 = 0x2000000;\n\npub const CLINT_MTIME : u64 = 0x200BFF8;\n\n#[inline]\n", "file_path": "src/memorylayout.rs", "rank": 68, "score": 6.877683624262433 }, { "content": "use core::panic::PanicInfo;\n\n\n\nuse crate::param;\n\nuse crate::memorylayout;\n\nuse crate::uart::UART;\n\n\n\nuse rv64::csr::interrupt::Interrupt;\n\nuse rv64::csr::medeleg::Medeleg;\n\nuse rv64::csr::mepc::Mepc;\n\nuse rv64::csr::mhartid::Mhartid;\n\nuse rv64::csr::mideleg::Mideleg;\n\nuse rv64::csr::mie::Mie;\n\nuse rv64::csr::mscratch::Mscratch;\n\nuse rv64::csr::mstatus;\n\nuse rv64::csr::mtvec::Mtvec;\n\nuse rv64::csr::pmp::{PMPConfigMode,PMPConfigAddress,PMPAddress,PMPConfig};\n\nuse rv64::csr::satp::Satp;\n\nuse rv64::csr::sie::Sie;\n\nuse rv64::register::tp;\n\n\n", "file_path": "src/start.rs", "rank": 69, "score": 6.008275623118465 }, { "content": "use std::{env, error::Error, fs::File, io::Write, path::PathBuf};\n\n\n\nuse cc::Build;\n\n\n", "file_path": "build.rs", "rank": 70, "score": 5.583857109193872 }, { "content": "mod trap;\n\nmod uart;\n\nmod vm;\n\n\n\nuse crate::cpu::get_cpuid;\n\nuse crate::kalloc::init_heap;\n\nuse crate::kvm::{init_kvm, init_page};\n\nuse 
crate::plic::{init_plic, init_hartplic};\n\nuse crate::proc::init_proc;\n\nuse crate::uart::UART;\n\nuse crate::trap::{init_harttrap, intr_on, intr_off};\n\n\n\nuse linked_list_allocator::LockedHeap;\n\nuse alloc::alloc::Layout;\n\n\n\n#[no_mangle]\n", "file_path": "src/main.rs", "rank": 71, "score": 5.230571755485373 }, { "content": " Breakpoint = 3,\n\n LoadAddressMisaligned = 4,\n\n LoadAccessFault = 5,\n\n StoreAddressMisaligned = 6,\n\n StoreAccessFault = 7,\n\n EnvironmentCallUMode = 8,\n\n EnvironmentCallSMode = 9,\n\n InstructionPageFault = 12,\n\n LoadPageFault = 13,\n\n StorePageFault = 15,\n\n}\n", "file_path": "src/riscv.rs", "rank": 72, "score": 4.830436694567768 }, { "content": " config.set_config(PMPConfigMode::Write);\n\n config.set_config(PMPConfigMode::Exec);\n\n config.set_config(PMPConfigMode::Address(PMPConfigAddress::TOR));\n\n PMPConfig::write(config);\n\n\n\n // Switch to supervisor mode and jump to main\n\n unsafe { asm!(\"mret\"); }\n\n\n\n // mret will jump into kernel, should not execute to here\n\n loop {}\n\n}\n\n\n", "file_path": "src/start.rs", "rank": 73, "score": 4.699809077748207 }, { "content": "//! Physical memory layout\n\n\n\n//! qemu -machine virt is set up like this,\n\n//! based on qemu's hw/riscv/virt.c:\n\n//! https://github.com/qemu/qemu/blob/master/hw/riscv/virt.c\n\n//!\n\n//! 00001000 -- boot ROM, provided by qemu\n\n//! 02000000 -- CLINT\n\n//! 0C000000 -- PLIC\n\n//! 10000000 -- uart0\n\n//! 10001000 -- virtio disk\n\n//! 80000000 -- boot ROM jumps here in machine mode\n\n//! -kernel loads the kernel here\n\n//! unused RAM after 80000000.\n\n\n\n//! the kernel uses physical memory thus:\n\n//! 80000000 -- entry.S, then kernel text and data\n\n//! end -- start of kernel page allocation area\n\n//! 
PHYSTOP -- end RAM used by the kernel\n\n\n", "file_path": "src/memorylayout.rs", "rank": 74, "score": 3.68823520716834 }, { "content": " Mideleg::from_bits(0xffff).write();\n\n\n\n // Enable interrupt in supervisor mode\n\n let mut sie = Sie::from_read();\n\n sie.set_supervisor_enable(Interrupt::SoftwareInterrupt);\n\n sie.set_supervisor_enable(Interrupt::TimerInterrupt);\n\n sie.set_supervisor_enable(Interrupt::ExternalInterrupt);\n\n sie.write();\n\n\n\n // Store hart id in tp register, for cpuid()\n\n let hartid = Mhartid::from_read().bits();\n\n tp::write(hartid);\n\n\n\n init_timer();\n\n\n\n // Setup PMP so that supervisor mode can access memory\n\n PMPAddress::write(0, (!(0)) >> 10);\n\n\n\n let mut config = PMPConfig::from_bits(0);\n\n config.set_config(PMPConfigMode::Read);\n", "file_path": "src/start.rs", "rank": 75, "score": 3.390838580625133 }, { "content": "#[repr(C)]\n\nstruct UartRegister {\n\n thr: RW<u8>,\n\n ier: RW<u8>,\n\n isr: RW<u8>,\n\n lcr: RW<u8>,\n\n mcr: RW<u8>,\n\n lsr: RW<u8>,\n\n msr: RW<u8>,\n\n spr: RW<u8>,\n\n}\n\n\n\nimpl Uart {\n\n fn new() -> Self {\n\n let mut uart = Uart {\n\n p: unsafe { &mut *(memorylayout::UART0 as *mut UartRegister) },\n\n };\n\n uart.init();\n\n uart\n\n }\n\n\n", "file_path": "src/uart.rs", "rank": 76, "score": 3.040005052606529 }, { "content": "use rv64::register::tp;\n\n\n\n// Must be called with interrupts disabled,\n\n// to prevent race with process being moved\n\n// to a different CPU.\n", "file_path": "src/cpu.rs", "rank": 77, "score": 2.0956811961076234 }, { "content": " /// Do uart initialization\n\n fn init(&mut self) {\n\n // disable interrupt\n\n self.set_interrupt(IerFlag::DISABLE);\n\n\n\n unsafe {\n\n // special mode to set baud rate\n\n self.p.lcr.write(LcrFlag::DLAB.bits());\n\n\n\n // set baud rate of 38.4K\n\n self.p.thr.write(0x03);\n\n self.p.ier.write(0x0);\n\n\n\n // set word length to 8 bits, no parity\n\n self.p.lcr.write(LcrFlag::LENGTH_8.bits());\n\n\n\n // reset and enable 
FIFOs\n\n self.p.isr.write((FcrFlag::FIFO_ENABLE | FcrFlag::FIFO_CLEAR_RX | FcrFlag::FIFO_CLEAR_TX).bits());\n\n }\n\n\n", "file_path": "src/uart.rs", "rank": 78, "score": 2.004555470656546 } ]
Rust
src/lib.rs
duallsistemas/libduallnet
e7dd019c8fa2d98d1ecb41e28bd1b68c5ad880ae
#[cfg(target_os = "windows")] extern crate winapi; use std::io::{Error, ErrorKind}; use std::net::{SocketAddr, TcpStream, ToSocketAddrs}; use std::time::Duration; #[cfg(target_os = "windows")] use winapi::shared::minwindef::DWORD; #[cfg(target_os = "windows")] use winapi::shared::winerror::{WSAEAFNOSUPPORT, WSAETIMEDOUT}; use libc::{c_char, c_int, size_t}; use mac_address::get_mac_address; use sntp_request::{SntpRequest, SntpUnixTimeResult}; mod utils; #[no_mangle] pub unsafe extern "C" fn dn_version() -> *const c_char { concat!(env!("CARGO_PKG_VERSION"), '\0').as_ptr() as *const c_char } #[no_mangle] pub unsafe extern "C" fn dn_lookup_host( hostname: *const c_char, prefer_ipv4: bool, ip: *mut c_char, size: size_t, ) -> c_int { if hostname.is_null() || ip.is_null() || size <= 0 { return -1; } match vec![from_c_str!(hostname).unwrap(), "0"] .join(":") .to_socket_addrs() { Ok(addrs) => { for addr in addrs { if prefer_ipv4 && !addr.is_ipv4() { continue; } let mut resolved_ip = addr.to_string(); resolved_ip.truncate(resolved_ip.len() - ":0".len()); let dest_ip = to_c_str!(resolved_ip).unwrap(); copy_c_str!(dest_ip, ip, size); return 0; } -2 } Err(_) => -3, } } #[no_mangle] pub unsafe extern "C" fn dn_connection_health(ip: *const c_char, port: u16, timeout: u64) -> c_int { if ip.is_null() || port <= 0 { return -1; } match format!("{}:{}", from_c_str!(ip).unwrap(), port).parse::<SocketAddr>() { Ok(addr) => match TcpStream::connect_timeout(&addr, Duration::from_millis(timeout)) { Ok(_) => 0, Err(error) => { if error.kind() == ErrorKind::TimedOut { return -2; } -3 } }, Err(_) => -4, } } #[no_mangle] pub unsafe extern "C" fn dn_mac_address(mac_addr: *mut c_char, size: size_t) -> c_int { if mac_addr.is_null() || size <= 0 { return -1; } match get_mac_address() { Ok(Some(ma)) => { let addr = to_c_str!(ma.to_string()).unwrap(); copy_c_str!(addr, mac_addr, size); 0 } Ok(None) => -2, Err(_) => -3, } } #[no_mangle] pub unsafe extern "C" fn dn_sntp_request( addr: *const c_char, 
timeout: u64, timestamp: *mut i64, ) -> c_int { if timestamp.is_null() { return -1; } let sntp = SntpRequest::new(); if (timeout > 0) && !sntp.set_timeout(Duration::from_millis(timeout)).is_ok() { return -1; } let result: SntpUnixTimeResult; if addr.is_null() { result = sntp.get_unix_time(); } else { result = sntp.get_unix_time_by_addr(from_c_str!(addr).unwrap()); } if sntp.is_kiss_of_death() { return -3; } match result { Ok(ts) => { *timestamp = ts; 0 } Err(error) => { if Error::last_os_error().kind() == ErrorKind::InvalidInput { return -3; } #[cfg(target_os = "windows")] match Error::last_os_error().raw_os_error().unwrap_or(0) as DWORD { WSAETIMEDOUT => return -2, WSAEAFNOSUPPORT => return -3, _ => -4, }; match error.kind() { ErrorKind::TimedOut => -2, ErrorKind::InvalidInput => -3, _ => -4, } } } } #[cfg(test)] mod tests { use super::*; use std::thread; #[test] fn version() { unsafe { assert_eq!( from_c_str!(dn_version()).unwrap(), env!("CARGO_PKG_VERSION") ); } } #[test] fn lookup_host() { unsafe { let ip: [c_char; 45] = [0; 45]; assert_eq!( dn_lookup_host(std::ptr::null(), true, ip.as_ptr() as *mut c_char, ip.len()), -1 ); assert_eq!( dn_lookup_host( to_c_str!("::1").unwrap().as_ptr(), true, std::ptr::null_mut(), ip.len() ), -1 ); assert_eq!( dn_lookup_host( to_c_str!("::1").unwrap().as_ptr(), true, ip.as_ptr() as *mut c_char, 0 ), -1 ); assert_eq!( dn_lookup_host( to_c_str!("::1").unwrap().as_ptr(), true, ip.as_ptr() as *mut c_char, ip.len() ), -2 ); assert_eq!( dn_lookup_host( to_c_str!("abc123").unwrap().as_ptr(), false, ip.as_ptr() as *mut c_char, ip.len() ), -3 ); assert_eq!( dn_lookup_host( to_c_str!("localhost").unwrap().as_ptr(), true, ip.as_ptr() as *mut c_char, ip.len(), ), 0 ); let len = length!(ip.as_ptr()); assert_eq!(len, 9); assert_eq!( compare!( ip.as_ptr(), to_c_str!("127.0.0.1").unwrap().as_ptr(), len + 1 ), 0 ); assert_eq!( dn_lookup_host( to_c_str!("localhost").unwrap().as_ptr(), false, ip.as_ptr() as *mut c_char, ip.len(), ), 0 ); let len 
= length!(ip.as_ptr()); assert_eq!(len, 5); assert_eq!( compare!(ip.as_ptr(), to_c_str!("[::1]").unwrap().as_ptr(), len + 1), 0 ); } } #[test] fn connection_health() { unsafe { assert_eq!(dn_connection_health(std::ptr::null_mut(), 123, 3000), -1); assert_eq!( dn_connection_health(to_c_str!("127.0.0.1").unwrap().as_ptr(), 0, 3000), -1 ); assert_eq!( dn_connection_health(to_c_str!("54.94.220.237").unwrap().as_ptr(), 443, 3000), 0 ); } } #[test] fn mac_address() { unsafe { let mac_addr: [c_char; 18] = [0; 18]; assert_eq!(dn_mac_address(std::ptr::null_mut(), mac_addr.len()), -1); assert_eq!(dn_mac_address(mac_addr.as_ptr() as *mut c_char, 0), -1); dn_mac_address(mac_addr.as_ptr() as *mut c_char, mac_addr.len()); let len = length!(mac_addr.as_ptr()); assert_eq!(len, 17); let mac = format!("{}", get_mac_address().unwrap().unwrap()); assert_eq!( compare!(mac_addr.as_ptr(), to_c_str!(mac).unwrap().as_ptr(), len + 1), 0 ); } } #[test] fn sntp_request() { unsafe { let addr = to_c_str!("pool.ntp.org:123").unwrap().as_ptr(); let mut ts: i64 = 0; assert_eq!(dn_sntp_request(addr, 0, std::ptr::null_mut()), -1); assert_eq!( dn_sntp_request(to_c_str!("pool.ntp.org").unwrap().as_ptr(), 100, &mut ts), -3 ); assert_eq!( dn_sntp_request(to_c_str!(":123").unwrap().as_ptr(), 100, &mut ts), -3 ); assert_eq!( dn_sntp_request( to_c_str!("pool.ntp.org:321").unwrap().as_ptr(), 100, &mut ts ), if cfg!(target_os = "windows") { -2 } else { -4 } ); let mut ts1: i64 = 0; let mut ts2: i64 = 0; assert_eq!( dn_sntp_request( to_c_str!("time.google.com:123").unwrap().as_ptr(), 0, &mut ts1 ), 0 ); thread::sleep(Duration::from_secs(1)); assert_eq!( dn_sntp_request( to_c_str!("time.cloudflare.com:123").unwrap().as_ptr(), 0, &mut ts2 ), 0 ); assert!(ts2 > ts1); } } }
#[cfg(target_os = "windows")] extern crate winapi; use std::io::{Error, ErrorKind}; use std::net::{SocketAddr, TcpStream, ToSocketAddrs}; use std::time::Duration; #[cfg(target_os = "windows")] use winapi::shared::minwindef::DWORD; #[cfg(target_os = "windows")] use winapi::shared::winerror::{WSAEAFNOSUPPORT, WSAETIMEDOUT}; use libc::{c_char, c_int, size_t}; use mac_address::get_mac_address; use sntp_request::{SntpRequest, SntpUnixTimeResult}; mod utils; #[no_mangle] pub unsafe extern "C" fn dn_version() -> *const c_char { concat!(env!("CARGO_PKG_VERSION"), '\0').as_ptr() as *const c_char } #[no_mangle] pub unsafe extern "C" fn dn_lookup_host( hostname: *const c_char, prefer_ipv4: bool, ip: *mut c_char, size: size_t, ) -> c_int { if hostname.is_null() || ip.is_null() || size <= 0 { return -1; } match vec![from_c_str!(hostname).unwrap(), "0"] .join(":") .to_socket_addrs() { Ok(addrs) => { for addr in addrs { if prefer_ipv4 && !addr.is_ipv4() { continue; } let mut resolved_ip = addr.to_string(); resolved_ip.truncate(resolved_ip.len() - ":0".len()); let dest_ip = to_c_str!(resolved_ip).unwrap(); copy_c_str!(dest_ip, ip, size); return 0; } -2 } Err(_) => -3, } } #[no_mangle] pub unsafe extern "C" fn dn_connection_health(ip: *const c_char, port: u16, timeout: u64) -> c_int { if ip.is_null() || port <= 0 { return -1; } match format!("{}:{}", from_c_str!(ip).unwrap(), port).parse::<SocketAddr>() { Ok(addr) => match TcpStream::connect_timeout(&addr, Duration::from_millis(timeout)) { Ok(_) => 0, Err(error) => { if error.kind() == ErrorKind::TimedOut { return -2; } -3 } }, Err(_) => -4, } } #[no_mangle] pub unsafe extern "C" fn dn_mac_address(mac_addr: *mut c_char, size: size_t) -> c_int { if mac_addr.is_null() || size <= 0 { return -1; } match get_mac_address() { Ok(Some(ma)) => { let addr = to_c_str!(ma.to_string()).unwrap(); copy_c_str!(addr, mac_addr, size); 0 } Ok(None) => -2, Err(_) => -3, } } #[no_mangle]
#[cfg(test)] mod tests { use super::*; use std::thread; #[test] fn version() { unsafe { assert_eq!( from_c_str!(dn_version()).unwrap(), env!("CARGO_PKG_VERSION") ); } } #[test] fn lookup_host() { unsafe { let ip: [c_char; 45] = [0; 45]; assert_eq!( dn_lookup_host(std::ptr::null(), true, ip.as_ptr() as *mut c_char, ip.len()), -1 ); assert_eq!( dn_lookup_host( to_c_str!("::1").unwrap().as_ptr(), true, std::ptr::null_mut(), ip.len() ), -1 ); assert_eq!( dn_lookup_host( to_c_str!("::1").unwrap().as_ptr(), true, ip.as_ptr() as *mut c_char, 0 ), -1 ); assert_eq!( dn_lookup_host( to_c_str!("::1").unwrap().as_ptr(), true, ip.as_ptr() as *mut c_char, ip.len() ), -2 ); assert_eq!( dn_lookup_host( to_c_str!("abc123").unwrap().as_ptr(), false, ip.as_ptr() as *mut c_char, ip.len() ), -3 ); assert_eq!( dn_lookup_host( to_c_str!("localhost").unwrap().as_ptr(), true, ip.as_ptr() as *mut c_char, ip.len(), ), 0 ); let len = length!(ip.as_ptr()); assert_eq!(len, 9); assert_eq!( compare!( ip.as_ptr(), to_c_str!("127.0.0.1").unwrap().as_ptr(), len + 1 ), 0 ); assert_eq!( dn_lookup_host( to_c_str!("localhost").unwrap().as_ptr(), false, ip.as_ptr() as *mut c_char, ip.len(), ), 0 ); let len = length!(ip.as_ptr()); assert_eq!(len, 5); assert_eq!( compare!(ip.as_ptr(), to_c_str!("[::1]").unwrap().as_ptr(), len + 1), 0 ); } } #[test] fn connection_health() { unsafe { assert_eq!(dn_connection_health(std::ptr::null_mut(), 123, 3000), -1); assert_eq!( dn_connection_health(to_c_str!("127.0.0.1").unwrap().as_ptr(), 0, 3000), -1 ); assert_eq!( dn_connection_health(to_c_str!("54.94.220.237").unwrap().as_ptr(), 443, 3000), 0 ); } } #[test] fn mac_address() { unsafe { let mac_addr: [c_char; 18] = [0; 18]; assert_eq!(dn_mac_address(std::ptr::null_mut(), mac_addr.len()), -1); assert_eq!(dn_mac_address(mac_addr.as_ptr() as *mut c_char, 0), -1); dn_mac_address(mac_addr.as_ptr() as *mut c_char, mac_addr.len()); let len = length!(mac_addr.as_ptr()); assert_eq!(len, 17); let mac = format!("{}", 
get_mac_address().unwrap().unwrap()); assert_eq!( compare!(mac_addr.as_ptr(), to_c_str!(mac).unwrap().as_ptr(), len + 1), 0 ); } } #[test] fn sntp_request() { unsafe { let addr = to_c_str!("pool.ntp.org:123").unwrap().as_ptr(); let mut ts: i64 = 0; assert_eq!(dn_sntp_request(addr, 0, std::ptr::null_mut()), -1); assert_eq!( dn_sntp_request(to_c_str!("pool.ntp.org").unwrap().as_ptr(), 100, &mut ts), -3 ); assert_eq!( dn_sntp_request(to_c_str!(":123").unwrap().as_ptr(), 100, &mut ts), -3 ); assert_eq!( dn_sntp_request( to_c_str!("pool.ntp.org:321").unwrap().as_ptr(), 100, &mut ts ), if cfg!(target_os = "windows") { -2 } else { -4 } ); let mut ts1: i64 = 0; let mut ts2: i64 = 0; assert_eq!( dn_sntp_request( to_c_str!("time.google.com:123").unwrap().as_ptr(), 0, &mut ts1 ), 0 ); thread::sleep(Duration::from_secs(1)); assert_eq!( dn_sntp_request( to_c_str!("time.cloudflare.com:123").unwrap().as_ptr(), 0, &mut ts2 ), 0 ); assert!(ts2 > ts1); } } }
pub unsafe extern "C" fn dn_sntp_request( addr: *const c_char, timeout: u64, timestamp: *mut i64, ) -> c_int { if timestamp.is_null() { return -1; } let sntp = SntpRequest::new(); if (timeout > 0) && !sntp.set_timeout(Duration::from_millis(timeout)).is_ok() { return -1; } let result: SntpUnixTimeResult; if addr.is_null() { result = sntp.get_unix_time(); } else { result = sntp.get_unix_time_by_addr(from_c_str!(addr).unwrap()); } if sntp.is_kiss_of_death() { return -3; } match result { Ok(ts) => { *timestamp = ts; 0 } Err(error) => { if Error::last_os_error().kind() == ErrorKind::InvalidInput { return -3; } #[cfg(target_os = "windows")] match Error::last_os_error().raw_os_error().unwrap_or(0) as DWORD { WSAETIMEDOUT => return -2, WSAEAFNOSUPPORT => return -3, _ => -4, }; match error.kind() { ErrorKind::TimedOut => -2, ErrorKind::InvalidInput => -3, _ => -4, } } } }
function_block-full_function
[ { "content": " let buf = $src.to_bytes_with_nul();\n\n let mut buf_size = $size;\n\n if buf_size > buf.len() {\n\n buf_size = buf.len()\n\n }\n\n copy!(buf.as_ptr(), $dest, buf_size);\n\n };\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! compare {\n\n ($a:expr,$b:expr,$size:expr) => {\n\n libc::memcmp($a as *const libc::c_void, $b as *const libc::c_void, $size)\n\n };\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! length {\n\n ($cstr:expr) => {\n\n libc::strlen($cstr)\n\n };\n\n}\n", "file_path": "src/utils.rs", "rank": 0, "score": 14963.544330004745 }, { "content": " serde_plain::to_string(&$raw)\n\n };\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! copy {\n\n ($src:expr,$dest:expr,$size:expr) => {\n\n libc::memcpy(\n\n $dest as *mut libc::c_void,\n\n $src as *const libc::c_void,\n\n $size,\n\n )\n\n };\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! copy_c_str {\n\n ($src:expr,$dest:expr,$size:expr) => {\n", "file_path": "src/utils.rs", "rank": 1, "score": 14963.322800905846 }, { "content": "#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! from_c_str {\n\n ($cstr:expr) => {\n\n std::ffi::CStr::from_ptr($cstr).to_str()\n\n };\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! to_c_str {\n\n ($string:expr) => {\n\n std::ffi::CString::new($string)\n\n };\n\n}\n\n\n\n#[doc(hidden)]\n\n#[macro_export]\n\nmacro_rules! 
to_string {\n\n ($raw:expr) => {\n", "file_path": "src/utils.rs", "rank": 2, "score": 14961.840503227137 }, { "content": ":::::::::::::::::::::::::::::::::::::::::::::::\n\n:: Copyright (C) 2019-2021 Duall Sistemas Ltda.\n\n:::::::::::::::::::::::::::::::::::::::::::::::\n\n\n\nset RUSTFLAGS=-Ctarget-feature=+crt-static\n\nrustup target add i686-pc-windows-msvc\n\ncargo clean\n\ncargo build --target=i686-pc-windows-msvc --release\n", "file_path": "build.bat", "rank": 20, "score": 3.4945918688020274 }, { "content": ":::::::::::::::::::::::::::::::::::::::::::\n\n:: Copyright (C) 2021 Duall Sistemas Ltda.\n\n:::::::::::::::::::::::::::::::::::::::::::\n\n\n\nset RUSTFLAGS=-Ctarget-feature=+crt-static\n\ncargo build --release\n", "file_path": "build64.bat", "rank": 22, "score": 1.6869032244140763 } ]
Rust
src/main.rs
scru128/gen_test
3590ba236f29b8323b6b5ba93c0bcc2b703df4cf
use std::env::args; use std::io; use std::io::prelude::*; use std::time::{SystemTime, UNIX_EPOCH}; const STATS_INTERVAL: u64 = 10 * 1000; fn main() { if let Some(arg) = args().nth(1) { let usage = "Usage: any-command-that-prints-identifiers-infinitely | scru128-test"; if arg == "-h" || arg == "--help" { println!("{}", usage); } else { eprintln!("{}", usage); eprintln!("Error: unknown argument: {}", arg); } return; } let stdin = io::stdin(); let mut reader = stdin.lock(); let mut buffer = String::with_capacity(32); println!( "Reading IDs from stdin and will show stats every {} seconds. Press Ctrl-C to quit.", STATS_INTERVAL / 1000 ); let mut st = Status::default(); let mut prev = Identifier::default(); while reader.read_line(&mut buffer).unwrap() > 0 { let line = buffer .strip_suffix('\n') .map_or(buffer.as_str(), |x| x.strip_suffix('\r').unwrap_or(x)); let opt = Identifier::new(line); buffer.clear(); if opt.is_some() { st.n_processed += 1; } else { eprintln!("Error: invalid string representation"); st.n_errors += 1; continue; } let e = opt.unwrap(); if e.str_value <= prev.str_value { eprintln!("Error: string representation not monotonically ordered"); st.n_errors += 1; continue; } if e.int_value <= prev.int_value { eprintln!("Error: integer representation not monotonically ordered"); st.n_errors += 1; continue; } if e.timestamp < prev.timestamp { eprintln!("Error: clock went backwards"); st.n_errors += 1; continue; } else if e.timestamp == prev.timestamp && e.counter_hi < prev.counter_hi { eprintln!("Error: counter_hi went backwards within same timestamp"); st.n_errors += 1; continue; } else if e.timestamp == prev.timestamp && e.counter_hi == prev.counter_hi && e.counter_lo <= prev.counter_lo { eprintln!( "Error: counter_lo not monotonically ordered within same timestamp and counter_hi" ); st.n_errors += 1; continue; } if st.ts_first == 0 { st.ts_first = e.timestamp; } st.ts_last = e.timestamp; count_set_bits_by_pos(&mut st.n_ones_by_bit_entropy, e.entropy); if 
e.counter_lo != prev.counter_lo + 1 && !(e.counter_lo == 0 && prev.counter_lo == 0xff_ffff) { if st.ts_last_counter_lo_update > 0 { st.n_counter_lo_update += 1; st.sum_intervals_counter_lo_update += e.timestamp - st.ts_last_counter_lo_update; } st.ts_last_counter_lo_update = e.timestamp; count_set_bits_by_pos(&mut st.n_ones_by_bit_counter_lo, e.counter_lo); } if e.counter_hi == prev.counter_hi + 1 && e.timestamp == prev.timestamp && e.counter_lo == 0 && prev.counter_lo == 0xff_ffff { st.n_counter_hi_increment += 1; } else if e.counter_hi != prev.counter_hi { if st.ts_last_counter_hi_update > 0 { st.n_counter_hi_update += 1; st.sum_intervals_counter_hi_update += e.timestamp - st.ts_last_counter_hi_update; } st.ts_last_counter_hi_update = e.timestamp; count_set_bits_by_pos(&mut st.n_ones_by_bit_counter_hi, e.counter_hi); } if e.timestamp > st.ts_last_stats_print + STATS_INTERVAL { if st.ts_last_stats_print > 0 { st.print().unwrap(); } st.ts_last_stats_print = e.timestamp; } prev = e; } if st.n_processed > 0 { st.print().unwrap(); } else { eprintln!("Error: no valid ID processed"); } } #[derive(Debug, Default)] struct Status { n_processed: usize, n_errors: usize, ts_first: u64, ts_last: u64, n_ones_by_bit_entropy: [usize; 32], n_counter_lo_update: usize, ts_last_counter_lo_update: u64, sum_intervals_counter_lo_update: u64, n_ones_by_bit_counter_lo: [usize; 24], n_counter_hi_increment: usize, n_counter_hi_update: usize, ts_last_counter_hi_update: u64, sum_intervals_counter_hi_update: u64, n_ones_by_bit_counter_hi: [usize; 24], ts_last_stats_print: u64, } impl Status { fn print(&self) -> Result<(), io::Error> { let time_elapsed = self.ts_last - self.ts_first; let mut buf = io::BufWriter::new(io::stdout()); writeln!(buf)?; writeln!(buf, "{:<52} {:>8} {:>12}", "STAT", "EXPECTED", "ACTUAL")?; writeln!( buf, "{:<52} {:>8} {:>12.1}", "Seconds from first input ID to last (sec)", "NA", time_elapsed as f64 / 1000.0 )?; writeln!( buf, "{:<52} {:>8} {:>12}", "Number of valid IDs 
processed", "NA", self.n_processed )?; writeln!( buf, "{:<52} {:>8} {:>12}", "Number of invalid IDs skipped", 0, self.n_errors )?; writeln!( buf, "{:<52} {:>8} {:>12.3}", "Mean number of IDs per millisecond", "NA", self.n_processed as f64 / time_elapsed as f64 )?; writeln!( buf, "{:<52} {:>8} {:>12.3}", "Current time less timestamp of last ID (sec)", "~0", get_current_time() - (self.ts_last as f64) / 1000.0 )?; writeln!( buf, "{:<52} {:>8} {:>12}", "Number of counter_hi increments", "Few", self.n_counter_hi_increment )?; writeln!( buf, "{:<52} {:>8} {:>12.3}", "Mean interval of counter_hi updates (msec)", "~1000", self.sum_intervals_counter_hi_update as f64 / self.n_counter_hi_update as f64 )?; writeln!( buf, "{:<52} {:>8} {:>12.3}", "Mean interval of counter_lo updates (msec)", "~1", self.sum_intervals_counter_lo_update as f64 / self.n_counter_lo_update as f64 )?; writeln!( buf, "{:<52} {:>8} {:>12}", "1/0 ratio by bit of counter_hi at reset (min-max)", "~0.500", summarize_n_set_bits_by_pos( &self.n_ones_by_bit_counter_hi, self.n_counter_hi_update + 1 ) )?; writeln!( buf, "{:<52} {:>8} {:>12}", "1/0 ratio by bit of counter_lo at reset (min-max)", "~0.500", summarize_n_set_bits_by_pos( &self.n_ones_by_bit_counter_lo, self.n_counter_lo_update + 1 ) )?; writeln!( buf, "{:<52} {:>8} {:>12}", "1/0 ratio by bit of entropy (min-max)", "~0.500", summarize_n_set_bits_by_pos(&self.n_ones_by_bit_entropy, self.n_processed) )?; Ok(()) } } #[derive(Clone, Eq, PartialEq, Hash, Debug, Default)] struct Identifier { str_value: [u8; 25], int_value: u128, timestamp: u64, counter_hi: u32, counter_lo: u32, entropy: u32, } impl Identifier { fn new(str_value: &str) -> Option<Self> { const DECODE_MAP: [u8; 256] = [ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, ]; if str_value.len() != 25 { return None; } let mut fixed_str = [0; 25]; let bs = str_value.as_bytes(); let mut int_value = 0u128; for i in 0..25 { fixed_str[i] = bs[i]; let n = DECODE_MAP[bs[i] as usize] as u128; if n == 0xff { return None; } int_value = int_value.checked_mul(36)?.checked_add(n)?; } Some(Self { str_value: fixed_str, int_value, timestamp: (int_value >> 80) as u64, counter_hi: ((int_value >> 56) & 0xff_ffff) as u32, counter_lo: ((int_value >> 32) & 0xff_ffff) as u32, entropy: (int_value & 0xffff_ffff) as u32, }) } } fn get_current_time() -> f64 { SystemTime::now() .duration_since(UNIX_EPOCH) .expect("clock may have gone backwards") .as_secs_f64() } #[allow(unused_mut)] fn count_set_bits_by_pos(counts: &mut 
[usize], mut n: u32) { #[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))] let mut n: usize = n as usize; for i in 0..counts.len() { counts[counts.len() - 1 - i] += (n & 1) as usize; n >>= 1; } } fn summarize_n_set_bits_by_pos(counts: &[usize], n_samples: usize) -> String { let mut min = 1.0; let mut max = 0.0; for e in counts { let p = *e as f64 / n_samples as f64; if p < min { min = p; } if p > max { max = p; } } format!("{:.3}-{:.3}", min, max) }
use std::env::args; use std::io; use std::io::prelude::*; use std::time::{SystemTime, UNIX_EPOCH}; const STATS_INTERVAL: u64 = 10 * 1000; fn main() { if let Some(arg) = args().nth(1) { let usage = "Usage: any-command-that-prints-identifiers-infinitely | scru128-test"; if arg == "-h" || arg == "--help" { println!("{}", usage); } else { eprintln!("{}", usage); eprintln!("Error: unknown argument: {}", arg); } return; } let stdin = io::stdin(); let mut reader = stdin.lock(); let mut buffer = String::with_capacity(32); println!( "Reading IDs from stdin and will show stats every {} seconds. Press Ctrl-C to quit.", STATS_INTERVAL / 1000 ); let mut st = Status::default(); let mut prev = Identifier::default(); while reader.read_line(&mut buffer).unwrap() > 0 { let line = buffer .strip_suffix('\n') .map_or(buffer.as_str(), |x| x.strip_suffix('\r').unwrap_or(x)); let opt = Identifier::new(line); buffer.clear(); if opt.is_some() { st.n_processed += 1; } else { eprintln!("Error: invalid string representation"); st.n_errors += 1; continue; } let e = opt.unwrap(); if e.str_value <= prev.str_value { eprintln!("Error: string representation not monotonically ordered"); st.n_errors += 1; continue; } if e.int_value <= prev.int_value { eprintln!("Error: integer representation not monotonically ordered"); st.n_errors += 1; continue; } if e.timestamp < prev.timestamp { eprintln!("Error: clock went backwards"); st.n_errors += 1; continue; } else if e.timestamp == prev.timestamp && e.counter_hi < prev.counter_hi { eprintln!("Error: counter_hi went backwards within same timestamp"); st.n_errors += 1; continue; } else if e.timestamp == prev.timestamp && e.counter_hi == prev.counter_hi && e.counter_lo <= prev.counter_lo { eprintln!( "Error: counter_lo not monotonically ordered within same timestamp and counter_hi" ); st.n_errors += 1; continue; } if st.ts_first == 0 { st.ts_first = e.timestamp; } st.ts_last = e.timestamp; count_set_bits_by_pos(&mut st.n_ones_by_bit_entropy, e.entropy); if 
e.counter_lo != prev.counter_lo + 1 && !(e.counter_lo == 0 && prev.counter_lo == 0xff_ffff) { if st.ts_last_counter_lo_update > 0 { st.n_counter_lo_update += 1; st.sum_intervals_counter_lo_update += e.timestamp - st.ts_last_counter_lo_update; } st.ts_last_counter_lo_update = e.timestamp; count_set_bits_by_pos(&mut st.n_ones_by_bit_counter_lo, e.counter_lo); } if e.counter_hi == prev.counter_hi + 1 && e.timestamp == prev.timestamp && e.counter_lo == 0 && prev.counter_lo == 0xff_ffff { st.n_counter_hi_increment += 1; } else if e.counter_hi != prev.counter_hi { if st.ts_last_counter_hi_update > 0 { st.n_counter_hi_update += 1; st.sum_intervals_counter_hi_update += e.timestamp - st.ts_last_counter_hi_update; } st.ts_last_counter_hi_update = e.timestamp; count_set_bits_by_pos(&mut st.n_ones_by_bit_counter_hi, e.counter_hi); } if e.timestamp > st.ts_last_stats_print + STATS_INTERVAL { if st.ts_last_stats_print > 0 { st.print().unwrap(); } st.ts_last_stats_print = e.timestamp; } prev = e; } if st.n_processed > 0 { st.print().unwrap(); } else { eprintln!("Error: no valid ID processed"); } } #[derive(Debug, Default)] struct Status { n_processed: usize, n_errors: usize, ts_first: u64, ts_last: u64, n_ones_by_bit_entropy: [usize; 32], n_counter_lo_update: usize, ts_last_counter_lo_update: u64, sum_intervals_counter_lo_update: u64, n_ones_by_bit_counter_lo: [usize; 24], n_counter_hi_increment: usize, n_counter_hi_update: usize, ts_last_counter_hi_update: u64, sum_intervals_counter_hi_update: u64, n_ones_by_bit_counter_hi: [usize; 24], ts_last_stats_print: u64, } impl Status { fn print(&self) -> Result<(), io::Error> { let time_elapsed = self.ts_last - self.ts_first; let mut buf = io::BufWriter::new(io::stdout()); writeln!(buf)?; writeln!(buf, "{:<52} {:>8} {:>12}", "STAT", "EXPECTED", "ACTUAL")?; writeln!( buf, "{:<52} {:>8} {:>12.1}", "Seconds from first input ID to last (sec)", "NA", time_elapsed as f64 / 1000.0 )?; writeln!( buf, "{:<52} {:>8} {:>12}", "Number of valid IDs 
processed", "NA", self.n_processed )?; writeln!( buf, "{:<52} {:>8} {:>12}", "Number of invalid IDs skipped", 0, self.n_errors )?; writeln!( buf, "{:<52} {:>8} {:>12.3}", "Mean number of IDs per millisecond", "NA", self.n_processed as f64 / time_elapsed as f64 )?; writeln!( buf, "{:<52} {:>8} {:>12.3}", "Current time less timestamp of last ID (sec)", "~0", get_current_time() - (self.ts_last as f64) / 1000.0 )?; writeln!( buf, "{:<52} {:>8} {:>12}", "Number of counter_hi increments", "Few", self.n_counter_hi_increment )?; writeln!( buf, "{:<52} {:>8} {:>12.3}", "Mean interval of counter_hi updates (msec)", "~1000", self.sum_intervals_counter_hi_update as f64 / self.n_counter_hi_update as f64 )?; writeln!( buf, "{:<52} {:>8} {:>12.3}", "Mean interval of counter_lo updates (msec)", "~1", self.sum_intervals_counter_lo_update as f64 / self.n_counter_lo_update as f64 )?; writeln!( buf, "{:<52} {:>8} {:>12}", "1/0 ratio by bit of counter_hi at reset (min-max)", "~0.500", summarize_n_set_bits_by_pos( &self.n_ones_by_bit_counter_hi, self.n_counter_hi_update + 1 ) )?; writeln!( buf, "{:<52} {:>8} {:>12}", "1/0 ratio by bit of counter_lo at reset (min-max)", "~0.500", summarize_n_set_bits_by_pos( &self.n_ones_by_bit_counter_lo, self.n_counter_lo_update + 1 ) )?; writeln!( buf, "{:<52} {:>8} {:>12}", "1/0 ratio by bit of entropy (min-max)", "~0.500", summarize_n_set_bits_by_pos(&self.n_ones_by_bit_entropy, self.n_processed) )?; Ok(()) } } #[derive(Clone, Eq, PartialEq, Hash, Debug, Default)] struct Identifier { str_value: [u8; 25], int_value: u128, timestamp: u64, counter_hi: u32, counter_lo: u32, entropy: u32, } impl Identifier { fn new(str_value: &str) -> Option<Self> { const DECODE_MAP: [u8; 256] = [ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 
0xff, 0xff, 0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x0a, 0
timestamp: (int_value >> 80) as u64, counter_hi: ((int_value >> 56) & 0xff_ffff) as u32, counter_lo: ((int_value >> 32) & 0xff_ffff) as u32, entropy: (int_value & 0xffff_ffff) as u32, }) } } fn get_current_time() -> f64 { SystemTime::now() .duration_since(UNIX_EPOCH) .expect("clock may have gone backwards") .as_secs_f64() } #[allow(unused_mut)] fn count_set_bits_by_pos(counts: &mut [usize], mut n: u32) { #[cfg(any(target_pointer_width = "32", target_pointer_width = "64"))] let mut n: usize = n as usize; for i in 0..counts.len() { counts[counts.len() - 1 - i] += (n & 1) as usize; n >>= 1; } } fn summarize_n_set_bits_by_pos(counts: &[usize], n_samples: usize) -> String { let mut min = 1.0; let mut max = 0.0; for e in counts { let p = *e as f64 / n_samples as f64; if p < min { min = p; } if p > max { max = p; } } format!("{:.3}-{:.3}", min, max) }
x0b, 0x0c, 0x0d, 0x0e, 0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, ]; if str_value.len() != 25 { return None; } let mut fixed_str = [0; 25]; let bs = str_value.as_bytes(); let mut int_value = 0u128; for i in 0..25 { fixed_str[i] = bs[i]; let n = DECODE_MAP[bs[i] as usize] as u128; if n == 0xff { return None; } int_value = int_value.checked_mul(36)?.checked_add(n)?; } Some(Self { str_value: fixed_str, int_value,
function_block-random_span
[ { "content": "# SCRU128 Generator Tester\n\n\n\n[![GitHub tag](https://img.shields.io/github/v/tag/scru128/gen_test)](https://github.com/scru128/gen_test)\n\n[![License](https://img.shields.io/github/license/scru128/gen_test)](https://github.com/scru128/gen_test/blob/main/LICENSE)\n\n\n\nA command-line SCRU128 tester that tests if a generator generates monotonically\n\nordered IDs, sets up-to-date timestamps, fills randomness bits with random\n\nnumbers, resets the per-second randomness field every second, and so on.\n\n\n\n## Usage\n\n\n\n```bash\n\nany-command-that-prints-identifiers-infinitely | scru128-test\n\n```\n\n\n\n## Installation\n\n\n\n[Install Rust](https://www.rust-lang.org/tools/install) and build from source:\n\n\n\n```bash\n\ncargo install --git https://github.com/scru128/gen_test.git\n\n```\n\n\n\n## License\n\n\n\nCopyright 2021-2022 LiosK\n\n\n\nLicensed under the Apache License, Version 2.0.\n\n\n\n## See also\n\n\n\n- [SCRU128 Specification](https://github.com/scru128/spec)\n", "file_path": "README.md", "rank": 19, "score": 6.077534232924799 } ]
Rust
src/runner.rs
sile/hone
460e0bd51455fa479e8487fb54d6f9ba8d9a8cf3
use self::command::CommandRunner; use self::tempdir::TempDirs; use crate::event::{Event, EventReader, EventWriter}; use crate::metric::MetricInstance; use crate::param::{ParamInstance, ParamValue}; use crate::rpc; use crate::study::StudySpec; use crate::trial::{Observation, ObservationId, TrialId}; use crate::tuners::{Action, Tune, Tuner}; use crate::types::Scope; use std::io::{BufRead, Write}; use std::num::NonZeroUsize; use std::path::PathBuf; use std::time::{Duration, Instant}; mod command; mod loader; mod tempdir; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct StudyRunnerOpt { pub study: StudySpec, pub workers: NonZeroUsize, pub repeat: Option<usize>, } #[derive(Debug)] pub struct StudyRunner<W> { output: EventWriter<W>, runnings: Vec<CommandRunner>, next_obs_id: ObservationId, next_trial_id: TrialId, rpc_channel: rpc::Channel, tuner: Tuner, opt: StudyRunnerOpt, start_time: Instant, elapsed_offset: Duration, tempdirs: TempDirs, terminating: bool, } impl<W: Write> StudyRunner<W> { pub fn new(output: W, opt: StudyRunnerOpt) -> anyhow::Result<Self> { let tuner = opt.study.tuner.build()?; let rpc_channel = rpc::spawn_rpc_server()?; let mut output = EventWriter::new(output); output.write(Event::study_started())?; output.write(Event::study_defined(opt.study.clone()))?; Ok(Self { output, runnings: Vec::new(), rpc_channel, next_obs_id: ObservationId::new(0), next_trial_id: TrialId::new(0), tuner, opt, start_time: Instant::now(), tempdirs: TempDirs::new(), elapsed_offset: Duration::new(0, 0), terminating: false, }) } pub fn load_study<R: BufRead>(&mut self, reader: EventReader<R>) -> anyhow::Result<()> { let mut loader = self::loader::StudyLoader::new(self); loader.load(reader) } pub fn run(mut self) -> anyhow::Result<()> { self.start_time = Instant::now(); let mut finished_count = 0; let mut did_nothing; while self.opt.repeat.map_or(true, |n| finished_count < n) { did_nothing = true; while self.runnings.len() < self.opt.workers.get() && 
!self.terminating { let action = self.tuner.next_action(); let waiting = matches!(action, Some(Action::WaitObservations)); self.handle_action(action)?; if waiting { break; } else { did_nothing = false; } } while let Some(message) = self.rpc_channel.try_recv() { self.handle_message(message)?; did_nothing = false; } let mut i = 0; while i < self.runnings.len() { if self.runnings[i].is_exited()? { finished_count += 1; let obs = self.runnings.swap_remove(i).into_obs(); self.tell_finished_obs(obs, self.start_time.elapsed())?; did_nothing = false; } else { i += 1; } } if did_nothing { std::thread::sleep(Duration::from_millis(1)); } } Ok(()) } fn tell_finished_obs(&mut self, obs: Observation, elapsed: Duration) -> anyhow::Result<()> { self.tuner.tell(&obs)?; self.finish_obs(obs, elapsed)?; Ok(()) } fn start_obs(&mut self, obs: Observation) -> anyhow::Result<()> { self.output.write(Event::observation_started( obs.id, obs.trial_id, self.elapsed_offset + self.start_time.elapsed(), ))?; self.runnings.push(CommandRunner::spawn( &self.opt.study, obs, self.rpc_channel.server_addr, )?); Ok(()) } fn finish_obs(&mut self, obs: Observation, elapsed: Duration) -> anyhow::Result<()> { let elapsed = self.elapsed_offset + elapsed; self.tempdirs.remove_obs_tempdir(obs.id); self.output .write(Event::observation_finished(obs, elapsed))?; Ok(()) } fn start_trial(&mut self, trial_id: TrialId) -> anyhow::Result<()> { self.output.write(Event::trial_started(trial_id))?; Ok(()) } fn finish_trial(&mut self, trial_id: TrialId) -> anyhow::Result<()> { self.tempdirs.remove_trial_tempdir(trial_id); self.output.write(Event::trial_finished(trial_id))?; Ok(()) } fn handle_action(&mut self, action: Option<Action>) -> anyhow::Result<()> { match action { None => { let obs = Observation::new( self.next_obs_id.fetch_and_increment(), self.next_trial_id.fetch_and_increment(), ); self.start_trial(obs.trial_id)?; self.start_obs(obs)?; } Some(Action::ResumeTrial { trial_id }) => { let obs = 
Observation::new(self.next_obs_id.fetch_and_increment(), trial_id); self.start_obs(obs)?; } Some(Action::FinishTrial { trial_id }) => { self.finish_trial(trial_id)?; } Some(Action::WaitObservations) => {} Some(Action::QuitOptimization) => { self.terminating = true; for worker in &mut self.runnings { worker.kill()?; } } } Ok(()) } fn handle_message(&mut self, message: rpc::Message) -> anyhow::Result<()> { match message { rpc::Message::Ask { req, reply } => { let value = self.handle_ask(req)?; reply.send(value)?; } rpc::Message::Tell { req, reply } => { self.handle_tell(req)?; reply.send(())?; } rpc::Message::Mktemp { req, reply } => { let path = self.handle_mktemp(req)?; reply.send(path)?; } } Ok(()) } fn handle_mktemp(&mut self, req: rpc::MktempReq) -> anyhow::Result<PathBuf> { match req.scope { Scope::Study => self.tempdirs.create_study_tempdir(req.parent.as_ref()), Scope::Trial => { let trial_id = self .runnings .iter() .find(|o| o.obs().id == req.observation_id) .ok_or_else(|| { anyhow::anyhow!("unknown observation: {:?}", req.observation_id) })? .obs() .trial_id; self.tempdirs .create_trial_tempdir(trial_id, req.parent.as_ref()) } Scope::Observation => self .tempdirs .create_obs_tempdir(req.observation_id, req.parent.as_ref()), } } fn handle_ask(&mut self, req: rpc::AskReq) -> anyhow::Result<ParamValue> { let obs = self .runnings .iter_mut() .find(|o| o.obs().id == req.observation_id) .ok_or_else(|| anyhow::anyhow!("unknown observation_id {}", req.observation_id.get()))? 
.obs_mut(); if let Some(instance) = obs.params.get(&req.param_name) { Ok(instance.value.clone()) } else { let value = self.tuner.ask(obs, &req.param_name, &req.param_type)?; obs.params.insert( req.param_name, ParamInstance::new(req.param_type, value.clone()), ); Ok(value) } } fn handle_tell(&mut self, req: rpc::TellReq) -> anyhow::Result<()> { let obs = self .runnings .iter_mut() .find(|o| o.obs().id == req.observation_id) .ok_or_else(|| anyhow::anyhow!("unknown observation_id {}", req.observation_id.get()))? .obs_mut(); obs.metrics.insert( req.metric_name, MetricInstance::new(req.metric_type, req.metric_value), ); Ok(()) } }
use self::command::CommandRunner; use self::tempdir::TempDirs; use crate::event::{Event, EventReader, EventWriter}; use crate::metric::MetricInstance; use crate::param::{ParamInstance, ParamValue}; use crate::rpc; use crate::study::StudySpec; use crate::trial::{Observation, ObservationId, TrialId}; use crate::tuners::{Action, Tune, Tuner}; use crate::types::Scope; use std::io::{BufRead, Write}; use std::num::NonZeroUsize; use std::path::PathBuf; use std::time::{Duration, Instant}; mod command; mod loader; mod tempdir; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] pub struct StudyRunnerOpt { pub study: StudySpec, pub workers: NonZeroUsize, pub repeat: Option<usize>, } #[derive(Debug)] pub struct StudyRunner<W> { output: EventWriter<W>, runnings: Vec<CommandRunner>, next_obs_id: ObservationId, next_trial_id: TrialId, rpc_channel: rpc::Channel, tuner: Tuner, opt: StudyRunnerOpt, start_time: Instant, elapsed_offset: Duration, tempdirs: TempDirs, terminating: bool, } impl<W: Write> StudyRunner<W> { pub fn new(output: W, opt: StudyRunnerOpt) -> anyhow::Result<Self> { let tuner = opt.study.tuner.build()?; let rpc_channel = rpc::spawn_rpc_server()?; let mut output = EventWriter::new(output); output.write(Event::study_started())?; output.write(Event::study_defined(opt.study.clone()))?; Ok(Self { output, runnings: Vec::new(), rpc_channel, next_obs_id: ObservationId::new(0), next_trial_id: TrialId::new(0), tuner, opt, start_time: Instant::now(), tempdirs: TempDirs::new(), elapsed_offset: Duration::new(0, 0), terminating: false, }) } pub fn load_study<R: BufRead>(&mut self, reader: EventReader<R>) -> anyhow::Result<()> { let mut loader = self::loader::StudyLoader::new(self); loader.load(reader) } pub fn run(mut self) -> anyhow::Result<()> { self.start_time = Instant::now(); let mut finished_count = 0; let mut did_nothing; while self.opt.repeat.map_or(true, |n| finished_count < n) { did_nothing = true; while self.runnings.len() < self.opt.workers.get() && 
!self.terminating { let action = self.tuner.next_action(); let waiting = matches!(action, Some(Action::WaitObservations)); self.handle_action(action)?; if waiting { break; } else { did_nothing = false; } } while let Some(message) = self.rpc_channel.try_recv() { self.handle_message(message)?; did_nothing = false; } let mut i = 0; while i < self.runnings.len() { if self.runnings[i].is_exited()? { finished_count += 1; let obs = self.runnings.swap_remove(i).into_obs(); self.tell_finished_obs(obs, self.start_time.elapsed())?; did_nothing = false; } else { i += 1; } } if did_nothing { std::thread::sleep(Duration::from_millis(1)); } } Ok(()) } fn tell_finished_obs(&mut self, obs: Observation, elapsed: Duration) -> anyhow::Result<()> { self.tuner.tell(&obs)?; self.finish_obs(obs, elapsed)?; Ok(()) } fn start_obs(&mut self, obs: Observation) -> anyhow::Result<()> { self.output.write(Event::observation_started( obs.id, obs.trial_id, self.elapsed_offset + self.start_time.elapsed(), ))?; self.runnings.push(CommandRunner::spawn( &self.opt.study, obs, self.rpc_channel.server_addr, )?); Ok(()) } fn finish_obs(&mut self, obs: Observation, elapsed: Duration) -> anyhow::Result<()> { let elapsed = self.elapsed_offset + elapsed; self.tempdirs.remove_obs_tempdir(obs.id); self.output .write(Event::observation_finished(obs, elapsed))?; Ok(()) } fn start_trial(&mut self, trial_id: TrialId) -> anyhow::Result<()> { self.output.write(Event::trial_started(trial_id))?; Ok(()) } fn finish_trial(&mut self, trial_id: TrialId) -> anyhow::Result<()> { self.tempdirs.remove_trial_tempdir(trial_id); self.output.write(Event::trial_finished(trial_id))?; Ok(()) }
fn handle_message(&mut self, message: rpc::Message) -> anyhow::Result<()> { match message { rpc::Message::Ask { req, reply } => { let value = self.handle_ask(req)?; reply.send(value)?; } rpc::Message::Tell { req, reply } => { self.handle_tell(req)?; reply.send(())?; } rpc::Message::Mktemp { req, reply } => { let path = self.handle_mktemp(req)?; reply.send(path)?; } } Ok(()) } fn handle_mktemp(&mut self, req: rpc::MktempReq) -> anyhow::Result<PathBuf> { match req.scope { Scope::Study => self.tempdirs.create_study_tempdir(req.parent.as_ref()), Scope::Trial => { let trial_id = self .runnings .iter() .find(|o| o.obs().id == req.observation_id) .ok_or_else(|| { anyhow::anyhow!("unknown observation: {:?}", req.observation_id) })? .obs() .trial_id; self.tempdirs .create_trial_tempdir(trial_id, req.parent.as_ref()) } Scope::Observation => self .tempdirs .create_obs_tempdir(req.observation_id, req.parent.as_ref()), } } fn handle_ask(&mut self, req: rpc::AskReq) -> anyhow::Result<ParamValue> { let obs = self .runnings .iter_mut() .find(|o| o.obs().id == req.observation_id) .ok_or_else(|| anyhow::anyhow!("unknown observation_id {}", req.observation_id.get()))? .obs_mut(); if let Some(instance) = obs.params.get(&req.param_name) { Ok(instance.value.clone()) } else { let value = self.tuner.ask(obs, &req.param_name, &req.param_type)?; obs.params.insert( req.param_name, ParamInstance::new(req.param_type, value.clone()), ); Ok(value) } } fn handle_tell(&mut self, req: rpc::TellReq) -> anyhow::Result<()> { let obs = self .runnings .iter_mut() .find(|o| o.obs().id == req.observation_id) .ok_or_else(|| anyhow::anyhow!("unknown observation_id {}", req.observation_id.get()))? .obs_mut(); obs.metrics.insert( req.metric_name, MetricInstance::new(req.metric_type, req.metric_value), ); Ok(()) } }
fn handle_action(&mut self, action: Option<Action>) -> anyhow::Result<()> { match action { None => { let obs = Observation::new( self.next_obs_id.fetch_and_increment(), self.next_trial_id.fetch_and_increment(), ); self.start_trial(obs.trial_id)?; self.start_obs(obs)?; } Some(Action::ResumeTrial { trial_id }) => { let obs = Observation::new(self.next_obs_id.fetch_and_increment(), trial_id); self.start_obs(obs)?; } Some(Action::FinishTrial { trial_id }) => { self.finish_trial(trial_id)?; } Some(Action::WaitObservations) => {} Some(Action::QuitOptimization) => { self.terminating = true; for worker in &mut self.runnings { worker.kill()?; } } } Ok(()) }
function_block-full_function
[ { "content": "pub trait Tune {\n\n fn ask(\n\n &mut self,\n\n obs: &Observation,\n\n param_name: &ParamName,\n\n param_type: &ParamType,\n\n ) -> anyhow::Result<ParamValue>;\n\n\n\n fn tell(&mut self, obs: &Observation) -> anyhow::Result<()>;\n\n\n\n fn next_action(&mut self) -> Option<Action>;\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Action {\n\n ResumeTrial { trial_id: TrialId },\n\n FinishTrial { trial_id: TrialId },\n\n WaitObservations,\n\n QuitOptimization,\n\n}\n", "file_path": "src/tuners.rs", "rank": 0, "score": 101403.3527435065 }, { "content": "pub fn get_observation_id() -> Result<ObservationId, EnvVarError> {\n\n let value = std::env::var(KEY_OBSERVATION_ID)\n\n .map_err(|e| EnvVarError::from_var_error(KEY_OBSERVATION_ID, e))?;\n\n let id = value\n\n .parse()\n\n .map_err(|e| EnvVarError::from_other_error(KEY_OBSERVATION_ID, e))?;\n\n Ok(ObservationId::new(id))\n\n}\n\n\n\n#[derive(Debug, thiserror::Error)]\n\npub enum EnvVarError {\n\n #[error(\"the environment variable {key:?} is not found\")]\n\n NotFound { key: &'static str },\n\n\n\n #[error(\"the environment variable {key:?} contains an invalid value: {source}\")]\n\n Other {\n\n key: &'static str,\n\n source: anyhow::Error,\n\n },\n\n}\n", "file_path": "src/envvar.rs", "rank": 1, "score": 90329.66637633434 }, { "content": "#[derive(Debug)]\n\nstruct FailedObservation {\n\n obs: Observation,\n\n retried_count: usize,\n\n}\n", "file_path": "src/tuners/retry.rs", "rank": 2, "score": 88019.09904161353 }, { "content": "pub fn init() {\n\n fibers_global::set_thread_count(1);\n\n}\n\n\n", "file_path": "src/rpc.rs", "rank": 3, "score": 72014.98093113057 }, { "content": "pub fn get_trial_id() -> Result<TrialId, EnvVarError> {\n\n let value =\n\n std::env::var(KEY_TRIAL_ID).map_err(|e| EnvVarError::from_var_error(KEY_TRIAL_ID, e))?;\n\n let id = value\n\n .parse()\n\n .map_err(|e| EnvVarError::from_other_error(KEY_TRIAL_ID, e))?;\n\n Ok(TrialId::new(id))\n\n}\n\n\n", "file_path": "src/envvar.rs", 
"rank": 4, "score": 64249.44102622669 }, { "content": "// TODO:\n\npub fn spawn_rpc_server() -> anyhow::Result<Channel> {\n\n let mut builder = ServerBuilder::new(SocketAddr::from(([127, 0, 0, 1], 0)));\n\n let (tx, rx) = fibers::sync::mpsc::channel();\n\n builder.add_call_handler(AskHandler { tx: tx.clone() });\n\n builder.add_call_handler(TellHandler { tx: tx.clone() });\n\n builder.add_call_handler(MktempHandler { tx: tx.clone() });\n\n let server = builder.finish(fibers_global::handle());\n\n let (server, addr) = fibers_global::execute(server.local_addr())?;\n\n fibers_global::spawn(server.map_err(|e| panic!(\"{}\", e)));\n\n\n\n Ok(Channel {\n\n rx,\n\n server_addr: addr,\n\n })\n\n}\n", "file_path": "src/rpc.rs", "rank": 5, "score": 56758.76395288007 }, { "content": "use crate::attr::Attr;\n\nuse crate::event::EventReader;\n\nuse crate::runner::{StudyRunner, StudyRunnerOpt};\n\nuse crate::study::{CommandSpec, StudySpec};\n\nuse crate::tuners::TunerSpec;\n\nuse anyhow::Context;\n\nuse std::io::{BufReader, Write};\n\nuse std::num::NonZeroUsize;\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\npub struct RunOpt {\n\n #[structopt(long = \"name\")]\n\n pub study_name: Option<String>,\n\n\n\n #[structopt(long)]\n\n pub study_attrs: Vec<Attr>,\n\n\n", "file_path": "src/commands/run.rs", "rank": 6, "score": 52576.07918247145 }, { "content": " repeat: self.repeat,\n\n };\n\n\n\n let stdout = std::io::stdout();\n\n let runner = StudyRunner::new(stdout.lock(), opt)?;\n\n self.load_then_run(runner)\n\n }\n\n\n\n fn load_then_run<W: Write>(&self, mut runner: StudyRunner<W>) -> anyhow::Result<()> {\n\n for path in &self.load {\n\n self.load(&mut runner, path)\n\n .with_context(|| format!(\"Cannot load a study: path={:?}\", path))?;\n\n }\n\n runner.run()\n\n }\n\n\n\n fn load<W: Write>(&self, runner: &mut StudyRunner<W>, path: &PathBuf) -> anyhow::Result<()> {\n\n let file = 
std::fs::File::open(path)?;\n\n runner.load_study(EventReader::new(BufReader::new(file)))?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/commands/run.rs", "rank": 7, "score": 52574.005148125 }, { "content": " #[structopt(long, default_value = \"1\")]\n\n pub workers: NonZeroUsize,\n\n\n\n #[structopt(long, short = \"n\")]\n\n pub repeat: Option<usize>,\n\n\n\n #[structopt(long)]\n\n pub load: Vec<PathBuf>,\n\n\n\n #[structopt(long, parse(try_from_str = crate::json::parse_json))]\n\n pub tuner: Option<TunerSpec>,\n\n\n\n pub command: PathBuf,\n\n pub args: Vec<String>,\n\n}\n\n\n\nimpl RunOpt {\n\n pub fn run(&self) -> anyhow::Result<()> {\n\n let command = CommandSpec {\n\n path: self.command.clone(),\n", "file_path": "src/commands/run.rs", "rank": 8, "score": 52572.293674966786 }, { "content": " args: self.args.clone(),\n\n };\n\n let study = StudySpec {\n\n name: self\n\n .study_name\n\n .clone()\n\n .unwrap_or_else(|| uuid::Uuid::new_v4().to_string()),\n\n id: uuid::Uuid::new_v4(),\n\n attrs: self\n\n .study_attrs\n\n .iter()\n\n .cloned()\n\n .map(|a| (a.key, a.value))\n\n .collect(),\n\n tuner: self.tuner.clone().unwrap_or_default(),\n\n command,\n\n };\n\n let opt = StudyRunnerOpt {\n\n study,\n\n workers: self.workers,\n", "file_path": "src/commands/run.rs", "rank": 9, "score": 52569.48606731044 }, { "content": "pub fn get_server_addr() -> Result<SocketAddr, EnvVarError> {\n\n let value = std::env::var(KEY_SERVER_ADDR)\n\n .map_err(|e| EnvVarError::from_var_error(KEY_SERVER_ADDR, e))?;\n\n let server_addr: SocketAddr = value\n\n .parse()\n\n .map_err(|e| EnvVarError::from_other_error(KEY_SERVER_ADDR, e))?;\n\n Ok(server_addr)\n\n}\n\n\n", "file_path": "src/envvar.rs", "rank": 10, "score": 52316.47797498358 }, { "content": "use crate::tuners::TunerSpec;\n\n\n\n#[derive(Debug, structopt::StructOpt)]\n\npub struct TunerOpt {\n\n #[structopt(flatten)]\n\n pub spec: TunerSpec,\n\n}\n", "file_path": "src/commands/tuner.rs", "rank": 11, "score": 51912.95802673144 }, 
{ "content": "pub fn parse_json<T>(json: &str) -> anyhow::Result<T>\n\nwhere\n\n T: for<'a> Deserialize<'a>,\n\n{\n\n let v = serde_json::from_str(json)?;\n\n Ok(v)\n\n}\n", "file_path": "src/json.rs", "rank": 12, "score": 50650.42492615655 }, { "content": "pub fn get_string(key: &'static str) -> Result<String, EnvVarError> {\n\n let value = std::env::var(key).map_err(|e| EnvVarError::from_var_error(key, e))?;\n\n Ok(value)\n\n}\n\n\n", "file_path": "src/envvar.rs", "rank": 13, "score": 48016.76675291451 }, { "content": "pub fn call<RPC: Call>(req: RPC::Req) -> anyhow::Result<RPC::Res>\n\nwhere\n\n RPC::ReqEncoder: Default,\n\n RPC::ResDecoder: Default,\n\n{\n\n let server_addr = envvar::get_server_addr()?;\n\n let service = ClientServiceBuilder::new().finish(fibers_global::handle());\n\n let service_handle = service.handle();\n\n fibers_global::spawn(service.map_err(|e| panic!(\"{}\", e)));\n\n let future = RPC::client(&service_handle).call(server_addr, req);\n\n let res =\n\n fibers_global::execute(future).with_context(|| format!(\"RPC {:?} failed\", RPC::NAME))?;\n\n Ok(res)\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct AskRpc;\n\n\n\nimpl Call for AskRpc {\n\n const ID: ProcedureId = ProcedureId(0);\n", "file_path": "src/rpc.rs", "rank": 14, "score": 45899.09353809768 }, { "content": "#[derive(StructOpt, Debug)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\nenum Opt {\n\n Ask(hone::commands::ask::AskOpt),\n\n Get(hone::commands::get::GetOpt),\n\n Run(hone::commands::run::RunOpt),\n\n Show(hone::commands::show::ShowOpt),\n\n Tell(hone::commands::tell::TellOpt),\n\n Tuner(hone::commands::tuner::TunerOpt),\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 15, "score": 43658.01591158123 }, { "content": "#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n\nstruct UncheckedInclusiveRange {\n\n min: f64,\n\n max: f64,\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]\n\n#[serde(try_from = \"UncheckedInclusiveRange\")]\n\npub 
struct InclusiveRange {\n\n min: FiniteF64,\n\n max: FiniteF64,\n\n}\n\n\n\nimpl InclusiveRange {\n\n pub fn new(min: f64, max: f64) -> anyhow::Result<Self> {\n\n let min = FiniteF64::new(min)?;\n\n let max = FiniteF64::new(max)?;\n\n anyhow::ensure!(\n\n min.get() <= max.get(),\n\n \"`min`({}) must be smaller than or equal to `max`({})\",\n\n min.get(),\n", "file_path": "src/types.rs", "rank": 16, "score": 40422.8303126337 }, { "content": "fn main() -> anyhow::Result<()> {\n\n hone::rpc::init();\n\n\n\n let opt = Opt::from_args();\n\n match opt {\n\n Opt::Ask(opt) => {\n\n let value = opt.ask()?;\n\n println!(\"{}\", value);\n\n }\n\n Opt::Tell(opt) => {\n\n opt.tell()?;\n\n }\n\n Opt::Run(opt) => {\n\n opt.run()?;\n\n }\n\n Opt::Tuner(opt) => {\n\n serde_json::to_writer(std::io::stdout().lock(), &opt.spec)?;\n\n println!();\n\n }\n\n Opt::Get(opt) => {\n\n let value = opt.get()?;\n\n println!(\"{}\", value);\n\n }\n\n Opt::Show(opt) => {\n\n opt.show()?;\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 17, "score": 33823.449768203536 }, { "content": "#[derive(Debug, Clone, structopt::StructOpt, serde::Serialize, serde::Deserialize)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\n#[serde(rename_all = \"snake_case\")]\n\nenum TunerSpecInner {\n\n // TODO: HyperbandTuner, TpeTuner\n\n Random(self::random::RandomTunerSpec),\n\n}\n\n\n\nimpl TunerSpecInner {\n\n pub fn build(&self) -> anyhow::Result<Tuner> {\n\n match self {\n\n Self::Random(spec) => spec.build().map(Tuner::new),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for TunerSpecInner {\n\n fn default() -> Self {\n\n Self::Random(self::random::RandomTunerSpec::default())\n\n }\n\n}\n\n\n\n#[derive(Debug, Default, Clone, structopt::StructOpt, serde::Serialize, serde::Deserialize)]\n", "file_path": "src/tuners.rs", "rank": 18, "score": 31350.960290246745 }, { "content": "use crate::tuners::TunerSpec;\n\nuse std::collections::BTreeMap;\n\nuse std::path::PathBuf;\n\nuse 
uuid::Uuid;\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\npub struct StudySpec {\n\n pub name: String,\n\n pub id: Uuid,\n\n pub attrs: BTreeMap<String, String>,\n\n pub tuner: TunerSpec,\n\n pub command: CommandSpec,\n\n}\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\npub struct CommandSpec {\n\n pub path: PathBuf,\n\n pub args: Vec<String>,\n\n}\n", "file_path": "src/study.rs", "rank": 19, "score": 28512.56758478939 }, { "content": " }\n\n}\n\n\n\npub struct Tuner(Box<dyn 'static + Tune + Send + Sync>);\n\n\n\nimpl Tuner {\n\n pub fn new<T>(tuner: T) -> Self\n\n where\n\n T: 'static + Tune + Send + Sync,\n\n {\n\n Self(Box::new(tuner))\n\n }\n\n}\n\n\n\nimpl Tune for Tuner {\n\n fn ask(\n\n &mut self,\n\n obs: &Observation,\n\n param_name: &ParamName,\n\n param_type: &ParamType,\n", "file_path": "src/tuners.rs", "rank": 20, "score": 27672.480382151338 }, { "content": " ) -> anyhow::Result<ParamValue> {\n\n self.0.ask(obs, param_name, param_type)\n\n }\n\n\n\n fn tell(&mut self, obs: &Observation) -> anyhow::Result<()> {\n\n self.0.tell(obs)\n\n }\n\n\n\n fn next_action(&mut self) -> Option<Action> {\n\n self.0.next_action()\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for Tuner {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"Tuner {{ .. 
}}\")\n\n }\n\n}\n", "file_path": "src/tuners.rs", "rank": 21, "score": 27671.068660999732 }, { "content": "\n\nimpl Action {\n\n pub const fn resume_trial(trial_id: TrialId) -> Self {\n\n Self::ResumeTrial { trial_id }\n\n }\n\n\n\n pub const fn finish_trial(trial_id: TrialId) -> Self {\n\n Self::FinishTrial { trial_id }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ActionQueue(VecDeque<Action>);\n\n\n\nimpl ActionQueue {\n\n pub fn new() -> Self {\n\n Self(VecDeque::new())\n\n }\n\n\n\n pub fn enqueue(&mut self, action: Action) {\n\n self.0.push_back(action);\n\n }\n\n\n\n pub fn next(&mut self) -> Option<Action> {\n\n self.0.pop_front()\n\n }\n\n}\n\n\n", "file_path": "src/tuners.rs", "rank": 22, "score": 27667.1310372238 }, { "content": "use crate::param::{ParamName, ParamType, ParamValue};\n\nuse crate::trial::{Observation, TrialId};\n\nuse std::collections::VecDeque;\n\n\n\npub mod random;\n\npub mod retry;\n\n\n", "file_path": "src/tuners.rs", "rank": 23, "score": 27666.345621068023 }, { "content": "#[structopt(rename_all = \"kebab-case\")]\n\n#[serde(rename_all = \"snake_case\")]\n\npub struct TunerSpec {\n\n #[structopt(long, default_value = \"0\")]\n\n retry: usize,\n\n\n\n // TODO: AverageTuner, HyperbandTuner, TpeTuner\n\n #[structopt(subcommand)]\n\n #[serde(flatten)]\n\n inner: Option<TunerSpecInner>,\n\n}\n\n\n\nimpl TunerSpec {\n\n pub fn build(&self) -> anyhow::Result<Tuner> {\n\n let default_tuner = TunerSpecInner::Random(self::random::RandomTunerSpec::default());\n\n let mut tuner = self.inner.as_ref().unwrap_or(&default_tuner).build()?;\n\n if self.retry > 0 {\n\n tuner = Tuner::new(self::retry::RetryTuner::new(tuner, self.retry));\n\n }\n\n Ok(tuner)\n", "file_path": "src/tuners.rs", "rank": 24, "score": 27666.33805249703 }, { "content": "pub mod ask;\n\npub mod get;\n\npub mod run;\n\npub mod show;\n\npub mod tell;\n\npub mod tuner;\n", "file_path": "src/commands.rs", "rank": 25, "score": 27344.794625838716 }, { "content": "\n\n pub fn 
create_obs_tempdir(\n\n &mut self,\n\n id: ObservationId,\n\n parent: Option<&PathBuf>,\n\n ) -> anyhow::Result<PathBuf> {\n\n let (temp, path) = Self::ensure_temp_dir_created(self.observations.get(&id), parent)?;\n\n if let Some(temp) = temp {\n\n self.observations.insert(id, temp);\n\n }\n\n Ok(path)\n\n }\n\n\n\n pub fn remove_trial_tempdir(&mut self, id: TrialId) {\n\n self.trials.remove(&id);\n\n }\n\n\n\n pub fn remove_obs_tempdir(&mut self, id: ObservationId) {\n\n self.observations.remove(&id);\n\n }\n", "file_path": "src/runner/tempdir.rs", "rank": 26, "score": 26773.11406080979 }, { "content": "\n\n pub fn create_study_tempdir(&mut self, parent: Option<&PathBuf>) -> anyhow::Result<PathBuf> {\n\n let (temp, path) = Self::ensure_temp_dir_created(self.study.as_ref(), parent)?;\n\n if let Some(temp) = temp {\n\n self.study = Some(temp);\n\n }\n\n Ok(path)\n\n }\n\n\n\n pub fn create_trial_tempdir(\n\n &mut self,\n\n id: TrialId,\n\n parent: Option<&PathBuf>,\n\n ) -> anyhow::Result<PathBuf> {\n\n let (temp, path) = Self::ensure_temp_dir_created(self.trials.get(&id), parent)?;\n\n if let Some(temp) = temp {\n\n self.trials.insert(id, temp);\n\n }\n\n Ok(path)\n\n }\n", "file_path": "src/runner/tempdir.rs", "rank": 27, "score": 26769.945352566076 }, { "content": "use crate::trial::{ObservationId, TrialId};\n\nuse std::collections::HashMap;\n\nuse std::path::PathBuf;\n\nuse tempfile::TempDir;\n\n\n\n#[derive(Debug)]\n\npub struct TempDirs {\n\n study: Option<TempDir>,\n\n trials: HashMap<TrialId, TempDir>,\n\n observations: HashMap<ObservationId, TempDir>,\n\n}\n\n\n\nimpl TempDirs {\n\n pub fn new() -> Self {\n\n Self {\n\n study: None,\n\n trials: HashMap::new(),\n\n observations: HashMap::new(),\n\n }\n\n }\n", "file_path": "src/runner/tempdir.rs", "rank": 28, "score": 26768.81335329268 }, { "content": "\n\n fn ensure_temp_dir_created(\n\n tempdir: Option<&tempfile::TempDir>,\n\n parent: Option<&PathBuf>,\n\n ) -> anyhow::Result<(Option<TempDir>, PathBuf)> 
{\n\n if let Some(temp) = tempdir {\n\n Ok((None, temp.path().to_path_buf()))\n\n } else if let Some(parent) = parent {\n\n std::fs::create_dir_all(parent)?;\n\n let temp = TempDir::new_in(parent)?;\n\n let path = temp.path().to_path_buf();\n\n Ok((Some(temp), path))\n\n } else {\n\n let temp = TempDir::new()?;\n\n let path = temp.path().to_path_buf();\n\n Ok((Some(temp), path))\n\n }\n\n }\n\n}\n", "file_path": "src/runner/tempdir.rs", "rank": 29, "score": 26761.29623367527 }, { "content": "use super::StudyRunner;\n\nuse crate::event::{Event, EventReader, ObservationEvent, StudyEvent, TrialEvent};\n\nuse crate::trial::{ObservationId, TrialId};\n\nuse std::collections::HashMap;\n\nuse std::io::{BufRead, Write};\n\nuse std::time::Duration;\n\n\n\n#[derive(Debug)]\n\npub struct StudyLoader<'a, W> {\n\n study: &'a mut StudyRunner<W>,\n\n trial_id_mapping: HashMap<TrialId, TrialId>,\n\n obs_id_mapping: HashMap<ObservationId, ObservationId>,\n\n last_elapsed: Duration,\n\n}\n\n\n\nimpl<'a, W: Write> StudyLoader<'a, W> {\n\n pub fn new(study: &'a mut StudyRunner<W>) -> Self {\n\n Self {\n\n study,\n\n trial_id_mapping: HashMap::new(),\n", "file_path": "src/runner/loader.rs", "rank": 30, "score": 26737.929472675387 }, { "content": " .ok_or_else(|| anyhow::anyhow!(\"unknown trial id {:?}\", orig_trial_id))?;\n\n self.obs_id_mapping.insert(orig_obs_id, obs_id);\n\n let elapsed = self.study.elapsed_offset + elapsed.to_duration();\n\n self.study\n\n .output\n\n .write(Event::observation_started(obs_id, trial_id, elapsed))?;\n\n }\n\n ObservationEvent::Finished { mut obs, elapsed } => {\n\n let obs_id = *self\n\n .obs_id_mapping\n\n .get(&obs.id)\n\n .ok_or_else(|| anyhow::anyhow!(\"unknown observation id {:?}\", obs.id))?;\n\n let trial_id = *self\n\n .trial_id_mapping\n\n .get(&obs.trial_id)\n\n .ok_or_else(|| anyhow::anyhow!(\"unknown trial id {:?}\", obs.trial_id))?;\n\n obs.id = obs_id;\n\n obs.trial_id = trial_id;\n\n self.study.tell_finished_obs(obs, 
elapsed.to_duration())?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/runner/loader.rs", "rank": 31, "score": 26728.161443263092 }, { "content": " obs_id_mapping: HashMap::new(),\n\n last_elapsed: Duration::new(0, 0),\n\n }\n\n }\n\n\n\n pub fn load<R: BufRead>(&mut self, mut reader: EventReader<R>) -> anyhow::Result<()> {\n\n let mut skip = true;\n\n while let Some(event) = reader.read()? {\n\n if let Some(elapsed) = event.elapsed() {\n\n self.last_elapsed = elapsed;\n\n }\n\n\n\n match event {\n\n Event::Study(StudyEvent::Started) => {\n\n skip = true;\n\n self.study.elapsed_offset += self.last_elapsed;\n\n self.trial_id_mapping = HashMap::new();\n\n self.obs_id_mapping = HashMap::new();\n\n self.last_elapsed = Duration::new(0, 0);\n\n }\n", "file_path": "src/runner/loader.rs", "rank": 32, "score": 26727.964862447767 }, { "content": " } = event\n\n {\n\n let trial_id = self.study.next_trial_id.fetch_and_increment();\n\n self.trial_id_mapping.insert(orig_trial_id, trial_id);\n\n self.study.output.write(Event::trial_started(trial_id))?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn handle_observation_event(&mut self, event: ObservationEvent) -> anyhow::Result<()> {\n\n match event {\n\n ObservationEvent::Started {\n\n obs_id: orig_obs_id,\n\n trial_id: orig_trial_id,\n\n elapsed,\n\n } => {\n\n let obs_id = self.study.next_obs_id.fetch_and_increment();\n\n let trial_id = *self\n\n .trial_id_mapping\n\n .get(&orig_trial_id)\n", "file_path": "src/runner/loader.rs", "rank": 33, "score": 26725.517192436557 }, { "content": " Event::Study(StudyEvent::Defined { .. 
}) => {\n\n skip = false;\n\n }\n\n Event::Trial(event) if !skip => {\n\n self.handle_trial_event(event)?;\n\n }\n\n Event::Observation(event) if !skip => {\n\n self.handle_observation_event(event)?;\n\n }\n\n _ => {}\n\n }\n\n }\n\n self.study.elapsed_offset += self.last_elapsed;\n\n\n\n Ok(())\n\n }\n\n\n\n fn handle_trial_event(&mut self, event: TrialEvent) -> anyhow::Result<()> {\n\n if let TrialEvent::Started {\n\n trial_id: orig_trial_id,\n", "file_path": "src/runner/loader.rs", "rank": 34, "score": 26720.291933183722 }, { "content": "use crate::param::{NumParamType, ParamName, ParamType, ParamValue, StrParamType};\n\nuse crate::rng::{ArcRng, RngSeed};\n\nuse crate::trial::Observation;\n\nuse crate::tuners::{Action, ActionQueue, Tune};\n\nuse crate::types::FiniteF64;\n\nuse rand::distributions::Distribution;\n\nuse rand::seq::SliceRandom;\n\nuse rand::Rng;\n\n\n\n#[derive(Debug, Clone, Default, structopt::StructOpt, serde::Serialize, serde::Deserialize)]\n\npub struct RandomTunerSpec {\n\n #[structopt(long)]\n\n pub seed: Option<RngSeed>,\n\n}\n\n\n\nimpl RandomTunerSpec {\n\n pub fn build(&self) -> anyhow::Result<RandomTuner> {\n\n let rng = ArcRng::new(self.seed.unwrap_or_default());\n\n Ok(RandomTuner::new(rng))\n\n }\n", "file_path": "src/tuners/random.rs", "rank": 35, "score": 26120.399949362873 }, { "content": "use crate::param::{ParamName, ParamType, ParamValue};\n\nuse crate::trial::{Observation, TrialId};\n\nuse crate::tuners::{Action, ActionQueue, Tune, Tuner};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug)]\n\npub struct RetryTuner {\n\n tuner: Tuner,\n\n max_retries: usize,\n\n actions: ActionQueue,\n\n retryings: HashMap<TrialId, FailedObservation>,\n\n}\n\n\n\nimpl RetryTuner {\n\n pub fn new(tuner: Tuner, max_retries: usize) -> Self {\n\n Self {\n\n tuner,\n\n max_retries,\n\n actions: ActionQueue::new(),\n\n retryings: HashMap::new(),\n", "file_path": "src/tuners/retry.rs", "rank": 36, "score": 26118.99804359031 }, { "content": 
"}\n\n\n\n#[derive(Debug)]\n\npub struct RandomTuner {\n\n rng: ArcRng,\n\n actions: ActionQueue,\n\n}\n\n\n\nimpl RandomTuner {\n\n pub fn new(rng: ArcRng) -> Self {\n\n RandomTuner {\n\n rng,\n\n actions: ActionQueue::new(),\n\n }\n\n }\n\n}\n\n\n\nimpl Tune for RandomTuner {\n\n fn ask(\n\n &mut self,\n", "file_path": "src/tuners/random.rs", "rank": 37, "score": 26117.09674114463 }, { "content": " } else {\n\n self.tuner.tell(&failed.obs)?;\n\n }\n\n Ok(())\n\n }\n\n\n\n fn next_action(&mut self) -> Option<Action> {\n\n self.actions.next().or_else(|| self.tuner.next_action())\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "src/tuners/retry.rs", "rank": 38, "score": 26115.91431437436 }, { "content": " )\n\n })?\n\n .value\n\n .clone();\n\n Ok(param_value)\n\n } else {\n\n self.tuner.ask(obs, param_name, param_type)\n\n }\n\n }\n\n\n\n fn tell(&mut self, obs: &Observation) -> anyhow::Result<()> {\n\n assert!(obs.exit_status.is_some());\n\n\n\n if obs.is_succeeded() {\n\n let obs = if let Some(mut orig_obs) =\n\n self.retryings.remove(&obs.trial_id).map(|x| x.obs)\n\n {\n\n anyhow::ensure!(\n\n orig_obs.params == obs.params,\n\n \"retried trial has the different parameters with the original one: retried={:?}, original={:?}\",\n", "file_path": "src/tuners/retry.rs", "rank": 39, "score": 26114.394079092166 }, { "content": " obs.params, orig_obs.params);\n\n orig_obs.metrics = obs.metrics.clone();\n\n orig_obs.exit_status = obs.exit_status;\n\n orig_obs\n\n } else {\n\n obs.clone()\n\n };\n\n return self.tuner.tell(&obs);\n\n }\n\n\n\n let failed = self\n\n .retryings\n\n .entry(obs.trial_id)\n\n .or_insert_with(|| FailedObservation {\n\n obs: obs.clone(),\n\n retried_count: 0,\n\n });\n\n if failed.retried_count < self.max_retries {\n\n failed.retried_count += 1;\n\n self.actions.enqueue(Action::resume_trial(obs.trial_id));\n", "file_path": "src/tuners/retry.rs", "rank": 40, "score": 26113.814909555713 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Tune for 
RetryTuner {\n\n fn ask(\n\n &mut self,\n\n obs: &Observation,\n\n param_name: &ParamName,\n\n param_type: &ParamType,\n\n ) -> anyhow::Result<ParamValue> {\n\n if let Some(failed) = self.retryings.get(&obs.trial_id) {\n\n let param_value = failed\n\n .obs\n\n .params\n\n .get(param_name)\n\n .ok_or_else(|| {\n\n anyhow::anyhow!(\n\n \"retried trial asked a different parameter: {:?}\",\n\n param_name\n", "file_path": "src/tuners/retry.rs", "rank": 41, "score": 26113.4104705135 }, { "content": " }\n\n }\n\n ParamType::Num(NumParamType::Discrete(t)) => {\n\n let n = rng.gen_range(0, t.count());\n\n let v = t.range().min().get() + t.step().get() * n as f64;\n\n let v = FiniteF64::new(v)?;\n\n Ok(ParamValue::Num(v))\n\n }\n\n ParamType::Num(NumParamType::Normal(t)) => {\n\n let d = rand_distr::Normal::new(t.mean().get(), t.stddev().get())?;\n\n let v = d.sample(rng);\n\n let v = FiniteF64::new(v)?;\n\n Ok(ParamValue::Num(v))\n\n }\n\n ParamType::Num(NumParamType::Fidelity(t)) => Ok(ParamValue::Num(t.range().max())),\n\n }\n\n }\n\n\n\n fn tell(&mut self, obs: &Observation) -> anyhow::Result<()> {\n\n self.actions.enqueue(Action::finish_trial(obs.trial_id));\n\n Ok(())\n\n }\n\n\n\n fn next_action(&mut self) -> Option<Action> {\n\n self.actions.next()\n\n }\n\n}\n", "file_path": "src/tuners/random.rs", "rank": 42, "score": 26112.356395898638 }, { "content": " _obs: &Observation,\n\n _param_name: &ParamName,\n\n param_type: &ParamType,\n\n ) -> anyhow::Result<ParamValue> {\n\n let rng = &mut self.rng;\n\n match param_type {\n\n ParamType::Str(StrParamType::Categorical(t)) => Ok(ParamValue::Str(\n\n t.choices().get().choose(rng).expect(\"unreachable\").clone(),\n\n )),\n\n ParamType::Str(StrParamType::Ordinal(t)) => Ok(ParamValue::Str(\n\n t.choices().get().choose(rng).expect(\"unreachable\").clone(),\n\n )),\n\n ParamType::Num(NumParamType::Continous(t)) => {\n\n if t.ln() {\n\n let v = rng.gen_range(t.range().min().get().ln(), t.range().max().get().ln());\n\n let v = 
FiniteF64::new(v.exp())?;\n\n Ok(ParamValue::Num(v))\n\n } else {\n\n let v = rng.gen_range(t.range().min().get(), t.range().max().get());\n\n Ok(ParamValue::Num(FiniteF64::new(v).expect(\"unreachable\")))\n", "file_path": "src/tuners/random.rs", "rank": 43, "score": 26108.927709054245 }, { "content": " }\n\n\n\n pub fn obs(&self) -> &Observation {\n\n &self.obs\n\n }\n\n\n\n pub fn obs_mut(&mut self) -> &mut Observation {\n\n &mut self.obs\n\n }\n\n\n\n pub fn into_obs(self) -> Observation {\n\n self.obs\n\n }\n\n\n\n pub fn is_exited(&mut self) -> anyhow::Result<bool> {\n\n if let Some(exit_status) = self.proc.try_wait()? {\n\n self.obs.exit_status = exit_status.code();\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n\n }\n\n\n\n pub fn kill(&mut self) -> anyhow::Result<()> {\n\n self.proc.kill()?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/runner/command.rs", "rank": 44, "score": 25818.359397506836 }, { "content": "use crate::envvar;\n\nuse crate::study::StudySpec;\n\nuse crate::trial::Observation;\n\nuse anyhow::Context;\n\nuse std::os::unix::io::{AsRawFd, FromRawFd};\n\nuse std::process::{Child, Command, Stdio};\n\n\n\n#[derive(Debug)]\n\npub struct CommandRunner {\n\n obs: Observation,\n\n proc: Child,\n\n}\n\n\n\nimpl CommandRunner {\n\n pub fn spawn(\n\n study: &StudySpec,\n\n obs: Observation,\n\n rpc_server_addr: std::net::SocketAddr,\n\n ) -> anyhow::Result<Self> {\n\n let mut command = Command::new(&study.command.path);\n", "file_path": "src/runner/command.rs", "rank": 45, "score": 25816.318355463733 }, { "content": "use crate::event::{Event, EventReader, ObservationEvent, StudyEvent};\n\nuse crate::study::StudySpec;\n\nuse crate::trial::CompactObservation;\n\nuse std::collections::BTreeMap;\n\n\n\n#[derive(Debug, structopt::StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\npub enum ShowOpt {\n\n Best(BestOpt),\n\n // ParetFront, Trial, Observation\n\n}\n\n\n\n#[derive(Debug, structopt::StructOpt)]\n\n#[structopt(rename_all = 
\"kebab-case\")]\n\npub struct BestOpt {}\n\n\n\nimpl ShowOpt {\n\n pub fn show(&self) -> anyhow::Result<()> {\n\n match self {\n\n Self::Best(opt) => self.show_best(opt)?,\n", "file_path": "src/commands/show.rs", "rank": 46, "score": 25815.95047768659 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n fn show_best(&self, _opt: &BestOpt) -> anyhow::Result<()> {\n\n let stdin = std::io::stdin();\n\n let mut reader = EventReader::new(stdin.lock());\n\n let mut current_study = None;\n\n let mut best = BTreeMap::new();\n\n\n\n fn output(\n\n study: &StudySpec,\n\n best_per_metric: &BTreeMap<String, CompactObservation>,\n\n ) -> anyhow::Result<()> {\n\n let json = serde_json::json!({\n\n \"study\": {\n\n \"name\": study.name,\n\n \"id\": study.id.to_string()\n\n },\n\n \"best\": best_per_metric\n", "file_path": "src/commands/show.rs", "rank": 47, "score": 25812.202051932494 }, { "content": "impl GetOpt {\n\n pub fn get(&self) -> anyhow::Result<String> {\n\n let value = match self {\n\n Self::Id { scope } => match scope {\n\n Scope::Observation => envvar::get_string(envvar::KEY_OBSERVATION_ID)?,\n\n Scope::Trial => envvar::get_string(envvar::KEY_TRIAL_ID)?,\n\n Scope::Study => envvar::get_string(envvar::KEY_STUDY_ID)?,\n\n },\n\n Self::Tempdir { scope, parent } => {\n\n let observation_id = envvar::get_observation_id()?;\n\n let req = rpc::MktempReq {\n\n observation_id,\n\n parent: parent.clone(),\n\n scope: *scope,\n\n };\n\n let res = rpc::call::<rpc::MktempRpc>(req)?;\n\n res.to_str()\n\n .ok_or_else(|| anyhow::anyhow!(\"invalid path: {:?}\", res))?\n\n .to_owned()\n\n }\n\n };\n\n Ok(value)\n\n }\n\n}\n", "file_path": "src/commands/get.rs", "rank": 48, "score": 25810.52159258185 }, { "content": " });\n\n serde_json::to_writer_pretty(std::io::stdout().lock(), &json)?;\n\n println!();\n\n Ok(())\n\n }\n\n\n\n let mut skip = true;\n\n while let Some(event) = reader.read()? 
{\n\n match event {\n\n Event::Study(StudyEvent::Defined { spec }) => {\n\n if let Some(study) = current_study.take() {\n\n output(&study, &best)?;\n\n }\n\n current_study = Some(spec);\n\n best = BTreeMap::new();\n\n skip = false;\n\n }\n\n Event::Study(StudyEvent::Started) => {\n\n skip = true;\n\n }\n", "file_path": "src/commands/show.rs", "rank": 49, "score": 25809.96254958737 }, { "content": " } else {\n\n Ok(v)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, structopt::StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\npub enum ParamSpec {\n\n Bool,\n\n Choice {\n\n choices: Vec<String>,\n\n #[structopt(long)]\n\n ordinal: bool,\n\n },\n\n Range {\n\n min: f64,\n\n max: f64,\n\n #[structopt(long)]\n\n ln: bool,\n", "file_path": "src/commands/ask.rs", "rank": 50, "score": 25809.712099434902 }, { "content": " Event::Observation(ObservationEvent::Finished { obs, .. }) => {\n\n if skip {\n\n continue;\n\n }\n\n for (name, metric) in &obs.metrics {\n\n // TODO: consider fidelity\n\n\n\n // TODO\n\n // if metric.ty.objective.is_none() {\n\n // continue;\n\n // }\n\n let current = best\n\n .entry(name.get().to_owned())\n\n .or_insert_with(|| obs.to_compact());\n\n if metric.is_better_than(current.metrics[name]) {\n\n *current = obs.to_compact()\n\n }\n\n }\n\n }\n\n _ => {}\n\n }\n\n }\n\n if let Some(study) = current_study.take() {\n\n output(&study, &best)?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/commands/show.rs", "rank": 51, "score": 25808.824954940883 }, { "content": "impl AskOpt {\n\n pub fn ask(&self) -> anyhow::Result<String> {\n\n let observation_id = envvar::get_observation_id()?;\n\n let param_type = self\n\n .param_spec\n\n .to_param_type()\n\n .with_context(|| format!(\"the specification of {:?} is invalid\", self.param_name))?;\n\n let req = rpc::AskReq {\n\n observation_id,\n\n param_name: ParamName::new(self.param_name.clone()),\n\n param_type,\n\n };\n\n let res = rpc::call::<rpc::AskRpc>(req)?;\n\n let v = res.to_string();\n\n if 
self.long_option {\n\n if matches!(self.param_spec, ParamSpec::Bool) && v == \"true\" {\n\n Ok(format!(\"--{}\", self.param_name))\n\n } else {\n\n Ok(format!(\"--{}={:?}\", self.param_name, v))\n\n }\n", "file_path": "src/commands/ask.rs", "rank": 52, "score": 25808.49394286855 }, { "content": "use crate::envvar;\n\nuse crate::param::{\n\n CategoricalParamType, ContinousParamType, DiscreteParamType, FidelityParamType,\n\n NormalParamType, NumParamType, OrdinalParamType, ParamName, ParamType, StrParamType,\n\n};\n\nuse crate::rpc;\n\nuse anyhow::Context;\n\n\n\n#[derive(Debug, structopt::StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\npub struct AskOpt {\n\n pub param_name: String,\n\n\n\n #[structopt(long, short = \"l\")]\n\n pub long_option: bool,\n\n\n\n #[structopt(subcommand)]\n\n pub param_spec: ParamSpec,\n\n}\n\n\n", "file_path": "src/commands/ask.rs", "rank": 53, "score": 25808.350809649426 }, { "content": "use crate::envvar;\n\nuse crate::rpc;\n\nuse crate::types::Scope;\n\n\n\n#[derive(Debug, structopt::StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\npub enum GetOpt {\n\n Id {\n\n #[structopt(long, short=\"s\", default_value= Scope::CHOICES[0], possible_values = Scope::CHOICES)]\n\n scope: Scope,\n\n },\n\n Tempdir {\n\n #[structopt(long)]\n\n parent: Option<std::path::PathBuf>,\n\n\n\n #[structopt(long, short=\"s\", default_value= Scope::CHOICES[0], possible_values = Scope::CHOICES)]\n\n scope: Scope,\n\n },\n\n}\n\n\n", "file_path": "src/commands/get.rs", "rank": 54, "score": 25807.74021171538 }, { "content": "\n\n let stdout = unsafe {\n\n let fd = libc::dup(std::io::stderr().as_raw_fd());\n\n if fd == -1 {\n\n Err(std::io::Error::last_os_error())?;\n\n }\n\n Stdio::from_raw_fd(fd)\n\n };\n\n command\n\n .args(&study.command.args)\n\n .env(envvar::KEY_SERVER_ADDR, rpc_server_addr.to_string())\n\n .env(envvar::KEY_STUDY_ID, study.id.to_string())\n\n .env(envvar::KEY_TRIAL_ID, obs.trial_id.get().to_string())\n\n 
.env(envvar::KEY_OBSERVATION_ID, obs.id.get().to_string())\n\n .stdout(stdout)\n\n .stdin(Stdio::null());\n\n let proc = command\n\n .spawn()\n\n .with_context(|| format!(\"Failed to spawn command: {:?}\", study.command.path))?;\n\n Ok(CommandRunner { obs, proc })\n", "file_path": "src/runner/command.rs", "rank": 55, "score": 25807.53182780912 }, { "content": " value: f64,\n\n },\n\n}\n\n\n\nimpl TellOpt {\n\n pub fn tell(&self) -> anyhow::Result<()> {\n\n let observation_id = envvar::get_observation_id()?;\n\n let (name, ty, value) = match self {\n\n Self::Minimize { name, value } => (name, MetricType::Minimize, value),\n\n Self::Maximize { name, value } => (name, MetricType::Maximize, value),\n\n Self::Record { name, value } => (name, MetricType::Record, value),\n\n };\n\n let req = rpc::TellReq {\n\n observation_id,\n\n metric_name: MetricName::new(name.clone()),\n\n metric_type: ty,\n\n metric_value: MetricValue::new(*value)?,\n\n };\n\n rpc::call::<rpc::TellRpc>(req)?;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/commands/tell.rs", "rank": 56, "score": 25806.657993902918 }, { "content": "use crate::envvar;\n\nuse crate::metric::{MetricName, MetricType, MetricValue};\n\nuse crate::rpc;\n\n\n\n#[derive(Debug, structopt::StructOpt)]\n\n#[structopt(rename_all = \"kebab-case\")]\n\npub enum TellOpt {\n\n Minimize {\n\n #[structopt(long, short = \"n\", default_value = \"objective value\")]\n\n name: String,\n\n value: f64,\n\n },\n\n Maximize {\n\n #[structopt(long, short = \"n\", default_value = \"objective value\")]\n\n name: String,\n\n value: f64,\n\n },\n\n Record {\n\n #[structopt(long, short = \"n\")]\n\n name: String,\n", "file_path": "src/commands/tell.rs", "rank": 57, "score": 25805.988154151168 }, { "content": " #[structopt(long)]\n\n step: Option<f64>,\n\n #[structopt(long)]\n\n fidelity: bool,\n\n },\n\n Normal {\n\n mean: f64,\n\n stddev: f64,\n\n },\n\n}\n\n\n\nimpl ParamSpec {\n\n fn to_param_type(&self) -> anyhow::Result<ParamType> {\n\n match self 
{\n\n Self::Bool => CategoricalParamType::new(vec![\"false\".to_owned(), \"true\".to_owned()])\n\n .map(StrParamType::Categorical)\n\n .map(ParamType::Str),\n\n Self::Choice {\n\n choices,\n\n ordinal: false,\n", "file_path": "src/commands/ask.rs", "rank": 58, "score": 25805.850138568054 }, { "content": " } => CategoricalParamType::new(choices.clone())\n\n .map(StrParamType::Categorical)\n\n .map(ParamType::Str),\n\n Self::Choice {\n\n choices,\n\n ordinal: true,\n\n } => OrdinalParamType::new(choices.clone())\n\n .map(StrParamType::Ordinal)\n\n .map(ParamType::Str),\n\n Self::Normal { mean, stddev } => NormalParamType::new(*mean, *stddev)\n\n .map(NumParamType::Normal)\n\n .map(ParamType::Num),\n\n Self::Range {\n\n min,\n\n max,\n\n ln,\n\n step: None,\n\n fidelity: false,\n\n } => ContinousParamType::new(*min, *max, *ln)\n\n .map(NumParamType::Continous)\n", "file_path": "src/commands/ask.rs", "rank": 59, "score": 25802.326799866252 }, { "content": " ln: true,\n\n step: Some(_),\n\n ..\n\n } => anyhow::bail!(\"Cannot specify both `--ln` and `--step` options.\"),\n\n Self::Range {\n\n ln: true,\n\n fidelity: true,\n\n ..\n\n } => anyhow::bail!(\"Cannot specify both `--ln` and `--fidelity` options.\"),\n\n }\n\n }\n\n}\n", "file_path": "src/commands/ask.rs", "rank": 60, "score": 25802.253910126023 }, { "content": " .map(ParamType::Num),\n\n Self::Range {\n\n min,\n\n max,\n\n ln: false,\n\n step: Some(step),\n\n fidelity: false,\n\n } => DiscreteParamType::new(*min, *max, *step)\n\n .map(NumParamType::Discrete)\n\n .map(ParamType::Num),\n\n Self::Range {\n\n min,\n\n max,\n\n ln: false,\n\n step,\n\n fidelity: true,\n\n } => FidelityParamType::new(*min, *max, *step)\n\n .map(NumParamType::Fidelity)\n\n .map(ParamType::Num),\n\n Self::Range {\n", "file_path": "src/commands/ask.rs", "rank": 61, "score": 25800.67530796845 }, { "content": "use crate::study::StudySpec;\n\nuse crate::trial::{Observation, ObservationId, TrialId};\n\nuse 
crate::types::ElapsedSeconds;\n\nuse std::io::{BufRead, Write};\n\nuse std::time::Duration;\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\n#[serde(rename_all = \"snake_case\")]\n\npub enum Event {\n\n Study(StudyEvent),\n\n Trial(TrialEvent),\n\n Observation(ObservationEvent),\n\n}\n\n\n\nimpl Event {\n\n pub fn elapsed(&self) -> Option<Duration> {\n\n match self {\n\n Self::Observation(ObservationEvent::Started { elapsed, .. })\n\n | Self::Observation(ObservationEvent::Finished { elapsed, .. }) => {\n\n Some(elapsed.to_duration())\n", "file_path": "src/event.rs", "rank": 69, "score": 19.51564639341871 }, { "content": " pub fn new(writer: W) -> Self {\n\n Self { writer }\n\n }\n\n\n\n pub fn write(&mut self, event: Event) -> anyhow::Result<()> {\n\n serde_json::to_writer(&mut self.writer, &event)?;\n\n writeln!(&mut self.writer)?;\n\n self.writer.flush()?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EventReader<R> {\n\n reader: R,\n\n}\n\n\n\nimpl<R: BufRead> EventReader<R> {\n\n pub fn new(reader: R) -> Self {\n\n Self { reader }\n", "file_path": "src/event.rs", "rank": 71, "score": 17.9757089718372 }, { "content": "pub enum Scope {\n\n Observation,\n\n Trial,\n\n Study,\n\n}\n\n\n\nimpl Scope {\n\n pub const CHOICES: &'static [&'static str] = &[\"observation\", \"trial\", \"study\"];\n\n}\n\n\n\nimpl std::str::FromStr for Scope {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s {\n\n \"observation\" => Ok(Self::Observation),\n\n \"trial\" => Ok(Self::Trial),\n\n \"study\" => Ok(Self::Study),\n\n _ => anyhow::bail!(\"unknown scope {:?}\", s),\n\n }\n", "file_path": "src/types.rs", "rank": 75, "score": 14.900498245760883 }, { "content": "\n\n pub fn observation_started(\n\n obs_id: ObservationId,\n\n trial_id: TrialId,\n\n elapsed: Duration,\n\n ) -> Event {\n\n Self::Observation(ObservationEvent::Started {\n\n obs_id,\n\n trial_id,\n\n elapsed: elapsed.into(),\n\n })\n\n 
}\n\n\n\n pub fn observation_finished(obs: Observation, elapsed: Duration) -> Event {\n\n Self::Observation(ObservationEvent::Finished {\n\n obs,\n\n elapsed: elapsed.into(),\n\n })\n\n }\n\n}\n", "file_path": "src/event.rs", "rank": 76, "score": 14.778533910119302 }, { "content": "pub mod attr;\n\npub mod commands;\n\npub mod envvar;\n\npub mod event;\n\npub mod json;\n\npub mod metric;\n\npub mod param;\n\npub mod rng;\n\npub mod rpc;\n\npub mod runner;\n\npub mod study;\n\npub mod trial;\n\npub mod tuners;\n\npub mod types;\n", "file_path": "src/lib.rs", "rank": 77, "score": 13.831276994970342 }, { "content": "pub enum ObservationEvent {\n\n Started {\n\n obs_id: ObservationId,\n\n trial_id: TrialId,\n\n elapsed: ElapsedSeconds,\n\n },\n\n // TODO: Queued\n\n Finished {\n\n #[serde(flatten)]\n\n obs: Observation,\n\n elapsed: ElapsedSeconds,\n\n },\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct EventWriter<W> {\n\n writer: W,\n\n}\n\n\n\nimpl<W: Write> EventWriter<W> {\n", "file_path": "src/event.rs", "rank": 78, "score": 13.817128401002611 }, { "content": " let id = Self(self.0);\n\n self.0 += 1;\n\n id\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\npub struct Observation {\n\n #[serde(rename = \"obs_id\")]\n\n pub id: ObservationId,\n\n pub trial_id: TrialId,\n\n pub params: BTreeMap<ParamName, ParamInstance>,\n\n pub metrics: BTreeMap<MetricName, MetricInstance>,\n\n pub exit_status: Option<i32>,\n\n}\n\n\n\nimpl Observation {\n\n pub fn new(obs_id: ObservationId, trial_id: TrialId) -> Self {\n\n Self {\n\n id: obs_id,\n", "file_path": "src/trial.rs", "rank": 80, "score": 12.774633375322283 }, { "content": "use crate::trial::{ObservationId, TrialId};\n\nuse std::net::SocketAddr;\n\n\n\npub const KEY_SERVER_ADDR: &str = \"HONE_SERVER_ADDR\";\n\npub const KEY_STUDY_ID: &str = \"HONE_STUDY_INSTANCE_ID\";\n\npub const KEY_TRIAL_ID: &str = \"HONE_TRIAL_ID\";\n\npub const KEY_OBSERVATION_ID: &str = \"HONE_OBSERVATION_ID\";\n\npub 
const KEY_STUDY_DIR: &str = \"HONE_STUDY_DIR\";\n\npub const KEY_TRIAL_DIR: &str = \"HONE_TRIAL_DIR\";\n\npub const KEY_OBSERVATION_DIR: &str = \"HONE_OBS_DIR\";\n\npub const KEY_STUDY_TEMP_DIR: &str = \"HONE_STUDY_TEMP_DIR\";\n\npub const KEY_TRIAL_TEMP_DIR: &str = \"HONE_TRIAL_TEMP_DIR\";\n\npub const KEY_OBSERVATION_TEMP_DIR: &str = \"HONE_OBS_TEMP_DIR\";\n\n\n", "file_path": "src/envvar.rs", "rank": 81, "score": 12.70459614055414 }, { "content": " self.0 += 1;\n\n id\n\n }\n\n}\n\n\n\n#[derive(\n\n Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, serde::Serialize, serde::Deserialize,\n\n)]\n\npub struct ObservationId(u64);\n\n\n\nimpl ObservationId {\n\n pub const fn new(id: u64) -> Self {\n\n Self(id)\n\n }\n\n\n\n pub const fn get(self) -> u64 {\n\n self.0\n\n }\n\n\n\n pub fn fetch_and_increment(&mut self) -> Self {\n", "file_path": "src/trial.rs", "rank": 82, "score": 12.478839516966366 }, { "content": "use ordered_float::OrderedFloat;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::convert::TryFrom;\n\nuse std::time::Duration;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]\n\n#[serde(try_from = \"Vec<T>\")]\n\npub struct NonEmptyVec<T>(Vec<T>);\n\n\n\nimpl<T> NonEmptyVec<T> {\n\n pub fn new(inner: Vec<T>) -> anyhow::Result<Self> {\n\n anyhow::ensure!(!inner.is_empty(), \"empty vector isn't allowed\");\n\n Ok(Self(inner))\n\n }\n\n\n\n pub fn get(&self) -> &[T] {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl<T> TryFrom<Vec<T>> for NonEmptyVec<T> {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(from: Vec<T>) -> Result<Self, Self::Error> {\n\n Self::new(from)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, Serialize, Deserialize)]\n", "file_path": "src/types.rs", "rank": 83, "score": 12.232008359361146 }, { "content": "//! 
**R**andom **N**number **G**enerator.\n\nuse rand::rngs::StdRng;\n\nuse rand::{Error, RngCore, SeedableRng};\n\nuse std::sync::{Arc, Mutex};\n\n\n\n/// The random number generator for `kurobako`.\n\n#[derive(Debug, Clone)]\n\npub struct ArcRng(Arc<Mutex<StdRng>>);\n\nimpl ArcRng {\n\n /// Makes a new `ArcRng` with the given random seed.\n\n pub fn new(seed: RngSeed) -> Self {\n\n let mut seed256 = [0; 32];\n\n (&mut seed256[0..8]).copy_from_slice(&seed.0.to_be_bytes());\n\n\n\n let inner = StdRng::from_seed(seed256);\n\n Self(Arc::new(Mutex::new(inner)))\n\n }\n\n}\n\n\n\nimpl RngCore for ArcRng {\n", "file_path": "src/rng.rs", "rank": 84, "score": 11.463093316111333 }, { "content": "use crate::metric::{MetricInstance, MetricName, MetricValue};\n\nuse crate::param::{ParamInstance, ParamName, ParamValue};\n\nuse std::collections::BTreeMap;\n\n\n\n#[derive(\n\n Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, serde::Serialize, serde::Deserialize,\n\n)]\n\npub struct TrialId(u64);\n\n\n\nimpl TrialId {\n\n pub const fn new(id: u64) -> Self {\n\n Self(id)\n\n }\n\n\n\n pub const fn get(self) -> u64 {\n\n self.0\n\n }\n\n\n\n pub fn fetch_and_increment(&mut self) -> Self {\n\n let id = Self(self.0);\n", "file_path": "src/trial.rs", "rank": 85, "score": 11.407185193130541 }, { "content": "#[derive(Debug, Clone)]\n\npub struct Attr {\n\n pub key: String,\n\n pub value: String,\n\n}\n\n\n\nimpl std::str::FromStr for Attr {\n\n type Err = anyhow::Error;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let mut iter = s.splitn(2, ':');\n\n let key = iter.next().expect(\"unreachable\");\n\n let value = iter\n\n .next()\n\n .ok_or_else(|| anyhow::anyhow!(\"No value part in an attribute string: {:?}\", s))?;\n\n Ok(Self {\n\n key: key.to_owned(),\n\n value: value.to_owned(),\n\n })\n\n }\n\n}\n", "file_path": "src/attr.rs", "rank": 86, "score": 11.079373017761323 }, { "content": " }\n\n\n\n pub fn read(&mut self) -> anyhow::Result<Option<Event>> {\n\n let 
mut buf = String::new();\n\n let size = self.reader.read_line(&mut buf)?;\n\n if size == 0 {\n\n return Ok(None);\n\n }\n\n let event = serde_json::from_str(&buf)?;\n\n Ok(Some(event))\n\n }\n\n}\n", "file_path": "src/event.rs", "rank": 87, "score": 10.538714752143786 }, { "content": " self.range\n\n }\n\n\n\n pub const fn step(&self) -> Option<NonNegF64> {\n\n self.step\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]\n\n#[serde(untagged, rename_all = \"snake_case\")]\n\npub enum ParamValue {\n\n Str(String),\n\n Num(FiniteF64),\n\n}\n\n\n\nimpl std::fmt::Display for ParamValue {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n match self {\n\n Self::Str(v) => write!(f, \"{}\", v),\n\n Self::Num(v) => write!(f, \"{}\", v.get()),\n\n }\n\n }\n\n}\n", "file_path": "src/param.rs", "rank": 88, "score": 10.524837518640688 }, { "content": "use structopt::StructOpt;\n\n\n\n#[derive(StructOpt, Debug)]\n\n#[structopt(rename_all = \"kebab-case\")]\n", "file_path": "src/main.rs", "rank": 89, "score": 10.476314433985657 }, { "content": " CompactObservation {\n\n id: self.id,\n\n trial_id: self.trial_id,\n\n params: self\n\n .params\n\n .iter()\n\n .map(|(k, v)| (k.clone(), v.value.clone()))\n\n .collect(),\n\n metrics: self\n\n .metrics\n\n .iter()\n\n .map(|(k, v)| (k.clone(), v.value.clone()))\n\n .collect(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]\n\npub struct CompactObservation {\n\n #[serde(rename = \"obs_id\")]\n\n pub id: ObservationId,\n\n pub trial_id: TrialId,\n\n pub params: BTreeMap<ParamName, ParamValue>,\n\n pub metrics: BTreeMap<MetricName, MetricValue>,\n\n}\n", "file_path": "src/trial.rs", "rank": 90, "score": 10.12825856192081 }, { "content": " trial_id,\n\n params: BTreeMap::new(),\n\n metrics: BTreeMap::new(),\n\n exit_status: None,\n\n }\n\n }\n\n\n\n pub fn is_succeeded(&self) -> bool {\n\n self.exit_status == Some(0)\n\n }\n\n\n\n pub fn 
is_max_fidelity(&self) -> bool {\n\n self.exit_status == Some(0)\n\n && self\n\n .params\n\n .values()\n\n .all(|p| p.is_max_fidelity().unwrap_or(true))\n\n }\n\n\n\n pub fn to_compact(&self) -> CompactObservation {\n", "file_path": "src/trial.rs", "rank": 91, "score": 9.819044739457814 }, { "content": "use crate::types::FiniteF64;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\npub struct MetricName(String);\n\n\n\nimpl MetricName {\n\n pub const fn new(name: String) -> Self {\n\n Self(name)\n\n }\n\n\n\n pub fn get(&self) -> &str {\n\n &self.0\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\npub struct MetricValue(FiniteF64);\n\n\n\nimpl MetricValue {\n", "file_path": "src/metric.rs", "rank": 92, "score": 9.74951358058824 }, { "content": "use crate::types::{FiniteF64, InclusiveRange, NonEmptyVec, NonNegF64};\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\npub struct ParamName(String);\n\n\n\nimpl ParamName {\n\n pub const fn new(name: String) -> Self {\n\n Self(name)\n\n }\n\n\n\n pub fn get(&self) -> &str {\n\n &self.0\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]\n\npub struct ParamInstance {\n\n pub ty: ParamType,\n\n pub value: ParamValue,\n", "file_path": "src/param.rs", "rank": 93, "score": 9.668842224964784 }, { "content": "use crate::Result;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::ops::Deref;\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\npub struct SafeString(String);\n\n\n\nimpl SafeString {\n\n pub fn new(s: &str) -> Result<Self> {\n\n panic!()\n\n }\n\n}\n\n\n\nimpl Deref for EscapedString {\n\n type Target = str;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n self.0.deref()\n\n }\n\n}\n", "file_path": "src/str.rs", 
"rank": 94, "score": 9.593354300559493 }, { "content": " pub fn new(value: f64) -> anyhow::Result<Self> {\n\n Ok(Self(FiniteF64::new(value)?))\n\n }\n\n\n\n pub const fn get(self) -> f64 {\n\n self.0.get()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]\n\n#[serde(rename_all = \"SCREAMING_SNAKE_CASE\")]\n\npub enum MetricType {\n\n Minimize,\n\n Maximize,\n\n Record,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct MetricInstance {\n\n pub ty: MetricType,\n", "file_path": "src/metric.rs", "rank": 95, "score": 9.509186769120602 }, { "content": "#[serde(rename_all = \"snake_case\")] // TOOD: try_from UncheckedContinousParamType\n\npub struct ContinousParamType {\n\n range: InclusiveRange,\n\n ln: bool,\n\n}\n\n\n\nimpl ContinousParamType {\n\n pub fn new(min: f64, max: f64, ln: bool) -> anyhow::Result<Self> {\n\n let range = InclusiveRange::new(min, max)?;\n\n if ln {\n\n anyhow::ensure!(range.min().get().is_sign_positive(), \"TODO\");\n\n }\n\n Ok(Self { range, ln })\n\n }\n\n\n\n pub const fn range(&self) -> InclusiveRange {\n\n self.range\n\n }\n\n\n\n pub const fn ln(&self) -> bool {\n", "file_path": "src/param.rs", "rank": 96, "score": 9.404830735404142 }, { "content": " }\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn study_started() -> Event {\n\n Self::Study(StudyEvent::Started)\n\n }\n\n\n\n pub fn study_defined(spec: StudySpec) -> Event {\n\n Self::Study(StudyEvent::Defined { spec })\n\n }\n\n\n\n pub fn trial_started(trial_id: TrialId) -> Event {\n\n Self::Trial(TrialEvent::Started { trial_id })\n\n }\n\n\n\n pub fn trial_finished(trial_id: TrialId) -> Event {\n\n Self::Trial(TrialEvent::Finished { trial_id })\n\n }\n", "file_path": "src/event.rs", "rank": 97, "score": 9.400205341123861 }, { "content": " self.0\n\n }\n\n\n\n /// Converts the elapsed seconds to `Duration`.\n\n pub fn to_duration(self) -> Duration {\n\n Duration::from_secs_f64(self.0)\n\n }\n\n}\n\n\n\nimpl From<Duration> for 
ElapsedSeconds {\n\n fn from(f: Duration) -> Self {\n\n Self(f.as_secs_f64())\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 98, "score": 9.369145567276057 }, { "content": " Ok(Self(OrderedFloat(x)))\n\n }\n\n\n\n pub const fn get(self) -> f64 {\n\n (self.0).0\n\n }\n\n}\n\n\n\nimpl TryFrom<f64> for NonNegF64 {\n\n type Error = anyhow::Error;\n\n\n\n fn try_from(from: f64) -> Result<Self, Self::Error> {\n\n Self::new(from)\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]\n\n#[serde(try_from = \"f64\")]\n\npub struct FiniteF64(OrderedFloat<f64>);\n\n\n", "file_path": "src/types.rs", "rank": 99, "score": 9.05506642985593 } ]
Rust
src/component/model/mmd/mod.rs
funmaker/Project39-ar
f61334c1409eb884fab67413374d0ad8e08a6a5e
use std::cell::RefCell; use std::mem::size_of; use std::sync::Arc; use num_traits::Zero; use simba::scalar::SubsetOf; use vulkano::buffer::{BufferUsage, DeviceLocalBuffer, TypedBufferAccess}; use vulkano::command_buffer::{AutoCommandBufferBuilder, PrimaryAutoCommandBuffer}; use vulkano::descriptor_set::{DescriptorSet, PersistentDescriptorSet}; use vulkano::device::DeviceOwned; use vulkano::DeviceSize; use vulkano::pipeline::PipelineBindPoint; pub mod test; mod bone; mod sub_mesh; mod shared; pub use crate::renderer::pipelines::mmd::{MORPH_GROUP_SIZE, Vertex}; use crate::renderer::Renderer; use crate::application::Entity; use crate::utils::AutoCommandBufferBuilderEx; use crate::component::{Component, ComponentBase, ComponentInner, ComponentError}; use crate::debug; use crate::math::{AMat4, Isometry3, IVec4, Vec4}; use super::ModelError; pub use bone::{Bone, BoneConnection}; pub use shared::{MMDModelShared, SubMeshDesc}; pub struct MMDModelState { pub bones: Vec<Bone>, pub morphs: Vec<f32>, bones_mats: Vec<AMat4>, morphs_vec: Vec<IVec4>, } #[derive(ComponentBase)] pub struct MMDModel { #[inner] inner: ComponentInner, pub state: RefCell<MMDModelState>, shared: Arc<MMDModelShared>, bones_ubo: Arc<DeviceLocalBuffer<[AMat4]>>, morphs_ubo: Arc<DeviceLocalBuffer<[IVec4]>>, offsets_ubo: Arc<DeviceLocalBuffer<[IVec4]>>, morphs_set: Arc<dyn DescriptorSet + Send + Sync>, model_set: Arc<dyn DescriptorSet + Send + Sync>, } #[allow(dead_code)] impl MMDModel { pub fn new(shared: Arc<MMDModelShared>, renderer: &mut Renderer) -> Result<MMDModel, ModelError> { let bone_count = shared.default_bones.len(); let bones = shared.default_bones.clone(); let bones_mats = Vec::with_capacity(bone_count); let bones_ubo = DeviceLocalBuffer::array(shared.vertices.device().clone(), (size_of::<AMat4>() * bone_count) as DeviceSize, BufferUsage { transfer_destination: true, storage_buffer: true, ..BufferUsage::none() }, Some(renderer.queue.family()))?; let morphs = vec![0.0; 
shared.morphs_sizes.len()]; let morphs_vec_count = (shared.morphs_sizes.len() + 1) / 2; let morphs_vec = Vec::with_capacity(morphs_vec_count); let morphs_ubo = DeviceLocalBuffer::array(shared.vertices.device().clone(), morphs_vec_count as DeviceSize, BufferUsage { transfer_destination: true, storage_buffer: true, ..BufferUsage::none() }, Some(renderer.queue.family()))?; let offsets_ubo = DeviceLocalBuffer::array(shared.vertices.device().clone(), shared.vertices.len(), BufferUsage { transfer_destination: true, storage_buffer: true, ..BufferUsage::none() }, Some(renderer.queue.family()))?; let compute_layout = shared.morphs_pipeline .layout() .descriptor_set_layouts() .get(0) .ok_or(ModelError::NoLayout)? .clone(); let morphs_set = { let mut set_builder = PersistentDescriptorSet::start(compute_layout); set_builder.add_buffer(morphs_ubo.clone())? .add_buffer(shared.morphs_offsets.clone())? .add_buffer(offsets_ubo.clone())?; Arc::new(set_builder.build()?) }; let model_set = { let mut set_builder = PersistentDescriptorSet::start(shared.commons_layout(renderer)?); set_builder.add_buffer(renderer.commons.clone())? .add_buffer(bones_ubo.clone())? .add_buffer(offsets_ubo.clone())?; Arc::new(set_builder.build()?) 
}; Ok(MMDModel { inner: ComponentInner::new(), state: RefCell::new(MMDModelState { bones, morphs, bones_mats, morphs_vec, }), shared, bones_ubo, morphs_ubo, morphs_set, offsets_ubo, model_set, }) } pub fn loaded(&self) -> bool { self.shared.fence.check() } fn draw_debug_bones(&self, model_matrix: Isometry3, bones: &[Bone], bones_mats: &[AMat4]) { for (id, bone) in bones.iter().enumerate() { if bone.display { let pos = model_matrix.transform_point(&bones_mats[id].transform_point(&bone.rest_pos())); debug::draw_point(&pos, 10.0, bone.color.clone()); debug::draw_text(&bone.name, &pos, debug::DebugOffset::bottom_right(8.0, 8.0), 32.0, bone.color.clone()); match &bone.connection { BoneConnection::None => {} BoneConnection::Bone(con) => { let cpos = model_matrix.transform_point(&bones_mats[*con].transform_point(&bones[*con].rest_pos())); debug::draw_line(pos, cpos, 3.0, bone.color.clone()); } BoneConnection::Offset(cpos) => { let cpos = model_matrix.transform_point(&bones_mats[id].transform_point(&(&bone.rest_pos() + cpos))); debug::draw_line(pos, cpos, 3.0, bone.color.clone()); } } } } } } impl Component for MMDModel { fn pre_render(&self, entity: &Entity, _renderer: &Renderer, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>) -> Result<(), ComponentError> { let state = &mut *self.state.borrow_mut(); for bone in &state.bones { let transform = match bone.parent { None => { let transform: AMat4 = bone.anim_transform.to_superset(); &bone.local_transform * &transform }, Some(id) => &state.bones_mats[id] * &bone.local_transform * &bone.anim_transform, }; state.bones_mats.push(transform); } for (id, mat) in state.bones_mats.iter_mut().enumerate() { *mat = *mat * &state.bones[id].inv_model_transform; } if debug::get_flag_or_default("DebugBonesDraw") { self.draw_debug_bones(entity.state().position, &state.bones, &state.bones_mats); } let bone_buf = self.shared.bones_pool.chunk(state.bones_mats.drain(..))?; builder.copy_buffer(bone_buf, self.bones_ubo.clone())?; 
state.morphs_vec.clear(); let mut max_size = 0; let mut packing = false; for (id, scale) in state.morphs.iter().enumerate() { if scale.abs() > f32::EPSILON { if packing { if let Some(last) = state.morphs_vec.last_mut() { last.z = id as i32; last.w = scale.to_bits() as i32; } } else { state.morphs_vec.push(vector!(id as i32, scale.to_bits() as i32, 0, 0)); } packing = !packing; if self.shared.morphs_sizes[id] > max_size { max_size = self.shared.morphs_sizes[id]; } } } if state.morphs_vec.is_empty() { builder.fill_buffer(self.offsets_ubo.clone(), 0)?; } else { let groups = (max_size + MORPH_GROUP_SIZE - 1) / MORPH_GROUP_SIZE; let morph_buf = self.shared.morphs_pool.chunk(state.morphs_vec.iter().copied())?; builder.copy_buffer(morph_buf, self.morphs_ubo.clone())? .fill_buffer(self.offsets_ubo.clone(), 0)? .bind_pipeline_compute(self.shared.morphs_pipeline.clone()) .bind_descriptor_sets(PipelineBindPoint::Compute, self.shared.morphs_pipeline.layout().clone(), 0, self.morphs_set.clone()) .push_constants(self.shared.morphs_pipeline.layout().clone(), 0, self.shared.morphs_max_size as u32) .dispatch([groups as u32, state.morphs_vec.len() as u32 * 2, 1])?; } Ok(()) } fn render(&self, entity: &Entity, _renderer: &Renderer, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>) -> Result<(), ComponentError> { if !self.loaded() { return Ok(()) } let model_matrix = entity.state().position.to_homogeneous(); builder.bind_vertex_buffers(0, self.shared.vertices.clone()) .bind_any_index_buffer(self.shared.indices.clone()); for sub_mesh in self.shared.sub_meshes.iter() { if let Some((pipeline, mesh_set)) = sub_mesh.edge.clone() { let pixel = (110.0_f32 / 360.0 * std::f32::consts::PI).tan() * 2.0 / 1440.0; let scale: f32 = pixel * sub_mesh.edge_scale; builder.bind_pipeline_graphics(pipeline.clone()) .bind_descriptor_sets(PipelineBindPoint::Graphics, pipeline.layout().clone(), 0, (self.model_set.clone(), mesh_set)) .push_constants(pipeline.layout().clone(), 0, 
(model_matrix.clone(), sub_mesh.edge_color, scale)) .draw_indexed(sub_mesh.range.len() as u32, 1, sub_mesh.range.start, 0, 0)?; } } for sub_mesh in self.shared.sub_meshes.iter() { let (pipeline, mesh_set) = sub_mesh.main.clone(); builder.bind_pipeline_graphics(pipeline.clone()) .bind_descriptor_sets(PipelineBindPoint::Graphics, pipeline.layout().clone(), 0, (self.model_set.clone(), mesh_set)) .push_constants(self.shared.sub_meshes.first().unwrap().main.0.layout().clone(), 0, (model_matrix.clone(), Vec4::zero(), 0.0_f32)) .draw_indexed(sub_mesh.range.len() as u32, 1, sub_mesh.range.start, 0, 0)?; } for sub_mesh in self.shared.sub_meshes.iter() { if let Some((pipeline, mesh_set)) = sub_mesh.transparent.clone() { builder.bind_pipeline_graphics(pipeline.clone()) .bind_descriptor_sets(PipelineBindPoint::Graphics, pipeline.layout().clone(), 0, (self.model_set.clone(), mesh_set)) .push_constants(self.shared.sub_meshes.first().unwrap().main.0.layout().clone(), 0, (model_matrix.clone(), Vec4::zero(), 0.0_f32)) .draw_indexed(sub_mesh.range.len() as u32, 1, sub_mesh.range.start, 0, 0)?; } } Ok(()) } }
use std::cell::RefCell; use std::mem::size_of; use std::sync::Arc; use num_traits::Zero; use simba::scalar::SubsetOf; use vulkano::buffer::{BufferUsage, DeviceLocalBuffer, TypedBufferAccess}; use vulkano::command_buffer::{AutoCommandBufferBuilder, PrimaryAutoCommandBuffer}; use vulkano::descriptor_set::{DescriptorSet, PersistentDescriptorSet}; use vulkano::device::DeviceOwned; use vulkano::DeviceSize; use vulkano::pipeline::PipelineBindPoint; pub mod test; mod bone; mod sub_mesh; mod shared; pub use crate::renderer::pipelines::mmd::{MORPH_GROUP_SIZE, Vertex}; use crate::renderer::Renderer; use crate::application::Entity; use crate::utils::AutoCommandBufferBuilderEx; use crate::component::{Component, ComponentBase, ComponentInner, ComponentError}; use crate::debug; use crate::math::{AMat4, Isometry3, IVec4, Vec4}; use super::ModelError; pub use bone::{Bone, BoneConnection}; pub use shared::{MMDModelShared, SubMeshDesc}; pub struct MMDModelState { pub bones: Vec<Bone>, pub morphs: Vec<f32>, bones_mats: Vec<AMat4>, morphs_vec: Vec<IVec4>, } #[derive(ComponentBase)] pub struct MMDModel { #[inner] inner: ComponentInner, pub state: RefCell<MMDModelState>, shared: Arc<MMDModelShared>, bones_ubo: Arc<DeviceLocalBuffer<[AMat4]>>, morphs_ubo: Arc<DeviceLocalBuffer<[IVec4]>>, offsets_ubo: Arc<DeviceLocalBuffer<[IVec4]>>, morphs_set: Arc<dyn DescriptorSet + Send + Sync>, model_set: Arc<dyn DescriptorSet + Send + Sync>, } #[allow(dead_code)] impl MMDModel { pub fn new(shared: Arc<MMDModelShared>, renderer: &mut Renderer) -> Result<MMDModel, ModelError> { let bone_count = shared.default_bones.len(); let bones = shared.default_bones.clone(); let bones_mats = Vec::with_capacity(bone_count); let bones_ubo = DeviceLocalBuffer::array(shared.vertices.device().clone(), (size_of::<AMat4>() * bone_count) as DeviceSize, BufferUsage { transfer_destination: true, storage_buffer: true, ..BufferUsage::none() }, Some(renderer.queue.family()))?; let morphs = vec![0.0; 
shared.morphs_sizes.len()]; let morphs_vec_count = (shared.morphs_sizes.len() + 1) / 2; let morphs_vec = Vec::with_capacity(morphs_vec_count); let morphs_ubo = DeviceLocalBuffer::array(shared.vertices.device().clone(), morphs_vec_count as DeviceSize, BufferUsage { transfer_destination: true, storage_buffer: true, ..BufferUsage::none() }, Some(renderer.queue.family()))?; let offsets_ubo = DeviceLocalBuffer::array(shared.vertices.device().clone(), shared.vertices.len(), BufferUsage { transfer_destination: true, storage_buffer: true, ..BufferUsage::none() }, Some(renderer.queue.family()))?; let compute_layout = shared.morphs_pipeline .layout() .descriptor_set_layouts() .get(0) .ok_or(ModelError::NoLayout)? .clone(); let morphs_set = { let mut set_builder = PersistentDescriptorSet::start(compute_layout); set_builder.add_buffer(morphs_ubo.clone())? .add_buffer(shared.morphs_offsets.clone())? .add_buffer(offsets_ubo.clone())?; Arc::new(set_builder.build()?) }; let model_set = { let mut set_builder = PersistentDescriptorSet::start(shared.commons_layout(renderer)?); set_builder.add_buffer(renderer.commons.clone())? .add_buffer(bones_ubo.clone())? .add_buffer(offsets_ubo.clone())?; Arc::new(set_builder.build()?) 
}; Ok(MMDModel { inner: ComponentInner::new(), state: RefCell::new(MMDModelState { bones, morphs, bones_mats, morphs_vec, }), shared, bones_ubo, morphs_ubo, morphs_set, offsets_ubo, model_set, }) } pub fn loaded(&self) -> bool { self.shared.fence.check() } fn draw_debug_bones(&self, model_matrix: Isometry3, bones: &[Bone], bones_mats: &[AMat4]) { for (id, bone) in bones.iter().enumerate() { if bone.display { let pos = model_matrix.transform_point(&bones_mats[id].transform_point(&bone.rest_pos())); debug::draw_point(&pos, 10.0, bone.color.clone()); debug::draw_text(&bone.name, &pos, debug::DebugOffset::bottom_right(8.0, 8.0), 32.0, bone.color.clone()); match &bone.connection { BoneConnection::None => {} BoneConnection::Bone(con) => { let cpos = model_matrix.transform_point(&bones_mats[*con].transform_point(&bones[*con].rest_pos())); debug::draw_line(pos, cpos, 3.0, bone.color.clone()); } BoneConnection::Offset(cpos) => { let cpos = model_matrix.transform_point(&bones_mats[id].transform_point(&(&bone.rest_pos() + cpos))); debug::draw_line(pos, cpos, 3.0, bone.color.clone()); } } } } } } impl Component for MMDModel { fn pre_render(&self, entity: &Entity, _renderer: &Renderer, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>) -> Result<(), ComponentError> { let state = &mut *self.state.borrow_mut(); for bone in &state.bones { let transform = match bone.parent { None => { let transform: AMat4 = bone.anim_transform.to_superset(); &bone.local_transform * &transform }, Some(id) => &state.bones_mats[id] * &bone.local_transform * &bone.anim_transform, }; state.bones_mats.push(transform); } for (id, mat) in state.bones_mats.iter_mut().enumerate() { *mat = *mat * &state.bones[id].inv_model_transform; } if debug::get_flag_or_default("DebugBonesDraw") { self.draw_debug_bones(entity.state().position, &state.bones, &state.bones_mats); } let bone_buf = self.shared.bones_pool.chunk(state.bones_mats.drain(..))?; builder.copy_buffer(bone_buf, self.bones_ubo.clone())?; 
state.morphs_vec.clear(); let mut max_size = 0; let mut packing = false; for (id, scale) in state.morphs.iter().enumerate() { if scale.abs() > f32::EPSILON { if packing { if let Some(last) = state.morphs_vec.last_mut() { last.z = id as i32; last.w = scale.to_bits() as i32; } } else { state.morphs_vec.push(vector!(id as i32, scale.to_bits() as i32, 0, 0)); } packing = !packing; if self.shared.morphs_sizes[id] > max_size { max_size = self.shared.morphs_sizes[id]; } } } if state.morphs_vec.is_empty() { builder.fill_buffer(self.offsets_ubo.clone(), 0)?; } else { let groups = (max_size + MORPH_GROUP_SIZE - 1) / MORPH_GROUP_SIZE; let morph_buf = self.shared.morphs_pool.chunk(state.morphs_vec.iter().copied())?; builder.copy_buffer(morph_buf, self.morphs_ubo.clone())? .fill_buffer(self.offsets_ubo.clone(), 0)? .bind_pipeline_compute(self.shared.morphs_pipeline.clone()) .bind_descriptor_sets(PipelineBindPoint::Compute, self.shared.morphs_pipeline.layout().clone(), 0, self.morphs_set.clone()) .push_constants(self.shared.morphs_pipeline.layout().clone(), 0, self.shared.morphs_max_size as u32) .dispatch([groups as u32, state.morphs_vec.len() as u32 * 2, 1])?; } Ok(()) } fn render(&self, entity: &Entity, _renderer: &Renderer, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>) -> Result<(), ComponentError> { if !self.loaded() { return Ok(()) } let model_matrix = entity.state().position.to_homogeneous(); builder.bind_vertex_buffers(0, self.shared.vertices.clone()) .bind_any_index_buffer(self.shared.indices.clone()); for sub_mesh in self.shared.sub_meshes.iter() { if let Some((pipeline, mesh_set)) = sub_mesh.edge.clone() { let pixel = (110.0_f32 / 360.0 * std::f32::consts::PI).tan() * 2.0 / 1440.0; let scale: f32 = pixel * sub_mesh.edge_scale; builder.bind_pipeline_graphics(pipeline.clone()) .bind_descriptor_sets(PipelineBindPoint::Graphics, pipeline.layout().clone(), 0, (self.model_set.clone(), mesh_set)) .push_constants(pipeline.layout().clone(), 0, 
(model_matrix.clone(), sub_mesh.edge_color, scale)) .draw_indexed(sub_mesh.range.len() as u32, 1, sub_mesh.range.start, 0, 0)?; } } for sub_mesh in self.shared.sub_meshes.iter() { let (pipeline, mesh_set) = sub_mesh.main.clone(); builder.bind_pipeline_graphics(pipeline.clone()) .bind_descriptor_sets(PipelineBindPoint::
(model_matrix.clone(), Vec4::zero(), 0.0_f32)) .draw_indexed(sub_mesh.range.len() as u32, 1, sub_mesh.range.start, 0, 0)?; } for sub_mesh in self.shared.sub_meshes.iter() { if let Some((pipeline, mesh_set)) = sub_mesh.transparent.clone() { builder.bind_pipeline_graphics(pipeline.clone()) .bind_descriptor_sets(PipelineBindPoint::Graphics, pipeline.layout().clone(), 0, (self.model_set.clone(), mesh_set)) .push_constants(self.shared.sub_meshes.first().unwrap().main.0.layout().clone(), 0, (model_matrix.clone(), Vec4::zero(), 0.0_f32)) .draw_indexed(sub_mesh.range.len() as u32, 1, sub_mesh.range.start, 0, 0)?; } } Ok(()) } }
Graphics, pipeline.layout().clone(), 0, (self.model_set.clone(), mesh_set)) .push_constants(self.shared.sub_meshes.first().unwrap().main.0.layout().clone(), 0,
random
[ { "content": "#[allow(dead_code)]\n\npub fn test_model(renderer: &mut Renderer) -> MMDModel {\n\n\tlet mut vertices = vec![];\n\n\tlet mut indices = vec![];\n\n\tlet bones_num = 1;\n\n\tlet height = 2.0;\n\n\t\n\n\tlet mut make_wall = |from: Vec3, to: Vec3, normal: Vec3, divs: usize, bones: usize| {\n\n\t\tlet base_index = vertices.len();\n\n\t\t\n\n\t\tfor d in 0..=divs {\n\n\t\t\tlet part = d as f32 / divs as f32;\n\n\t\t\t\n\n\t\t\tlet bone = (part * bones as f32).trunc() as u32;\n\n\t\t\tlet bone_w = 1.0 - (part * bones as f32).fract();\n\n\t\t\t\n\n\t\t\tvertices.push(Vertex::new([from.x, (to.y - from.y) * part + from.y, from.z], normal.clone(), [0.0, part], 1.0, [bone, bone + 1, 0, 0], [bone_w, 1.0 - bone_w, 0.0, 0.0]));\n\n\t\t\tvertices.push(Vertex::new([ to.x, (to.y - from.y) * part + from.y, to.z], normal.clone(), [1.0, part], 1.0, [bone, bone + 1, 0, 0], [bone_w, 1.0 - bone_w, 0.0, 0.0]));\n\n\t\t}\n\n\t\t\n\n\t\tfor d in 0..divs {\n", "file_path": "src/component/model/mmd/test.rs", "rank": 0, "score": 270852.3242191442 }, { "content": "pub trait VertexIndex: Index + Copy + Send + Sync + Sized + Into<u32> + Hash + Debug + 'static {}\n\nimpl<T> VertexIndex for T where T: Index + Copy + Send + Sync + Sized + Into<u32> + Hash + Debug + 'static {}\n\n\n\n#[derive(Debug, Error)]\n\npub enum ModelError {\n\n\t#[error(display = \"Pipeline doesn't have specified layout\")] NoLayout,\n\n\t#[error(display = \"Invalid indices range: {:?}, len: {}\", _0, _1)] IndicesRangeError(Range<DeviceSize>, DeviceSize),\n\n\t#[error(display = \"{}\", _0)] PipelineError(#[error(source)] PipelineError),\n\n\t#[error(display = \"{}\", _0)] ImageError(#[error(source)] image::ImageError),\n\n\t#[error(display = \"{}\", _0)] DeviceMemoryAllocError(#[error(source)] memory::DeviceMemoryAllocError),\n\n\t#[error(display = \"{}\", _0)] ImageCreationError(#[error(source)] vulkano::image::ImageCreationError),\n\n\t#[error(display = \"{}\", _0)] ImageViewCreationError(#[error(source)] 
vulkano::image::view::ImageViewCreationError),\n\n\t#[error(display = \"{}\", _0)] FlushError(#[error(source)] sync::FlushError),\n\n\t#[error(display = \"{}\", _0)] DescriptorSetError(#[error(source)] descriptor_set::DescriptorSetError),\n\n}\n", "file_path": "src/component/model/mod.rs", "rank": 1, "score": 251497.8089427731 }, { "content": "pub fn rcu(update: impl Fn(&mut Config)) {\n\n\tCONFIG.rcu(|current| {\n\n\t\tlet mut new = (**current).clone();\n\n\t\tupdate(&mut new);\n\n\t\tnew\n\n\t});\n\n}\n", "file_path": "src/config.rs", "rank": 2, "score": 218490.37876554055 }, { "content": "pub fn pre_mul_alpha_blending() -> AttachmentBlend {\n\n\tAttachmentBlend {\n\n\t\tenabled: true,\n\n\t\tcolor_op: BlendOp::Add,\n\n\t\tcolor_source: BlendFactor::One,\n\n\t\tcolor_destination: BlendFactor::OneMinusSrcAlpha,\n\n\t\talpha_op: BlendOp::Add,\n\n\t\talpha_source: BlendFactor::One,\n\n\t\talpha_destination: BlendFactor::OneMinusSrcAlpha,\n\n\t\tmask_red: true,\n\n\t\tmask_green: true,\n\n\t\tmask_blue: true,\n\n\t\tmask_alpha: true,\n\n\t}\n\n}\n\n\n\n\n\n#[derive(Debug, Error)]\n\npub enum PipelineError {\n\n\t#[error(display = \"{}\", _0)] RenderPassCreationError(#[error(source)] vulkano::render_pass::RenderPassCreationError),\n\n\t#[error(display = \"{}\", _0)] GraphicsPipelineCreationError(#[error(source)] vulkano::pipeline::GraphicsPipelineCreationError),\n\n\t#[error(display = \"{}\", _0)] ComputePipelineCreationError(#[error(source)] vulkano::pipeline::ComputePipelineCreationError),\n\n}\n", "file_path": "src/renderer/pipelines/mod.rs", "rank": 3, "score": 193588.31974614368 }, { "content": "pub fn draw_line(from: impl Into<DebugPosition>, to: impl Into<DebugPosition>, width: f32, color: Color) {\n\n\tDEBUG_LINES.with(|lines| {\n\n\t\tlines.borrow_mut().push(DebugLine{ from: from.into(), to: to.into(), width, color });\n\n\t})\n\n}\n\n\n", "file_path": "src/debug.rs", "rank": 4, "score": 182646.1417040595 }, { "content": "pub fn draw_point(position: impl 
Into<DebugPosition>, radius: f32, color: Color) {\n\n\tDEBUG_POINTS.with(|points| {\n\n\t\tpoints.borrow_mut().push(DebugPoint{ position: position.into(), radius, color });\n\n\t})\n\n}\n\n\n", "file_path": "src/debug.rs", "rank": 5, "score": 179844.07745799664 }, { "content": "fn lookup_component(cur_dir: &PathBuf, name: &OsStr, full_path: &PathBuf, dir: bool) -> Result<OsString, AssetError> {\n\n\tlet mut next_dir = None;\n\n\t\n\n\tlet result = try {\n\n\t\tfor file in std::fs::read_dir(&cur_dir)? {\n\n\t\t\tlet file = file?;\n\n\t\t\t\n\n\t\t\tif (!dir && file.file_type()?.is_file()) || (dir && file.file_type()?.is_dir()) {\n\n\t\t\t\tif file.file_name() == name {\n\n\t\t\t\t\tnext_dir = Some(name.to_owned());\n\n\t\t\t\t\tbreak;\n\n\t\t\t\t} else if file.file_name().to_ascii_lowercase() == name.to_ascii_lowercase() {\n\n\t\t\t\t\tnext_dir = Some(file.file_name());\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t};\n\n\t\n\n\tmatch (result, next_dir) {\n\n\t\t(Ok(()), Some(next_dir)) => Ok(next_dir),\n", "file_path": "src/renderer/assets_manager/mod.rs", "rank": 6, "score": 178715.79996088013 }, { "content": "pub fn debug() -> bool {\n\n\tlet read = DEBUG.load(Ordering::Relaxed);\n\n\tread\n\n}\n\n\n", "file_path": "src/debug.rs", "rank": 7, "score": 175644.19202320182 }, { "content": "pub fn draw_text(text: impl Into<String>, position: impl Into<DebugPosition>, offset: DebugOffset, size: f32, color: Color) {\n\n\tDEBUG_TEXTS.with(|texts| {\n\n\t\ttexts.borrow_mut().push(DebugText{ text: text.into(), position: position.into(), offset, size, color });\n\n\t})\n\n}\n\n\n", "file_path": "src/debug.rs", "rank": 8, "score": 169182.3434066097 }, { "content": "pub trait Camera: Send + Sized + 'static {\n\n\tfn capture(&mut self) -> Result<(&[u8], Option<Isometry3>), CameraCaptureError>;\n\n\t\n\n\tfn start(mut self, queue: Arc<Queue>)\n\n\t\t -> Result<(Arc<AttachmentImage>, mpsc::Receiver<(PrimaryAutoCommandBuffer, Option<Isometry3>)>), CameraStartError> {\n\n\t\tlet target = 
AttachmentImage::with_usage(queue.device().clone(),\n\n\t\t [CAPTURE_WIDTH, CAPTURE_HEIGHT],\n\n\t\t Format::B8G8R8A8_UNORM,\n\n\t\t ImageUsage { sampled: true,\n\n\t\t\t transfer_destination: true,\n\n\t\t\t ..ImageUsage::none() })?;\n\n\t\tlet ret = target.clone();\n\n\t\t\n\n\t\tlet (sender, receiver) = mpsc::sync_channel(1);\n\n\t\t\n\n\t\tthread::spawn(move || {\n\n\t\t\tmatch self.capture_loop(queue, target, sender) {\n\n\t\t\t\tOk(()) => {},\n\n\t\t\t\tErr(CaptureLoopError::Quitting) => return,\n\n\t\t\t\tErr(err) => panic!(\"Error while capturing background: {:?}\", err),\n", "file_path": "src/renderer/camera/mod.rs", "rank": 9, "score": 167558.69089003062 }, { "content": "pub fn default_tracked_pose() -> TrackedDevicePose {\n\n\tTrackedDevicePose::from(TrackedDevicePose_t {\n\n\t\tmDeviceToAbsoluteTracking: HmdMatrix34_t { m: [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0]] },\n\n\t\tvVelocity: HmdVector3_t { v: [0.0, 0.0, 0.0] },\n\n\t\tvAngularVelocity: HmdVector3_t { v: [0.0, 0.0, 0.0] },\n\n\t\teTrackingResult: ETrackedDeviceClass_TrackedDeviceClass_Invalid,\n\n\t\tbPoseIsValid: false,\n\n\t\tbDeviceIsConnected: false\n\n\t})\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 10, "score": 167460.39659702 }, { "content": "pub fn set_debug(value: bool) {\n\n\tDEBUG.store(value, Ordering::Relaxed);\n\n}\n\n\n", "file_path": "src/debug.rs", "rank": 11, "score": 165195.48447261855 }, { "content": "pub trait PipelineConstructor: 'static {\n\n\ttype PipeType: Any + Send + Sync;\n\n\t\n\n\tfn new(render_pass: &Arc<RenderPass>, frame_buffer_size: (u32, u32))\n\n\t -> Result<Arc<Self::PipeType>, PipelineError>\n\n\t where Self: Sized;\n\n}\n\n\n\npub struct Pipelines {\n\n\tpipelines: HashMap<TypeId, Arc<dyn Any + Send + Sync>>,\n\n\trender_pass: Arc<RenderPass>,\n\n\tframe_buffer_size: (u32, u32),\n\n}\n\n\n\nimpl Pipelines {\n\n\tpub fn new(render_pass: Arc<RenderPass>, frame_buffer_size: (u32, u32)) -> Pipelines 
{\n\n\t\tPipelines{\n\n\t\t\tpipelines: HashMap::new(),\n\n\t\t\trender_pass,\n\n\t\t\tframe_buffer_size,\n", "file_path": "src/renderer/pipelines/mod.rs", "rank": 12, "score": 161734.99144026233 }, { "content": "pub fn get() -> impl Deref<Target = Arc<Config>> + 'static {\n\n\tCONFIG.load()\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 13, "score": 144616.1247313795 }, { "content": "pub fn next_uid() -> u64 {\n\n\tID_GEN.fetch_add(1, Ordering::Relaxed)\n\n}\n", "file_path": "src/utils/id_gen.rs", "rank": 14, "score": 142334.7688935858 }, { "content": "pub fn cast_ray_on_plane(plane: Isometry3, ray: Ray) -> Option<Point3> {\n\n\tlet norm = plane.transform_vector(&Vec3::z_axis());\n\n\tlet origin = plane.transform_point(&Point3::origin());\n\n\tlet toi = (origin - ray.origin).dot(&norm) / ray.dir.dot(&norm);\n\n\t\n\n\tif toi.is_nan() || toi < 0.0 {\n\n\t\tNone\n\n\t} else {\n\n\t\tlet intersection = ray.point_at(toi);\n\n\t\tSome(plane.inverse_transform_point(&intersection))\n\n\t}\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 15, "score": 140087.2324635996 }, { "content": "// Windows why\n\nfn lookup_windows_path(root: &str, orig_path: &Path) -> Result<PathBuf, AssetError> {\n\n\tif cfg!(target_os = \"windows\") {\n\n\t\treturn Ok(PathBuf::from(root).join(orig_path));\n\n\t}\n\n\t\n\n\tlet mut cur_dir = PathBuf::from(root);\n\n\tlet mut path = PathBuf::from(orig_path.to_string_lossy().replace(\"\\\\\", \"/\"));\n\n\tlet full_path = cur_dir.join(&path);\n\n\tlet file_name = path.file_name().ok_or_else(|| AssetError::new(orig_path.to_string_lossy()))?.to_owned();\n\n\tpath.pop();\n\n\t\n\n\tfor component in path.components() {\n\n\t\tcur_dir.push(lookup_component(&cur_dir, component.as_os_str(), &full_path, true)?);\n\n\t}\n\n\t\n\n\tcur_dir.push(lookup_component(&cur_dir, &file_name, &full_path, false)?);\n\n\t\n\n\tOk(cur_dir)\n\n}\n\n\n", "file_path": "src/renderer/assets_manager/mod.rs", "rank": 16, "score": 137237.97727024753 }, { "content": "pub fn 
default_wait_poses() -> WaitPoses {\n\n\tWaitPoses {\n\n\t\trender: [default_tracked_pose(); MAX_TRACKED_DEVICE_COUNT],\n\n\t\tgame: [default_tracked_pose(); MAX_TRACKED_DEVICE_COUNT],\n\n\t}\n\n}\n", "file_path": "src/utils/mod.rs", "rank": 17, "score": 137181.45723805318 }, { "content": "#[allow(unused_variables)]\n\npub trait Component: ComponentBase {\n\n\tfn start(&self, entity: &Entity, application: &Application) -> Result<(), ComponentError> { Ok(()) }\n\n\tfn tick(&self, entity: &Entity, application: &Application, delta_time: Duration) -> Result<(), ComponentError> { Ok(()) }\n\n\tfn pre_render(&self, entity: &Entity, renderer: &Renderer, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>) -> Result<(), ComponentError> { Ok(()) }\n\n\tfn render(&self, entity: &Entity, renderer: &Renderer, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>) -> Result<(), ComponentError> { Ok(()) }\n\n\tfn end(&self, entity: &Entity, application: &Application) -> Result<(), ComponentError> { Ok(()) }\n\n\t\n\n\tfn boxed(self)\n\n\t -> Box<dyn Component>\n\n\t\twhere Self: Sized\n\n\t{ Box::new(self) }\n\n}\n\n\n\nimpl IntoBoxed<dyn Component> for Box<dyn Component> {\n\n\tfn into(self) -> Box<dyn Component> {\n\n\t\tself\n\n\t}\n\n}\n\n\n\nimpl<M: Component + 'static> IntoBoxed<dyn Component> for M {\n", "file_path": "src/component/mod.rs", "rank": 18, "score": 134673.21642936885 }, { "content": "pub fn check_err(fn_tab: FnTable, code: sys::EVRTrackedCameraError) -> Result<(), TrackedCameraError> {\n\n\tif code == sys::EVRTrackedCameraError_VRTrackedCameraError_None {\n\n\t\tOk(())\n\n\t} else {\n\n\t\tlet name = fn_tab.GetCameraErrorNameFromEnum\n\n\t\t .map(|f| unsafe { f(code) })\n\n\t\t .map(|msg| unsafe { CStr::from_ptr(msg) })\n\n\t\t .map(CStr::to_str)\n\n\t\t .map(Result::ok)\n\n\t\t .flatten()\n\n\t\t .unwrap_or(\"VRTrackedCameraError_UnknownError\")\n\n\t\t .into();\n\n\t\t\n\n\t\tErr(TrackedCameraError{ code, name })\n\n\t}\n\n}\n", 
"file_path": "src/application/vr/tracked_camera/error.rs", "rank": 19, "score": 133938.5570383952 }, { "content": "pub trait ComponentBase: Any {\n\n\tfn inner(&self) -> &ComponentInner;\n\n\tfn inner_mut(&mut self) -> &mut ComponentInner;\n\n\tfn as_any(&self) -> &dyn Any;\n\n\t\n\n\tfn id(&self) -> u64 {\n\n\t\tself.inner().id\n\n\t}\n\n\t\n\n\tfn remove(&self) -> bool {\n\n\t\t!self.inner().removed.replace(true)\n\n\t}\n\n\t\n\n\tfn entity<'a>(&self, application: &'a Application) -> &'a Entity {\n\n\t\tlet eid = self.inner().entity_id.expect(\"Attempted to get entity of unmounted component\");\n\n\t\tapplication.entity(eid).expect(\"Attempted to get entity of unmounted component\")\n\n\t}\n\n\t\n\n\tfn as_cref(&self) -> ComponentRef<Self> where Self: Sized {\n\n\t\tComponentRef::new(self.inner().entity_id.expect(\"Attempted to get reference of unmounted component\"), self.inner().id)\n\n\t}\n\n}\n\n\n", "file_path": "src/component/mod.rs", "rank": 20, "score": 133174.06139005418 }, { "content": "pub fn get_all_tools() -> Vec<Box<dyn Tool>> {\n\n\tvec![\n\n\t\tBox::new(Spawner::new()),\n\n\t\tBox::new(Remover::new()),\n\n\t\tBox::new(Axis::new()),\n\n\t\tBox::new(BallSocket::new()),\n\n\t\tBox::new(Weld::new()),\n\n\t\tBox::new(RopeTool::new()),\n\n\t]\n\n}\n\n\n", "file_path": "src/component/toolgun/tool.rs", "rank": 21, "score": 126106.4974563272 }, { "content": "fn panic_hook() -> impl Fn(&PanicInfo) {\n\n\tlet default_hook = panic::take_hook();\n\n\t\n\n\tmove |info| {\n\n\t\tdefault_hook(info);\n\n\t\t\n\n\t\tlet payload;\n\n\t\tif let Some(string) = info.payload().downcast_ref::<String>() {\n\n\t\t\tpayload = string.clone()\n\n\t\t} else if let Some(string) = info.payload().downcast_ref::<&'static str>() {\n\n\t\t\tpayload = string.to_string()\n\n\t\t} else {\n\n\t\t\tpayload = format!(\"Unformattable panic payload! 
({})\", std::any::type_name_of_val(info.payload()))\n\n\t\t};\n\n\t\t\n\n\t\tlet thread = std::thread::current()\n\n\t\t .name()\n\n\t\t .unwrap_or(\"<unnamed>\")\n\n\t\t .to_string();\n\n\t\t\n", "file_path": "src/main.rs", "rank": 22, "score": 125825.27472596592 }, { "content": "pub fn vulkan_device_extensions_required(compositor: &Compositor, physical: &PhysicalDevice) -> Vec<CString> {\n\n\tunsafe { compositor.vulkan_device_extensions_required(physical.as_ptr()) }\n\n}\n\n\n", "file_path": "src/utils/vulkan.rs", "rank": 23, "score": 124248.06438251992 }, { "content": "// Translates OpenGL projection matrix to Vulkan\n\n// Can't be const because Mat4::new is not const fn or something\n\nfn clip() -> AMat4 {\n\n\tAMat4::from_matrix_unchecked(Mat4::new(\n\n\t\t1.0, 0.0, 0.0, 0.0,\n\n\t\t0.0,-1.0, 0.0, 0.0,\n\n\t\t0.0, 0.0, 0.5, 0.5,\n\n\t\t0.0, 0.0, 0.0, 1.0,\n\n\t))\n\n}\n\n\n\npub struct Eyes {\n\n\tpub main_image: Arc<AttachmentImage>,\n\n\tpub resolved_image: Arc<AttachmentImage>,\n\n\tpub side_image: Arc<AttachmentImage>, // TODO: https://github.com/ValveSoftware/openvr/issues/663\n\n\tpub depth_image: Arc<AttachmentImage>,\n\n\tpub frame_buffer: Arc<dyn FramebufferAbstract + Send + Sync>,\n\n\tpub frame_buffer_size: (u32, u32),\n\n\tpub textures: (Texture, Texture),\n\n\tpub view: (AMat4, AMat4),\n\n\tpub projection: (PMat4, PMat4),\n\n\tpub raw_projection: (Vec4, Vec4),\n", "file_path": "src/renderer/eyes.rs", "rank": 24, "score": 120104.97211976728 }, { "content": "pub fn debugger() {\n\n\t() // Breakpoint\n\n}\n\n\n", "file_path": "src/debug.rs", "rank": 25, "score": 119022.81432043188 }, { "content": "fn load<T>(suffix: &[u8]) -> Result<*const T, InitError> {\n\n\tlet mut magic = Vec::from(b\"FnTable:\".as_ref());\n\n\tmagic.extend(suffix);\n\n\tlet mut error = sys::EVRInitError_VRInitError_None;\n\n\tlet result = unsafe { sys::VR_GetGenericInterface(magic.as_ptr() as *const i8, &mut error) };\n\n\tif error != sys::EVRInitError_VRInitError_None 
{\n\n\t\treturn Err(InitError(\n\n\t\t\tsys::EVRInitError_VRInitError_Init_InterfaceNotFound,\n\n\t\t));\n\n\t}\n\n\tOk(result as *const T)\n\n}\n", "file_path": "src/application/vr/tracked_camera/mod.rs", "rank": 26, "score": 117576.70399512319 }, { "content": "fn run_application() -> Result<(), RunError> {\n\n\tlet config_path = \"config.toml\";\n\n\tlet file_name = std::env::args().next().unwrap_or(\"project39-ar.exe\".to_string());\n\n\t\n\n\tlet mut config = if fs::metadata(config_path).is_ok() {\n\n\t\tlet config_file = fs::read_to_string(config_path)?;\n\n\t\ttoml::from_str(&config_file)?\n\n\t} else {\n\n\t\teprintln!(\"\\nUnable to locate config.toml!\");\n\n\t\teprintln!(\"Use `{} --example_config` to print an example config.\\n\", file_name);\n\n\t\t\n\n\t\tConfig::default()\n\n\t};\n\n\t\n\n\tif let Err(err) = config.apply_args() {\n\n\t\tprint_usage(&file_name, config);\n\n\t\treturn Err(err.into());\n\n\t}\n\n\t\n\n\tif config.help {\n", "file_path": "src/main.rs", "rank": 27, "score": 114884.04438951702 }, { "content": "#[proc_macro_derive(ComponentBase, attributes(inner))]\n\npub fn component_base_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast: syn::DeriveInput = syn::parse(input).unwrap();\n\n let name = &ast.ident;\n\n let mut inner = None;\n\n let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl();\n\n \n\n match ast.data {\n\n Data::Struct(data) => {\n\n for field in &data.fields {\n\n for attr in &field.attrs {\n\n if attr.path\n\n .get_ident()\n\n .map_or(false, |i| i.to_string() == \"inner\") {\n\n let field_name = field.ident.clone().unwrap();\n\n \n\n if let Some(_) = inner.replace(field_name) {\n\n panic!(\"Duplicate #[inner] attribute.\");\n\n }\n\n }\n\n }\n", "file_path": "derive/src/lib.rs", "rank": 28, "score": 113742.18097569884 }, { "content": "\t\t}\n\n\t}\n\n\t\n\n\tpub fn get<P: PipelineConstructor>(&mut self) -> Result<Arc<P::PipeType>, PipelineError> {\n\n\t\tif let 
Some(pipeline) = self.pipelines.get(&TypeId::of::<P>()) {\n\n\t\t\tOk(pipeline.clone().downcast().unwrap())\n\n\t\t} else {\n\n\t\t\tlet pipeline = P::new(&self.render_pass, self.frame_buffer_size)?;\n\n\t\t\tself.pipelines.insert(TypeId::of::<P>(), pipeline.clone());\n\n\t\t\t\n\n\t\t\tOk(pipeline)\n\n\t\t}\n\n\t}\n\n}\n\n\n\n\n", "file_path": "src/renderer/pipelines/mod.rs", "rank": 29, "score": 111217.67531532924 }, { "content": "use std::collections::HashMap;\n\nuse std::sync::Arc;\n\nuse std::any::{TypeId, Any};\n\nuse std::fmt::Debug;\n\nuse err_derive::Error;\n\nuse vulkano::pipeline::blend::{AttachmentBlend, BlendOp, BlendFactor};\n\nuse vulkano::render_pass::RenderPass;\n\n\n\npub mod default;\n\npub mod background;\n\npub mod mmd;\n\npub mod debug;\n\npub mod toolgun_text;\n\n\n", "file_path": "src/renderer/pipelines/mod.rs", "rank": 30, "score": 111215.1130119432 }, { "content": "pub trait AssetKey: Hash + Display {\n\n\ttype Asset: Clone + 'static;\n\n\ttype Error: std::error::Error;\n\n\t\n\n\tfn load(&self, assets_manager: &mut AssetsManager, renderer: &mut Renderer) -> Result<Self::Asset, Self::Error>;\n\n}\n\n\n", "file_path": "src/renderer/assets_manager/mod.rs", "rank": 31, "score": 111159.11579159822 }, { "content": "use crate::math::IntoArray;\n\n\n\n#[derive(Default, Copy, Clone)]\n\npub struct Vertex {\n\n\tpos: [f32; 3],\n\n\tnormal: [f32; 3],\n\n\tuv: [f32; 2],\n\n\tedge_scale: f32,\n\n\tbones_indices: [u32; 4],\n\n\tbones_weights: [f32; 4],\n\n}\n\n\n\nvulkano::impl_vertex!(Vertex, pos, normal, uv, edge_scale, bones_indices, bones_weights);\n\n\n\nimpl Vertex {\n\n\tpub fn new(pos: impl IntoArray<[f32; 3]>, normal: impl IntoArray<[f32; 3]>, uv: impl IntoArray<[f32; 2]>, edge_scale: f32, bones_indices: impl IntoArray<[u32; 4]>, bones_weights: impl IntoArray<[f32; 4]>) -> Self {\n\n\t\tVertex {\n\n\t\t\tpos: pos.into_array(),\n\n\t\t\tnormal: normal.into_array(),\n\n\t\t\tuv: uv.into_array(),\n\n\t\t\tedge_scale,\n\n\t\t\tbones_indices: 
bones_indices.into_array(),\n\n\t\t\tbones_weights: bones_weights.into_array(),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/mmd/vertex.rs", "rank": 32, "score": 110448.20891184786 }, { "content": "use crate::math::IntoArray;\n\n\n\n#[derive(Default, Copy, Clone)]\n\npub struct Vertex {\n\n\tpub pos: [f32; 3],\n\n\tpub normal: [f32; 3],\n\n\tpub uv: [f32; 2],\n\n}\n\n\n\nvulkano::impl_vertex!(Vertex, pos, normal, uv);\n\n\n\nimpl Vertex {\n\n\tpub fn new(pos: impl IntoArray<[f32; 3]>, normal: impl IntoArray<[f32; 3]>, uv: impl IntoArray<[f32; 2]>) -> Self {\n\n\t\tVertex {\n\n\t\t\tpos: pos.into_array(),\n\n\t\t\tnormal: normal.into_array(),\n\n\t\t\tuv: uv.into_array(),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/default/vertex.rs", "rank": 33, "score": 110437.6693178707 }, { "content": "use crate::math::IntoArray;\n\n\n\n#[derive(Default, Copy, Clone)]\n\npub struct Vertex {\n\n\tpos: [f32; 2],\n\n}\n\n\n\nvulkano::impl_vertex!(Vertex, pos);\n\n\n\nimpl Vertex {\n\n\tpub fn new(pos: impl IntoArray<[f32; 2]>) -> Self {\n\n\t\tVertex {\n\n\t\t\tpos: pos.into_array(),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/background/vertex.rs", "rank": 34, "score": 110437.62786964883 }, { "content": "use crate::math::{IntoArray};\n\n\n\n#[derive(Default, Copy, Clone, Debug)]\n\npub struct Vertex {\n\n\tpos_left: [f32; 3],\n\n\tpos_right: [f32; 3],\n\n\tcolor: [f32; 4],\n\n}\n\n\n\nvulkano::impl_vertex!(Vertex, pos_left, pos_right, color);\n\n\n\nimpl Vertex {\n\n\tpub fn new(pos_left: impl IntoArray<[f32; 3]>, pos_right: impl IntoArray<[f32; 3]>, color: impl IntoArray<[f32; 4]>) -> Self {\n\n\t\tVertex {\n\n\t\t\tpos_left: pos_left.into_array(),\n\n\t\t\tpos_right: pos_right.into_array(),\n\n\t\t\tcolor: color.into_array(),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/debug/vertex.rs", "rank": 35, "score": 110436.13707640007 }, { "content": "\n\n#[derive(Default, Copy, Clone, Debug)]\n\npub struct 
TexturedVertex {\n\n\tpos_left: [f32; 3],\n\n\tpos_right: [f32; 3],\n\n\tuv: [f32; 2],\n\n\tcolor: [f32; 4],\n\n}\n\n\n\nvulkano::impl_vertex!(TexturedVertex, pos_left, pos_right, uv, color);\n\n\n\nimpl TexturedVertex {\n\n\tpub fn new(pos_left: impl IntoArray<[f32; 3]>, pos_right: impl IntoArray<[f32; 3]>, uv: impl IntoArray<[f32; 2]>, color: impl IntoArray<[f32; 4]>) -> Self {\n\n\t\tTexturedVertex {\n\n\t\t\tpos_left: pos_left.into_array(),\n\n\t\t\tpos_right: pos_right.into_array(),\n\n\t\t\tuv: uv.into_array(),\n\n\t\t\tcolor: color.into_array(),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/debug/vertex.rs", "rank": 36, "score": 110433.05292492153 }, { "content": "pub fn set(config: Config) {\n\n\tCONFIG.store(Arc::new(config));\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 37, "score": 109145.05051159693 }, { "content": "\t\t}\n\n\t}\n\n);\n\n\n\npub const MORPH_GROUP_SIZE: usize = 32;\n\n\n\npub struct MMDPipelineMorphs;\n\n\n\nimpl PipelineConstructor for MMDPipelineMorphs {\n\n\ttype PipeType = ComputePipeline;\n\n\t\n\n\tfn new(render_pass: &Arc<RenderPass>, _frame_buffer_size: (u32, u32)) -> Result<Arc<Self::PipeType>, PipelineError> {\n\n\t\tlet device = render_pass.device().clone();\n\n\t\tlet cs = morph_comp::Shader::load(device.clone()).unwrap();\n\n\t\t\n\n\t\tOk(Arc::new(\n\n\t\t\tComputePipeline::new(device, &cs.main_entry_point(), &(), None, |_| {})?\n\n\t\t))\n\n\t}\n\n}\n\n\n", "file_path": "src/renderer/pipelines/mmd/mod.rs", "rank": 38, "score": 107865.85640732467 }, { "content": "pub struct DefaultGlowPipeline;\n\n\n\nimpl PipelineConstructor for DefaultGlowPipeline {\n\n\ttype PipeType = GraphicsPipeline;\n\n\t\n\n\tfn new(render_pass: &Arc<RenderPass>, frame_buffer_size: (u32, u32)) -> Result<Arc<Self::PipeType>, PipelineError> {\n\n\t\tlet device = render_pass.device();\n\n\t\tlet vs = glow_vert::Shader::load(device.clone()).unwrap();\n\n\t\tlet fs = 
glow_frag::Shader::load(device.clone()).unwrap();\n\n\t\t\n\n\t\tOk(Arc::new(\n\n\t\t\tGraphicsPipeline::start()\n\n\t\t\t\t.vertex_input_single_buffer::<Vertex>()\n\n\t\t\t\t.vertex_shader(vs.main_entry_point(), ())\n\n\t\t\t\t.viewports(Some(Viewport {\n\n\t\t\t\t\torigin: [0.0, 0.0],\n\n\t\t\t\t\tdimensions: [frame_buffer_size.0 as f32, frame_buffer_size.1 as f32],\n\n\t\t\t\t\tdepth_range: 0.0..1.0,\n\n\t\t\t\t}))\n\n\t\t\t\t.fragment_shader(fs.main_entry_point(), ())\n\n\t\t\t\t.depth_stencil_simple_depth()\n\n\t\t\t\t.cull_mode_front()\n\n\t\t\t\t.blend_collective(pre_mul_alpha_blending())\n\n\t\t\t\t.render_pass(Subpass::from(render_pass.clone(), 0).unwrap())\n\n\t\t\t\t.build(device.clone())?\n\n\t\t))\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/default/mod.rs", "rank": 39, "score": 107857.87465250108 }, { "content": "\n\nimpl PipelineConstructor for DebugTexturedPipeline {\n\n\ttype PipeType = GraphicsPipeline;\n\n\t\n\n\tfn new(render_pass: &Arc<RenderPass>, frame_buffer_size: (u32, u32)) -> Result<Arc<Self::PipeType>, PipelineError> {\n\n\t\tlet device = render_pass.device();\n\n\t\tlet vs = tex_vert::Shader::load(device.clone()).unwrap();\n\n\t\tlet fs = tex_frag::Shader::load(device.clone()).unwrap();\n\n\t\t\n\n\t\tOk(Arc::new(\n\n\t\t\tGraphicsPipeline::start()\n\n\t\t\t\t.vertex_input_single_buffer::<TexturedVertex>()\n\n\t\t\t\t.vertex_shader(vs.main_entry_point(), ())\n\n\t\t\t\t.viewports(Some(Viewport {\n\n\t\t\t\t\torigin: [0.0, 0.0],\n\n\t\t\t\t\tdimensions: [frame_buffer_size.0 as f32, frame_buffer_size.1 as f32],\n\n\t\t\t\t\tdepth_range: 0.0..1.0,\n\n\t\t\t\t}))\n\n\t\t\t\t.fragment_shader(fs.main_entry_point(), ())\n\n\t\t\t\t.blend_collective(pre_mul_alpha_blending())\n\n\t\t\t\t.render_pass(Subpass::from(render_pass.clone(), 0).unwrap())\n\n\t\t\t\t.cull_mode_back()\n\n\t\t\t\t.build(device.clone())?\n\n\t\t))\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/debug/mod.rs", "rank": 40, "score": 107856.29826119111 }, { 
"content": "use std::sync::Arc;\n\nuse vulkano::pipeline::GraphicsPipeline;\n\nuse vulkano::render_pass::{RenderPass, Subpass};\n\nuse vulkano::pipeline::viewport::Viewport;\n\nuse vulkano::device::DeviceOwned;\n\n\n\nmod vertex;\n\n\n\nuse super::{PipelineConstructor, PipelineError};\n\npub use vertex::Vertex;\n\n\n\nmod vert {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./vert.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! {\n\n\t\tty: \"vertex\",\n\n\t\tpath: \"src/renderer/pipelines/background/vert.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/background/mod.rs", "rank": 41, "score": 107853.1149022944 }, { "content": "use std::sync::Arc;\n\nuse vulkano::pipeline::GraphicsPipeline;\n\nuse vulkano::render_pass::{RenderPass, Subpass};\n\nuse vulkano::pipeline::viewport::Viewport;\n\nuse vulkano::device::DeviceOwned;\n\n\n\nmod vertex;\n\n\n\nuse super::{PipelineConstructor, PipelineError, pre_mul_alpha_blending};\n\npub use vertex::{Vertex, TexturedVertex};\n\n\n\nmod vert {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./vert.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! 
{\n\n\t\tty: \"vertex\",\n\n\t\tpath: \"src/renderer/pipelines/debug/vert.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/debug/mod.rs", "rank": 42, "score": 107852.76619159094 }, { "content": "use std::sync::Arc;\n\nuse vulkano::pipeline::GraphicsPipeline;\n\nuse vulkano::render_pass::{RenderPass, Subpass};\n\nuse vulkano::pipeline::viewport::Viewport;\n\nuse vulkano::device::DeviceOwned;\n\n\n\nmod vertex;\n\n\n\nuse super::{PipelineConstructor, PipelineError, pre_mul_alpha_blending};\n\npub use vertex::Vertex;\n\n\n\nmod vert {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./vert.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! {\n\n\t\tty: \"vertex\",\n\n\t\tpath: \"src/renderer/pipelines/default/vert.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/default/mod.rs", "rank": 43, "score": 107852.72772952002 }, { "content": "\n\nmod frag {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./frag.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! 
{\n\n\t\tty: \"fragment\",\n\n\t\tpath: \"src/renderer/pipelines/background/frag.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n\n\n\npub struct BackgroundPipeline;\n\n\n\nimpl PipelineConstructor for BackgroundPipeline {\n\n\ttype PipeType = GraphicsPipeline;\n\n\t\n\n\tfn new(render_pass: &Arc<RenderPass>, frame_buffer_size: (u32, u32)) -> Result<Arc<Self::PipeType>, PipelineError> {\n\n\t\tlet device = render_pass.device();\n\n\t\tlet vs = vert::Shader::load(device.clone()).unwrap();\n\n\t\tlet fs = frag::Shader::load(device.clone()).unwrap();\n", "file_path": "src/renderer/pipelines/background/mod.rs", "rank": 44, "score": 107852.43791649367 }, { "content": "\n\nmod frag {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./frag.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! {\n\n\t\tty: \"fragment\",\n\n\t\tpath: \"src/renderer/pipelines/debug/frag.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n\n\n\npub struct DebugPipeline;\n\n\n\nimpl PipelineConstructor for DebugPipeline {\n\n\ttype PipeType = GraphicsPipeline;\n\n\t\n\n\tfn new(render_pass: &Arc<RenderPass>, frame_buffer_size: (u32, u32)) -> Result<Arc<Self::PipeType>, PipelineError> {\n\n\t\tlet device = render_pass.device();\n\n\t\tlet vs = vert::Shader::load(device.clone()).unwrap();\n\n\t\tlet fs = frag::Shader::load(device.clone()).unwrap();\n", "file_path": "src/renderer/pipelines/debug/mod.rs", "rank": 45, "score": 107852.43791649367 }, { "content": "\n\nmod glow_frag {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"glow_frag.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! 
{\n\n\t\tty: \"fragment\",\n\n\t\tpath: \"src/renderer/pipelines/default/glow_frag.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n\n\n\npub struct DefaultPipeline;\n\n\n\nimpl PipelineConstructor for DefaultPipeline {\n\n\ttype PipeType = GraphicsPipeline;\n\n\t\n\n\tfn new(render_pass: &Arc<RenderPass>, frame_buffer_size: (u32, u32)) -> Result<Arc<Self::PipeType>, PipelineError> {\n\n\t\tlet device = render_pass.device();\n\n\t\tlet vs = vert::Shader::load(device.clone()).unwrap();\n\n\t\tlet fs = frag::Shader::load(device.clone()).unwrap();\n", "file_path": "src/renderer/pipelines/default/mod.rs", "rank": 46, "score": 107852.11441079454 }, { "content": "use vulkano::pipeline::ComputePipeline;\n\n\n\n#[macro_use] mod macros;\n\nmod vertex;\n\n\n\npub use vertex::Vertex;\n\n\n\nmmd_shaders!(\n\n\t\"vertex\" base_vert = \"src/renderer/pipelines/mmd/base_vert.glsl\";\n\n\t\"fragment\" base_frag = \"src/renderer/pipelines/mmd/base_frag.glsl\";\n\n\t\"vertex\" outline_vert = \"src/renderer/pipelines/mmd/outline_vert.glsl\";\n\n\t\"fragment\" outline_frag = \"src/renderer/pipelines/mmd/outline_frag.glsl\";\n\n\t\"compute\" morph_comp = \"src/renderer/pipelines/mmd/morph_comp.glsl\";\n\n);\n\n\n\nmmd_pipelines!(\n\n\tpub pipeline MMDPipelineOpaque {\n\n\t\tshader vs = base_vert;\n\n\t\tshader fs = base_frag { transparent_pass: 0 };\n\n\t\t\n", "file_path": "src/renderer/pipelines/mmd/mod.rs", "rank": 47, "score": 107850.68896047567 }, { "content": "\t\t\n\n\t\tOk(Arc::new(\n\n\t\t\tGraphicsPipeline::start()\n\n\t\t\t\t.vertex_input_single_buffer::<Vertex>()\n\n\t\t\t\t.vertex_shader(vs.main_entry_point(), ())\n\n\t\t\t\t.viewports(Some(Viewport {\n\n\t\t\t\t\torigin: [0.0, 0.0],\n\n\t\t\t\t\tdimensions: [frame_buffer_size.0 as f32, frame_buffer_size.1 as f32],\n\n\t\t\t\t\tdepth_range: 0.0..1.0,\n\n\t\t\t\t}))\n\n\t\t\t\t.fragment_shader(fs.main_entry_point(), 
())\n\n\t\t\t\t.blend_collective(pre_mul_alpha_blending())\n\n\t\t\t\t.render_pass(Subpass::from(render_pass.clone(), 0).unwrap())\n\n\t\t\t\t.cull_mode_back()\n\n\t\t\t\t.build(device.clone())?\n\n\t\t))\n\n\t}\n\n}\n\n\n\nmod tex_vert {\n", "file_path": "src/renderer/pipelines/debug/mod.rs", "rank": 48, "score": 107849.27380519587 }, { "content": "\t\t\n\n\t\tOk(Arc::new(\n\n\t\t\tGraphicsPipeline::start()\n\n\t\t\t\t.vertex_input_single_buffer::<Vertex>()\n\n\t\t\t\t.vertex_shader(vs.main_entry_point(), ())\n\n\t\t\t\t.viewports(Some(Viewport {\n\n\t\t\t\t\torigin: [0.0, 0.0],\n\n\t\t\t\t\tdimensions: [frame_buffer_size.0 as f32, frame_buffer_size.1 as f32],\n\n\t\t\t\t\tdepth_range: 0.0..1.0,\n\n\t\t\t\t}))\n\n\t\t\t\t.fragment_shader(fs.main_entry_point(), ())\n\n\t\t\t\t.depth_stencil_disabled()\n\n\t\t\t\t.cull_mode_back()\n\n\t\t\t\t.render_pass(Subpass::from(render_pass.clone(), 0).unwrap())\n\n\t\t\t\t.build(device.clone())?\n\n\t\t))\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/background/mod.rs", "rank": 49, "score": 107846.96952681318 }, { "content": "\t\t\n\n\t\tOk(Arc::new(\n\n\t\t\tGraphicsPipeline::start()\n\n\t\t\t\t.vertex_input_single_buffer::<Vertex>()\n\n\t\t\t\t.vertex_shader(vs.main_entry_point(), ())\n\n\t\t\t\t.viewports(Some(Viewport {\n\n\t\t\t\t\torigin: [0.0, 0.0],\n\n\t\t\t\t\tdimensions: [frame_buffer_size.0 as f32, frame_buffer_size.1 as f32],\n\n\t\t\t\t\tdepth_range: 0.0..1.0,\n\n\t\t\t\t}))\n\n\t\t\t\t.fragment_shader(fs.main_entry_point(), ())\n\n\t\t\t\t.depth_stencil_simple_depth()\n\n\t\t\t\t.cull_mode_back()\n\n\t\t\t\t.blend_collective(pre_mul_alpha_blending())\n\n\t\t\t\t.render_pass(Subpass::from(render_pass.clone(), 0).unwrap())\n\n\t\t\t\t.build(device.clone())?\n\n\t\t))\n\n\t}\n\n}\n\n\n", "file_path": "src/renderer/pipelines/default/mod.rs", "rank": 50, "score": 107846.15724471258 }, { "content": "\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./tex_vert.glsl\"); // 
https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! {\n\n\t\tty: \"vertex\",\n\n\t\tpath: \"src/renderer/pipelines/debug/tex_vert.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n\n\n\nmod tex_frag {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./tex_frag.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! {\n\n\t\tty: \"fragment\",\n\n\t\tpath: \"src/renderer/pipelines/debug/tex_frag.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n\n\n\npub struct DebugTexturedPipeline;\n", "file_path": "src/renderer/pipelines/debug/mod.rs", "rank": 51, "score": 107837.34352568594 }, { "content": "\n\nmod frag {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./frag.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! {\n\n\t\tty: \"fragment\",\n\n\t\tpath: \"src/renderer/pipelines/default/frag.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n\n\n\nmod glow_vert {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"glow_vert.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! 
{\n\n\t\tty: \"vertex\",\n\n\t\tpath: \"src/renderer/pipelines/default/glow_vert.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/default/mod.rs", "rank": 52, "score": 107835.57817546287 }, { "content": "\t\tconfig builder {\n\n\t\t\tbuilder\n\n\t\t}\n\n\t}\n\n\t\n\n\tpub pipeline MMDPipelineOpaqueNoCull {\n\n\t\tshader vs = base_vert;\n\n\t\tshader fs = base_frag { transparent_pass: 0 };\n\n\t\t\n\n\t\tconfig builder {\n\n\t\t\tbuilder.cull_mode_disabled()\n\n\t\t}\n\n\t}\n\n\n\n\tpub pipeline MMDPipelineTrans {\n\n\t\tshader vs = base_vert;\n\n\t\tshader fs = base_frag { transparent_pass: 1 };\n\n\t\t\n\n\t\tconfig builder {\n\n\t\t\tbuilder.blend_collective(pre_mul_alpha_blending())\n", "file_path": "src/renderer/pipelines/mmd/mod.rs", "rank": 53, "score": 107835.29675093108 }, { "content": "\t\t}\n\n\t}\n\n\t\n\n\tpub pipeline MMDPipelineTransNoCull {\n\n\t\tshader vs = base_vert;\n\n\t\tshader fs = base_frag { transparent_pass: 1 };\n\n\t\t\n\n\t\tconfig builder {\n\n\t\t\tbuilder.blend_collective(pre_mul_alpha_blending())\n\n\t\t\t .cull_mode_disabled()\n\n\t\t}\n\n\t}\n\n\n\n\tpub pipeline MMDPipelineOutline {\n\n\t\tshader vs = outline_vert;\n\n\t\tshader fs = outline_frag;\n\n\t\t\n\n\t\tconfig builder {\n\n\t\t\tbuilder.blend_collective(pre_mul_alpha_blending())\n\n\t\t\t .cull_mode_front()\n", "file_path": "src/renderer/pipelines/mmd/mod.rs", "rank": 54, "score": 107834.82188768734 }, { "content": "use crate::math::IntoArray;\n\n\n\n#[derive(Default, Copy, Clone)]\n\npub struct Vertex {\n\n\tpos: [f32; 2],\n\n}\n\n\n\nvulkano::impl_vertex!(Vertex, pos);\n\n\n\nimpl Vertex {\n\n\tpub fn new(pos: impl IntoArray<[f32; 2]>) -> Self {\n\n\t\tVertex {\n\n\t\t\tpos: pos.into_array(),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "src/renderer/pipelines/toolgun_text/vertex.rs", "rank": 55, "score": 107193.67439380604 }, { "content": "fn find_image_format<P: AsRef<Path>>(path: P) -> Result<ImageFormat, MMDModelLoadError> 
{\n\n\tOk(match imghdr::from_file(&path)? {\n\n\t\tSome(imghdr::Type::Gif) => ImageFormat::Gif,\n\n\t\tSome(imghdr::Type::Tiff) => ImageFormat::Tiff,\n\n\t\tSome(imghdr::Type::Jpeg) => ImageFormat::Jpeg,\n\n\t\tSome(imghdr::Type::Bmp) => ImageFormat::Bmp,\n\n\t\tSome(imghdr::Type::Png) => ImageFormat::Png,\n\n\t\tSome(imghdr::Type::Webp) => ImageFormat::WebP,\n\n\t\t_ => ImageFormat::from_path(&path)?,\n\n\t})\n\n}\n\n\n\nconst MMD_UNIT_SIZE: f32 = 7.9 / 100.0; // https://www.deviantart.com/hogarth-mmd/journal/1-MMD-unit-in-real-world-units-685870002\n\n\n\nimpl<I: Into<i32>> From<mmd::Vertex<I>> for Vertex {\n\n\tfn from(vertex: mmd::Vertex<I>) -> Self {\n\n\t\tlet (bones, bones_weights) = match vertex.weight_deform {\n\n\t\t\tWeightDeform::Bdef1(bdef) => ([bdef.bone_index.into(), 0, 0, 0],\n\n\t\t\t [1.0, 0.0, 0.0, 0.0]),\n\n\t\t\tWeightDeform::Bdef2(bdef) => ([bdef.bone_1_index.into(), bdef.bone_2_index.into(), 0, 0],\n", "file_path": "src/renderer/assets_manager/pmx.rs", "rank": 56, "score": 107075.13545770754 }, { "content": "struct Rasterizer {\n\n\tglyphs: Vec<&'static Glyph>,\n\n\tx: usize,\n\n\ty: usize,\n\n\tglyph: usize,\n\n\tremaining: usize,\n\n}\n\n\n\nimpl Rasterizer {\n\n\tfn new(glyphs: Vec<&'static Glyph>) -> Rasterizer {\n\n\t\t\n\n\t\tlet width = glyphs.iter().fold(0, |acc, g| acc + g.get_width() as usize);\n\n\t\tlet height = 16;\n\n\t\t\n\n\t\tRasterizer {\n\n\t\t\tglyphs,\n\n\t\t\tx: 0,\n\n\t\t\ty: 0,\n\n\t\t\tglyph: 0,\n\n\t\t\tremaining: width * height,\n", "file_path": "src/renderer/debug_renderer/text_cache.rs", "rank": 57, "score": 105545.1079727071 }, { "content": "use std::sync::Arc;\n\nuse vulkano::pipeline::GraphicsPipeline;\n\nuse vulkano::render_pass::{RenderPass, Subpass};\n\nuse vulkano::pipeline::viewport::Viewport;\n\nuse vulkano::device::DeviceOwned;\n\n\n\nmod vertex;\n\n\n\nuse crate::renderer::pipelines::pre_mul_alpha_blending;\n\nuse super::{PipelineConstructor, PipelineError};\n\npub use vertex::Vertex;\n\n\n\nmod vert 
{\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./vert.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! {\n\n\t\tty: \"vertex\",\n\n\t\tpath: \"src/renderer/pipelines/toolgun_text/vert.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n", "file_path": "src/renderer/pipelines/toolgun_text/mod.rs", "rank": 58, "score": 104684.96383644883 }, { "content": "}\n\n\n\nmod frag {\n\n\t#[allow(dead_code)]\n\n\tconst SOURCE: &'static str = include_str!(\"./frag.glsl\"); // https://github.com/vulkano-rs/vulkano/issues/1349\n\n\tvulkano_shaders::shader! {\n\n\t\tty: \"fragment\",\n\n\t\tpath: \"src/renderer/pipelines/toolgun_text/frag.glsl\",\n\n\t\tspirv_version: \"1.3\"\n\n\t}\n\n}\n\n\n\npub struct ToolGunTextPipeline;\n\n\n\nimpl PipelineConstructor for ToolGunTextPipeline {\n\n\ttype PipeType = GraphicsPipeline;\n\n\t\n\n\tfn new(render_pass: &Arc<RenderPass>, frame_buffer_size: (u32, u32)) -> Result<Arc<Self::PipeType>, PipelineError> {\n\n\t\tlet device = render_pass.device();\n\n\t\tlet vs = vert::Shader::load(device.clone()).unwrap();\n", "file_path": "src/renderer/pipelines/toolgun_text/mod.rs", "rank": 59, "score": 104683.48775656328 }, { "content": "\t\tlet fs = frag::Shader::load(device.clone()).unwrap();\n\n\t\t\n\n\t\tOk(Arc::new(\n\n\t\t\tGraphicsPipeline::start()\n\n\t\t\t\t.vertex_input_single_buffer::<Vertex>()\n\n\t\t\t\t.vertex_shader(vs.main_entry_point(), ())\n\n\t\t\t\t.viewports(Some(Viewport {\n\n\t\t\t\t\torigin: [0.0, 0.0],\n\n\t\t\t\t\tdimensions: [frame_buffer_size.0 as f32, frame_buffer_size.1 as f32],\n\n\t\t\t\t\tdepth_range: 0.0..1.0,\n\n\t\t\t\t}))\n\n\t\t\t\t.fragment_shader(fs.main_entry_point(), ())\n\n\t\t\t\t.depth_stencil_simple_depth()\n\n\t\t\t\t.cull_mode_back()\n\n\t\t\t\t.blend_collective(pre_mul_alpha_blending())\n\n\t\t\t\t.render_pass(Subpass::from(render_pass.clone(), 0).unwrap())\n\n\t\t\t\t.build(device.clone())?\n\n\t\t))\n\n\t}\n\n}\n", "file_path": 
"src/renderer/pipelines/toolgun_text/mod.rs", "rank": 60, "score": 104679.05614718594 }, { "content": "fn print_usage(filename: &str, mut config: Config) {\n\n\tconfig.help = false;\n\n\t\n\n\tprintln!(\"Usage:\");\n\n\tprintln!(\" {} [options]\", filename);\n\n\tprintln!(\"\\n{}\", config.usage());\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 61, "score": 103833.57928790592 }, { "content": "#[allow(dead_code)]\n\n#[derive(Copy, Clone)]\n\nstruct Intrinsics {\n\n\trawproj: [Vec4; 2],\n\n\tfocal: [Vec2; 2],\n\n\tcoeffs: [Vec4; 2],\n\n\tscale: [Vec2; 2],\n\n\tcenter: [Vec2; 2],\n\n}\n\n\n\npub struct Background {\n\n\tpipeline: Arc<GraphicsPipeline>,\n\n\tvertices: Arc<ImmutableBuffer<[Vertex]>>,\n\n\t// intrinsics: Arc<CpuAccessibleBuffer<Intrinsics>>,\n\n\tset: Arc<dyn DescriptorSet + Send + Sync>,\n\n\tfence: FenceCheck,\n\n\textrinsics: (Mat3, Mat3),\n\n\tlast_frame_pose: Isometry3,\n\n}\n\n\n\nimpl Background {\n\n\tpub fn new(camera_image: Arc<AttachmentImage>, eyes: &Eyes, queue: &Arc<Queue>, pipelines: &mut Pipelines) -> Result<Background, BackgroundError> {\n", "file_path": "src/renderer/background.rs", "rank": 62, "score": 102971.32823867816 }, { "content": "pub fn get_flag<T>(key: &str)\n\n -> Option<T>\n\n where T: Clone + Send + Sync + 'static {\n\n\tFLAGS.read()\n\n\t .unwrap()\n\n\t .get(key)\n\n\t .and_then(|val| val.downcast_ref::<T>())\n\n\t .map(|val| val.clone())\n\n}\n\n\n", "file_path": "src/debug.rs", "rank": 63, "score": 102924.37606435912 }, { "content": "pub fn aabb_from_points<'a, I>(pts: I) -> AABB\n\n\twhere\n\n\t\tI: IntoIterator<Item = Point3>,\n\n{\n\n\tlet mut it = pts.into_iter();\n\n\t\n\n\tlet p0 = it.next().expect(\n\n\t\t\"Point cloud AABB construction: the input iterator should yield at least one point.\",\n\n\t);\n\n\tlet mut min = p0;\n\n\tlet mut max = p0;\n\n\t\n\n\tfor pt in it {\n\n\t\tmin = min.inf(&pt);\n\n\t\tmax = max.sup(&pt);\n\n\t}\n\n\t\n\n\tAABB::new(min, max)\n\n}\n\n\n", "file_path": "src/math.rs", "rank": 
64, "score": 101200.6161049013 }, { "content": "pub fn num_key(num: usize) -> Key {\n\n\tmatch num {\n\n\t\t0 => Key::Key0,\n\n\t\t1 => Key::Key1,\n\n\t\t2 => Key::Key2,\n\n\t\t3 => Key::Key3,\n\n\t\t4 => Key::Key4,\n\n\t\t5 => Key::Key5,\n\n\t\t6 => Key::Key6,\n\n\t\t7 => Key::Key7,\n\n\t\t8 => Key::Key8,\n\n\t\t9 => Key::Key9,\n\n\t\tn => panic!(\"Invalid numeric key: {}\", n),\n\n\t}\n\n}\n", "file_path": "src/utils/input.rs", "rank": 65, "score": 101073.36903162257 }, { "content": "pub fn get_flag_or_default<T>(key: &str)\n\n -> T\n\n where T: Clone + Send + Sync + Default + 'static {\n\n\tget_flag(key).unwrap_or_default()\n\n}\n\n\n", "file_path": "src/debug.rs", "rank": 66, "score": 101073.36903162257 }, { "content": "pub fn face_towards_lossy(dir: Vec3) -> Rot3 {\n\n\tif dir.cross(&Vec3::y_axis()).magnitude_squared() <= f32::EPSILON {\n\n\t\tRot3::face_towards(&dir, &Vec3::z_axis())\n\n\t} else {\n\n\t\tRot3::face_towards(&dir, &Vec3::y_axis())\n\n\t}\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub struct Color(Vec4);\n\n\n\nimpl Color {\n\n\tpub fn new(r: f32, g: f32, b: f32, a: f32) -> Self { Color(vector!(r, g, b, a)) }\n\n\t\n\n\tpub fn dblack() -> Self { Color(vector!(0.0, 0.0, 0.0, 1.0)) }\n\n\tpub fn dred() -> Self { Color(vector!(0.5, 0.0, 0.0, 1.0)) }\n\n\tpub fn dgreen() -> Self { Color(vector!(0.0, 0.6, 0.0, 1.0)) }\n\n\tpub fn dyellow() -> Self { Color(vector!(0.1, 0.5, 0.0, 1.0)) }\n\n\tpub fn dblue() -> Self { Color(vector!(0.0, 0.0, 0.5, 1.0)) }\n\n\tpub fn dmagenta() -> Self { Color(vector!(0.6, 0.0, 0.6, 1.0)) }\n", "file_path": "src/math.rs", "rank": 67, "score": 101073.36903162257 }, { "content": "pub fn translate(text: &str) -> Option<&'static str> {\n\n\tTRANSLATIONS.get(text)\n\n\t .cloned() // Remove double ref\n\n}\n", "file_path": "src/debug.rs", "rank": 68, "score": 97392.9396466547 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\nstruct PropConfig {\n\n\tmodel: String,\n\n\ttexture: 
String,\n\n\t#[serde(default)] collider: PropCollider,\n\n\ttip: Option<String>,\n\n}\n\n\n\npub struct Prop {\n\n\tpub model: SimpleModel,\n\n\tpub name: String,\n\n\tpub collider: Collider,\n\n\tpub tip: Option<String>,\n\n}\n\n\n\npub struct PropCollection {\n\n\tpub props: Vec<Prop>,\n\n}\n\n\n\nimpl PropCollection {\n\n\tpub fn new(renderer: &mut Renderer) -> Result<Self, PropManagerError> {\n", "file_path": "src/component/toolgun/prop_manager.rs", "rank": 69, "score": 97296.03467401414 }, { "content": "pub fn set_flag<T>(key: &str, value: T)\n\n\t where T: Clone + Send + Sync + 'static {\n\n\tFLAGS.write()\n\n\t .unwrap()\n\n\t .insert(key.to_string(), Box::new(value));\n\n}\n\n\n\n#[allow(unused_macros)]\n\nmacro_rules! dprint {\n\n\t($( $args:expr ),*) => { if crate::debug::debug() { print!( $( $args ),* ); } }\n\n}\n\n\n\n#[allow(unused_macros)]\n\nmacro_rules! dprintln {\n\n\t($( $args:expr ),*) => { if crate::debug::debug() { println!( $( $args ),* ); } }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub enum DebugPosition {\n\n\tScreen(Point2),\n", "file_path": "src/debug.rs", "rank": 70, "score": 95639.26118898223 }, { "content": "pub fn get_userdata(eid: u64, cid: u64) -> u128 {\n\n\teid as u128 + ((cid as u128) << 64)\n\n}\n", "file_path": "src/utils/rapier.rs", "rank": 71, "score": 93975.43144544505 }, { "content": "#[allow(unused_variables)]\n\npub trait Tool {\n\n\tfn name(&self) -> &str;\n\n\t// fn activate(&mut self, toolgun: &ToolGun, application: &Application) -> Result<(), ToolError> { Ok(()) }\n\n\tfn tick(&mut self, toolgun: &ToolGun, hand: Hand, ray: Ray, application: &Application) -> Result<(), ToolError> { Ok(()) }\n\n\tfn pre_render(&mut self, toolgun: &ToolGun, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>) -> Result<(), ToolError> { Ok(()) }\n\n\tfn render(&mut self, toolgun: &ToolGun, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>) -> Result<(), ToolError> { Ok(()) }\n\n\t// fn deactivate(&mut self, 
toolgun: &ToolGun, application: &Application) -> Result<(), ToolError> { Ok(()) }\n\n}\n", "file_path": "src/component/toolgun/tool.rs", "rank": 72, "score": 91325.02316032823 }, { "content": "#[proc_macro_derive(FromArgs, attributes(arg_short, arg_rename, arg_skip))]\n\npub fn from_args_derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let ast: syn::DeriveInput = syn::parse(input).unwrap();\n\n \n\n let data = match ast.data {\n\n Data::Struct(data) => data,\n\n Data::Enum(_) => unimplemented!(),\n\n Data::Union(_) => unimplemented!(),\n\n };\n\n \n\n let mut usage_impl = Vec::new();\n\n let mut prepare_opts = Vec::new();\n\n let mut apply_matches = Vec::new();\n\n \n\n for field in &data.fields {\n\n let field_name = field.ident.as_ref().unwrap();\n\n let mut doc = quote! { \"\" };\n\n let mut short = quote! { \"\" };\n\n let mut name = quote!( stringify!(#field_name) );\n\n \n\n for attr in &field.attrs {\n", "file_path": "derive/src/lib.rs", "rank": 73, "score": 88093.3349164699 }, { "content": "pub trait AutoCommandBufferBuilderEx {\n\n\tfn bind_any_index_buffer(&mut self, index_buffer: ImmutableIndexBuffer) -> &mut Self;\n\n}\n\n\n\nimpl<L, P> AutoCommandBufferBuilderEx for AutoCommandBufferBuilder<L, P> {\n\n\tfn bind_any_index_buffer(&mut self, index_buffer: ImmutableIndexBuffer) -> &mut Self {\n\n\t\tmatch index_buffer {\n\n\t\t\tImmutableIndexBuffer::U8(buffer) => self.bind_index_buffer(buffer),\n\n\t\t\tImmutableIndexBuffer::U16(buffer) => self.bind_index_buffer(buffer),\n\n\t\t\tImmutableIndexBuffer::U32(buffer) => self.bind_index_buffer(buffer),\n\n\t\t};\n\n\t\t\n\n\t\tself\n\n\t}\n\n}\n", "file_path": "src/utils/index_buffer.rs", "rank": 74, "score": 85491.71779437186 }, { "content": "pub trait IntoBoxed<T: ?Sized>: 'static {\n\n\tfn into(self) -> Box<T>;\n\n}\n\n\n", "file_path": "src/utils/mod.rs", "rank": 75, "score": 83112.37284775378 }, { "content": "\n\npub struct DebugRenderer {\n\n\tpub text_cache: 
RefCell<TextCache>,\n\n\tpipeline: Arc<GraphicsPipeline>,\n\n\ttext_pipeline: Arc<GraphicsPipeline>,\n\n\tvertices_pool: CpuBufferPool<Vertex>,\n\n\ttext_vertices_pool: CpuBufferPool<TexturedVertex>,\n\n\tindexes_pool: CpuBufferPool<u32>,\n\n\tvertices: Vec<Vertex>,\n\n\ttext_vertices: Vec<TexturedVertex>,\n\n\tindexes: Vec<u32>,\n\n}\n\n\n\nconst RING_MIN: f32 = 5.0;\n\nconst RING_WIDTH: f32 = 0.9;\n\n\n\nimpl DebugRenderer {\n\n\tpub fn new(load_queue: &Arc<Queue>, pipelines: &mut Pipelines) -> Result<DebugRenderer, DebugRendererError> {\n\n\t\tlet device = load_queue.device();\n\n\t\tlet pipeline = pipelines.get::<DebugPipeline>()?;\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 76, "score": 78971.37909480385 }, { "content": "\t\t}\n\n\t}\n\n\t\n\n\tfn draw_text(&mut self, text: DebugText, viewproj: &(PMat4, PMat4), pixel_scale: &Vec2) -> Result<Option<Arc<dyn DescriptorSet + Send + Sync>>, DebugRendererRenderError> {\n\n\t\tlet entry = self.text_cache.get_mut().get(&text.text)?;\n\n\t\t\n\n\t\tlet size_px = vector!(entry.size.0 as f32 / entry.size.1 as f32 * text.size, text.size);\n\n\t\tlet offset = text.offset.evaluate(size_px).coords.component_mul(&pixel_scale).to_homogeneous();\n\n\t\tlet top_left = text.position.project(viewproj);\n\n\t\tlet top_left = (\n\n\t\t\ttop_left.0 + &offset,\n\n\t\t\ttop_left.1 + &offset,\n\n\t\t);\n\n\t\tlet size = size_px.component_mul(&pixel_scale);\n\n\t\t\n\n\t\tlet start_id = self.text_vertices.len() as u32;\n\n\t\tself.indexes.push(start_id);\n\n\t\tself.indexes.push(start_id + 2);\n\n\t\tself.indexes.push(start_id + 1);\n\n\t\tself.indexes.push(start_id);\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 77, "score": 78970.48043345584 }, { "content": "\t\t}\n\n\t\t\n\n\t\tfor id in 0..edges {\n\n\t\t\tlet angle = dir * Rot2::from_angle(PI / edges as f32 * id as f32 - PI / 2.0);\n\n\t\t\tlet offset = angle.transform_vector(&Vec2::x()).component_mul(pixel_scale) * 
line.width;\n\n\t\t\tself.vertices.push(Vertex::new(\n\n\t\t\t\t&from.0 + offset.to_homogeneous(),\n\n\t\t\t\t&from.1 + offset.to_homogeneous(),\n\n\t\t\t\t&line.color\n\n\t\t\t));\n\n\t\t}\n\n\t\n\n\t\tfor id in 0..edges {\n\n\t\t\tlet angle = dir * Rot2::from_angle(PI / edges as f32 * id as f32 - PI / 2.0);\n\n\t\t\tlet offset = angle.transform_vector(&-Vec2::x()).component_mul(pixel_scale) * line.width;\n\n\t\t\tself.vertices.push(Vertex::new(\n\n\t\t\t\t&to.0 + offset.to_homogeneous(),\n\n\t\t\t\t&to.1 + offset.to_homogeneous(),\n\n\t\t\t\t&line.color\n\n\t\t\t));\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 78, "score": 78964.16355469174 }, { "content": "\t\tlet mut sub = true;\n\n\t\t\n\n\t\twhile last_ids.0 + 1 < last_ids.1 {\n\n\t\t\tself.indexes.push(last_ids.0);\n\n\t\t\tself.indexes.push(last_ids.1);\n\n\t\t\t\n\n\t\t\tif sub {\n\n\t\t\t\tlast_ids.0 += 1;\n\n\t\t\t\tself.indexes.push(last_ids.0);\n\n\t\t\t} else {\n\n\t\t\t\tlast_ids.1 -= 1;\n\n\t\t\t\tself.indexes.push(last_ids.1);\n\n\t\t\t}\n\n\t\t\t\n\n\t\t\tsub = !sub\n\n\t\t}\n\n\t\t\n\n\t\tfor id in 0..edges {\n\n\t\t\tlet angle = std::f32::consts::TAU / edges as f32 * id as f32;\n\n\t\t\tlet offset = Rot2::new(angle).transform_vector(&Vec2::x()).component_mul(pixel_scale) * point.radius;\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 79, "score": 78963.74686507703 }, { "content": "\t\t\tself.indexes.push(start + id * 2);\n\n\t\t\tself.indexes.push(start + id * 2 + 1);\n\n\t\t\tself.indexes.push(start + id * 2 + 2);\n\n\t\t\tself.indexes.push(start + id * 2 + 1);\n\n\t\t\tself.indexes.push(start + id * 2 + 3);\n\n\t\t\tself.indexes.push(start + id * 2 + 2);\n\n\t\t}\n\n\t\t\n\n\t\tfor id in 0..edges {\n\n\t\t\tlet angle = std::f32::consts::TAU / edges as f32 * id as f32;\n\n\t\t\tlet dir = Rot2::new(angle).transform_vector(&Vec2::x()).component_mul(&pixel_scale);\n\n\t\t\tlet offset: Vec2 = &dir * point.radius;\n\n\t\t\tlet offset_inner: Vec2 = &dir * 
((point.radius - RING_MIN / 2.0) * RING_WIDTH);\n\n\t\t\tself.vertices.push(Vertex::new(\n\n\t\t\t\t&center.0 + offset.to_homogeneous(),\n\n\t\t\t\t&center.1 + offset.to_homogeneous(),\n\n\t\t\t\t&point.color,\n\n\t\t\t));\n\n\t\t\tself.vertices.push(Vertex::new(\n\n\t\t\t\t&center.0 + offset_inner.to_homogeneous(),\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 80, "score": 78962.65299216312 }, { "content": "\t\t\t\t\t set)\n\n\t\t\t\t\t .draw_indexed(index_count as u32,\n\n\t\t\t\t\t 1,\n\n\t\t\t\t\t 0,\n\n\t\t\t\t\t 0,\n\n\t\t\t\t\t 0)?;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\t\n\n\t\t\tOk::<_, DebugRendererRenderError>(())\n\n\t\t})?;\n\n\t\t\n\n\t\tOk(())\n\n\t}\n\n\t\n\n\tfn draw_circle(&mut self, point: DebugPoint, viewproj: &(PMat4, PMat4), pixel_scale: &Vec2) {\n\n\t\tlet edges = point.radius.log(1.2).max(4.0) as u32;\n\n\t\tlet center = point.position.project(viewproj);\n\n\t\t\n\n\t\tlet mut last_ids = (self.vertices.len() as u32, self.vertices.len() as u32 + edges - 1);\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 81, "score": 78961.95859677896 }, { "content": "\t\n\n\tpub fn render(&mut self, builder: &mut AutoCommandBufferBuilder<PrimaryAutoCommandBuffer>, commons: &CommonsUBO, pixel_scale: Vec2) -> Result<(), DebugRendererRenderError> {\n\n\t\tlet viewproj = (\n\n\t\t\tcommons.projection[0] * commons.view[0],\n\n\t\t\tcommons.projection[1] * commons.view[1],\n\n\t\t);\n\n\t\t\n\n\t\t\n\n\t\tDEBUG_LINES.with(|lines| {\n\n\t\t\tfor line in lines.borrow_mut().drain(..) {\n\n\t\t\t\tif line.width <= 0.0 {\n\n\t\t\t\t\tcontinue;\n\n\t\t\t\t} else {\n\n\t\t\t\t\tself.draw_line(line, &viewproj, &pixel_scale);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t});\n\n\t\t\n\n\t\tDEBUG_POINTS.with(|points| {\n\n\t\t\tfor point in points.borrow_mut().drain(..) 
{\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 82, "score": 78961.83235498326 }, { "content": "\t\t\t 0,\n\n\t\t\t 0,\n\n\t\t\t 0)?;\n\n\t\t}\n\n\t\t\n\n\t\tDEBUG_TEXTS.with(|texts| {\n\n\t\t\tfor text in texts.borrow_mut().drain(..) {\n\n\t\t\t\tif text.size <= 0.0 || text.text.is_empty() {\n\n\t\t\t\t\tcontinue;\n\n\t\t\t\t} else if let Some(set) = self.draw_text(text, &viewproj, &pixel_scale)? {\n\n\t\t\t\t\tlet vertex_buffer = self.text_vertices_pool.chunk(self.text_vertices.drain(..))?;\n\n\t\t\t\t\tlet index_buffer = self.indexes_pool.chunk(self.indexes.drain(..))?;\n\n\t\t\t\t\tlet index_count = index_buffer.len();\n\n\t\t\t\t\t\n\n\t\t\t\t\tbuilder.bind_pipeline_graphics(self.text_pipeline.clone())\n\n\t\t\t\t\t .bind_index_buffer(index_buffer)\n\n\t\t\t\t\t .bind_vertex_buffers(0, vertex_buffer)\n\n\t\t\t\t\t .bind_descriptor_sets(PipelineBindPoint::Graphics,\n\n\t\t\t\t\t self.text_pipeline.layout().clone(),\n\n\t\t\t\t\t 0,\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 83, "score": 78961.81044033707 }, { "content": "use std::sync::Arc;\n\nuse std::f32::consts::PI;\n\nuse std::cell::RefCell;\n\nuse err_derive::Error;\n\nuse vulkano::{memory, command_buffer};\n\nuse vulkano::command_buffer::{AutoCommandBufferBuilder, PrimaryAutoCommandBuffer};\n\nuse vulkano::buffer::{CpuBufferPool, BufferUsage, TypedBufferAccess};\n\nuse vulkano::device::Queue;\n\nuse vulkano::descriptor_set::DescriptorSet;\n\nuse vulkano::pipeline::{GraphicsPipeline, PipelineBindPoint};\n\nuse nalgebra::Unit;\n\n\n\nmod text_cache;\n\n\n\nuse crate::debug::{DEBUG_POINTS, DebugPoint, DEBUG_LINES, DebugLine, DEBUG_TEXTS, DebugText};\n\nuse crate::math::{Vec2, Rot2, PMat4};\n\nuse super::pipelines::debug::{DebugPipeline, DebugTexturedPipeline, Vertex, TexturedVertex};\n\nuse super::pipelines::{Pipelines, PipelineError};\n\nuse super::CommonsUBO;\n\npub use text_cache::{TextCache, TextCacheError, TextCacheGetError};\n", "file_path": 
"src/renderer/debug_renderer/mod.rs", "rank": 84, "score": 78961.1656209439 }, { "content": "\t\t\t\tif point.radius <= 0.0 {\n\n\t\t\t\t\tcontinue;\n\n\t\t\t\t} else if point.radius <= RING_MIN {\n\n\t\t\t\t\tself.draw_circle(point, &viewproj, &pixel_scale);\n\n\t\t\t\t} else {\n\n\t\t\t\t\tself.draw_ring(point, &viewproj, &pixel_scale);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t});\n\n\t\t\n\n\t\tif !self.vertices.is_empty() {\n\n\t\t\tlet vertex_buffer = self.vertices_pool.chunk(self.vertices.drain(..))?;\n\n\t\t\tlet index_buffer = self.indexes_pool.chunk(self.indexes.drain(..))?;\n\n\t\t\tlet index_count = index_buffer.len();\n\n\t\t\t\n\n\t\t\tbuilder.bind_pipeline_graphics(self.pipeline.clone())\n\n\t\t\t .bind_index_buffer(index_buffer)\n\n\t\t\t .bind_vertex_buffers(0, vertex_buffer)\n\n\t\t\t .draw_indexed(index_count as u32,\n\n\t\t\t 1,\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 85, "score": 78960.41606496798 }, { "content": "\t\t\t\t&center.1 + offset_inner.to_homogeneous(),\n\n\t\t\t\t&point.color,\n\n\t\t\t));\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn draw_line(&mut self, line: DebugLine, viewproj: &(PMat4, PMat4), pixel_scale: &Vec2) {\n\n\t\tlet edges = (line.width.ln() * 4.5).max(2.0) as u32;\n\n\t\tlet from = line.from.project(viewproj);\n\n\t\tlet to = line.to.project(viewproj);\n\n\t\t\n\n\t\tif from.0.z < 0.0 || to.0.z < 0.0 || from.0.z > 1.0 || to.0.z > 1.0\n\n\t\t|| from.1.z < 0.0 || to.1.z < 0.0 || from.1.z > 1.0 || to.1.z > 1.0 {\n\n\t\t\treturn\n\n\t\t}\n\n\t\t\n\n\t\tlet dir = Unit::try_new((from.0 - to.0).xy(), std::f32::EPSILON).unwrap_or(Vec2::x_axis());\n\n\t\tlet dir = Rot2::rotation_between_axis(&Vec2::x_axis(), &dir);\n\n\t\t\n\n\t\tlet base_id = self.vertices.len() as u32;\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 86, "score": 78957.69085147977 }, { "content": "\t\tlet text_pipeline = pipelines.get::<DebugTexturedPipeline>()?;\n\n\t\t\n\n\t\tlet vertices_pool = CpuBufferPool::new(device.clone(), 
BufferUsage::vertex_buffer());\n\n\t\tlet text_vertices_pool = CpuBufferPool::new(device.clone(), BufferUsage::vertex_buffer());\n\n\t\tlet indexes_pool = CpuBufferPool::new(device.clone(), BufferUsage::index_buffer());\n\n\t\t\n\n\t\tlet text_cache = RefCell::new(TextCache::new(load_queue, pipelines)?);\n\n\t\t\n\n\t\tOk(DebugRenderer {\n\n\t\t\tpipeline,\n\n\t\t\ttext_pipeline,\n\n\t\t\tvertices_pool,\n\n\t\t\ttext_vertices_pool,\n\n\t\t\tindexes_pool,\n\n\t\t\tvertices: vec![],\n\n\t\t\ttext_vertices: vec![],\n\n\t\t\tindexes: vec![],\n\n\t\t\ttext_cache,\n\n\t\t})\n\n\t}\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 87, "score": 78957.40276763006 }, { "content": "\t\t));\n\n\t\tself.text_vertices.push(TexturedVertex::new(\n\n\t\t\ttop_left.0 + vector!(0.0, size.y, 0.0),\n\n\t\t\ttop_left.1 + vector!(0.0, size.y, 0.0),\n\n\t\t\t[0.0, 1.0],\n\n\t\t\t&text.color,\n\n\t\t));\n\n\t\t\n\n\t\tOk(Some(entry.set.clone()))\n\n\t}\n\n}\n\n\n\n\n\n#[derive(Debug, Error)]\n\npub enum DebugRendererError {\n\n\t#[error(display = \"{}\", _0)] PipelineError(#[error(source)] PipelineError),\n\n\t#[error(display = \"{}\", _0)] TextCacheError(#[error(source)] TextCacheError),\n\n}\n\n\n\n#[derive(Debug, Error)]\n\npub enum DebugRendererRenderError {\n\n\t#[error(display = \"{}\", _0)] TextCacheGetError(#[error(source)] TextCacheGetError),\n\n\t#[error(display = \"{}\", _0)] DeviceMemoryAllocError(#[error(source)] memory::DeviceMemoryAllocError),\n\n\t#[error(display = \"{}\", _0)] DrawIndexedError(#[error(source)] command_buffer::DrawIndexedError),\n\n}\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 88, "score": 78956.80379787381 }, { "content": "\t\t\tself.vertices.push(Vertex::new(\n\n\t\t\t\t&center.0.coords + offset.to_homogeneous(),\n\n\t\t\t\t&center.1.coords + offset.to_homogeneous(),\n\n\t\t\t\t&point.color,\n\n\t\t\t));\n\n\t\t}\n\n\t}\n\n\t\n\n\tfn draw_ring(&mut self, point: DebugPoint, viewproj: &(PMat4, PMat4), pixel_scale: &Vec2) 
{\n\n\t\tlet edges = (point.radius.ln() * 9.0).max(4.0) as u32;\n\n\t\tlet center = point.position.project(viewproj);\n\n\t\t\n\n\t\tlet start = self.vertices.len() as u32;\n\n\t\tself.indexes.push(start);\n\n\t\tself.indexes.push(start + edges * 2 - 2);\n\n\t\tself.indexes.push(start + edges * 2 - 1);\n\n\t\tself.indexes.push(start);\n\n\t\tself.indexes.push(start + edges * 2 - 1);\n\n\t\tself.indexes.push(start + 1);\n\n\t\tfor id in 0..(edges - 1) {\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 89, "score": 78953.40324475424 }, { "content": "\t\tlet mut last_ids = (edges / 2 - 1, edges / 2);\n\n\t\tlet mut sub = false;\n\n\t\t\n\n\t\twhile last_ids.0 != last_ids.1 + 1 {\n\n\t\t\tself.indexes.push(base_id + last_ids.1);\n\n\t\t\tself.indexes.push(base_id + last_ids.0);\n\n\t\t\t\n\n\t\t\tif sub {\n\n\t\t\t\tif last_ids.0 == 0 {\n\n\t\t\t\t\tlast_ids.0 = edges * 2 - 1;\n\n\t\t\t\t} else {\n\n\t\t\t\t\tlast_ids.0 = last_ids.0 - 1;\n\n\t\t\t\t}\n\n\t\t\t\tself.indexes.push(base_id + last_ids.0);\n\n\t\t\t} else {\n\n\t\t\t\tlast_ids.1 += 1;\n\n\t\t\t\tself.indexes.push(base_id + last_ids.1);\n\n\t\t\t}\n\n\t\t\t\n\n\t\t\tsub = !sub\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 90, "score": 78944.90805138185 }, { "content": "\t\tself.indexes.push(start_id + 3);\n\n\t\tself.indexes.push(start_id + 2);\n\n\t\t\n\n\t\tself.text_vertices.push(TexturedVertex::new(\n\n\t\t\ttop_left.0,\n\n\t\t\ttop_left.1,\n\n\t\t\t[0.0, 0.0],\n\n\t\t\t&text.color,\n\n\t\t));\n\n\t\tself.text_vertices.push(TexturedVertex::new(\n\n\t\t\ttop_left.0 + vector!(size.x, 0.0, 0.0),\n\n\t\t\ttop_left.1 + vector!(size.x, 0.0, 0.0),\n\n\t\t\t[1.0, 0.0],\n\n\t\t\t&text.color,\n\n\t\t));\n\n\t\tself.text_vertices.push(TexturedVertex::new(\n\n\t\t\ttop_left.0 + vector!(size.x, size.y, 0.0),\n\n\t\t\ttop_left.1 + vector!(size.x, size.y, 0.0),\n\n\t\t\t[1.0, 1.0],\n\n\t\t\t&text.color,\n", "file_path": "src/renderer/debug_renderer/mod.rs", "rank": 91, "score": 
78943.98249364067 }, { "content": "use std::cell::{RefCell, Cell};\n\nuse std::collections::{BTreeMap, HashMap};\n\nuse std::any::Any;\n\nuse rapier3d::dynamics::{RigidBody, RigidBodyHandle, RigidBodyType};\n\nuse rapier3d::prelude::RigidBodyBuilder;\n\nuse rapier3d::geometry::{Collider, ColliderBuilder};\n\n\n\nuse crate::math::{Isometry3, Vec3, Point3, Rot3};\n\nuse crate::component::Component;\n\nuse super::Entity;\n\nuse crate::utils::next_uid;\n\nuse crate::application::entity::EntityState;\n\nuse crate::component::physics::collider::ColliderComponent;\n\nuse crate::component::model::SimpleModel;\n\n\n\npub struct EntityBuilder {\n\n\tpub name: String,\n\n\tpub rigid_body: RigidBody,\n\n\tpub position: Isometry3,\n\n\tpub velocity: Vec3,\n", "file_path": "src/application/entity/builder.rs", "rank": 92, "score": 77805.4795855344 }, { "content": "\tpub angular_velocity: Vec3,\n\n\tpub hidden: bool,\n\n\tpub components: Vec<Box<dyn Component>>,\n\n\tpub tags: HashMap<String, Box<dyn Any>>,\n\n}\n\n\n\nimpl EntityBuilder {\n\n\tpub fn new(name: impl Into<String>) -> Self {\n\n\t\tEntityBuilder {\n\n\t\t\tname: name.into(),\n\n\t\t\trigid_body: RigidBodyBuilder::new_static().build(),\n\n\t\t\tposition: Isometry3::identity(),\n\n\t\t\tvelocity: Vec3::zeros(),\n\n\t\t\tangular_velocity: Vec3::zeros(),\n\n\t\t\thidden: false,\n\n\t\t\tcomponents: vec![],\n\n\t\t\ttags: HashMap::new(),\n\n\t\t}\n\n\t}\n\n\t\n", "file_path": "src/application/entity/builder.rs", "rank": 93, "score": 77804.1656680126 }, { "content": "\t\t}\n\n\t\t\n\n\t\teprintln!(\"Unable to create collider from aabb without SimpleModel component! 
({})\", self.name);\n\n\t\t\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn tag<T: 'static>(mut self, key: impl Into<String>, val: T) -> Self {\n\n\t\tself.tags.insert(key.into(), Box::new(val));\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn build(self) -> Entity {\n\n\t\tlet entity = Entity {\n\n\t\t\tid: next_uid(),\n\n\t\t\tname: self.name,\n\n\t\t\ttags: RefCell::new(self.tags),\n\n\t\t\tstate: RefCell::new(EntityState {\n\n\t\t\t\tposition: self.position,\n\n\t\t\t\tvelocity: self.velocity,\n", "file_path": "src/application/entity/builder.rs", "rank": 94, "score": 77801.70916329467 }, { "content": "\tpub fn component<C: Component>(mut self, component: C) -> Self {\n\n\t\tself.components.push(component.boxed());\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn collider(mut self, collider: Collider) -> Self {\n\n\t\tself.components.push(ColliderComponent::new(collider).boxed());\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn collider_from_aabb(self) -> Self {\n\n\t\tfor component in &self.components {\n\n\t\t\tif let Some(model) = component.as_any().downcast_ref::<SimpleModel>() {\n\n\t\t\t\tlet aabb = model.aabb();\n\n\t\t\t\tlet hsize = aabb.half_extents();\n\n\t\t\t\t\n\n\t\t\t\treturn self.collider(ColliderBuilder::cuboid(hsize.x, hsize.y, hsize.z)\n\n\t\t\t\t .translation(aabb.center().coords)\n\n\t\t\t\t .build());\n\n\t\t\t}\n", "file_path": "src/application/entity/builder.rs", "rank": 95, "score": 77793.3267478005 }, { "content": "\tpub fn rotation(mut self, angle: Rot3) -> Self {\n\n\t\tself.position.rotation = angle;\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn velocity(mut self, velocity: Vec3) -> Self {\n\n\t\tself.velocity = velocity;\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn angular_velocity(mut self, angular_velocity: Vec3) -> Self {\n\n\t\tself.angular_velocity = angular_velocity;\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn hidden(mut self, hidden: bool) -> Self {\n\n\t\tself.hidden = hidden;\n\n\t\tself\n\n\t}\n\n\t\n", "file_path": "src/application/entity/builder.rs", "rank": 96, "score": 77789.8762942482 
}, { "content": "\tpub fn rigid_body(mut self, rigid_body: RigidBody) -> Self {\n\n\t\tself.rigid_body = rigid_body;\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn rigid_body_type(mut self, rb_type: RigidBodyType) -> Self {\n\n\t\tself.rigid_body.set_body_type(rb_type);\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn position(mut self, position: Isometry3) -> Self {\n\n\t\tself.position = position;\n\n\t\tself\n\n\t}\n\n\t\n\n\tpub fn translation(mut self, point: Point3) -> Self {\n\n\t\tself.position.translation = point.into();\n\n\t\tself\n\n\t}\n\n\t\n", "file_path": "src/application/entity/builder.rs", "rank": 97, "score": 77789.36750942742 }, { "content": "\t\t\t\tangular_velocity: self.angular_velocity,\n\n\t\t\t\thidden: self.hidden,\n\n\t\t\t}),\n\n\t\t\tremoved: Cell::new(false),\n\n\t\t\tcomponents: BTreeMap::new(),\n\n\t\t\tnew_components: RefCell::new(self.components),\n\n\t\t\trigid_body: RigidBodyHandle::invalid(),\n\n\t\t\trigid_body_template: self.rigid_body,\n\n\t\t};\n\n\t\t\n\n\t\tentity\n\n\t}\n\n}\n", "file_path": "src/application/entity/builder.rs", "rank": 98, "score": 77785.31385653485 }, { "content": "impl ComponentInner {\n\n\tpub fn new() -> Self {\n\n\t\tComponentInner::from_render_type(RenderType::Opaque)\n\n\t}\n\n\t\n\n\tpub fn from_render_type(render_type: RenderType) -> Self {\n\n\t\tComponentInner {\n\n\t\t\tid: next_uid(),\n\n\t\t\tentity_id: None,\n\n\t\t\tremoved: Cell::new(false),\n\n\t\t\trender_type,\n\n\t\t}\n\n\t}\n\n\t\n\n\tpub fn set_entity_id(&mut self, entity_id: u64) {\n\n\t\tassert!(self.entity_id.is_none(), \"Component {} already mounted! Old: {} New: {}.\", self.id, self.entity_id.unwrap(), entity_id);\n\n\t\t\n\n\t\tself.entity_id = Some(entity_id);\n\n\t}\n\n\t\n", "file_path": "src/component/mod.rs", "rank": 99, "score": 75767.42273469019 } ]
Rust
codegen/src/lib.rs
Atul9/canrun_rs
4e9c4dd3ddfdcf0f5666c2122614708e14f2eaa1
extern crate proc_macro; use proc_macro::TokenStream; use quote::{format_ident, quote}; use syn::parse::{Parse, ParseStream}; use syn::punctuated::Punctuated; use syn::{parse_macro_input, Result, Token}; struct DomainDef { canrun_internal: bool, domain_visibility: syn::Visibility, domain_name: syn::Ident, domain_types: Vec<syn::Type>, } impl Parse for DomainDef { fn parse(input: ParseStream) -> Result<Self> { let domain_visibility = input.parse()?; let domain_name: syn::Ident = input.parse()?; let content; syn::braced!(content in input); let raw_types: Punctuated<syn::Type, Token![,]> = content.parse_terminated(syn::Type::parse)?; let domain_types: Vec<_> = raw_types.into_iter().collect(); Ok(DomainDef { canrun_internal: false, domain_visibility, domain_name, domain_types, }) } } impl quote::ToTokens for DomainDef { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { let DomainDef { canrun_internal, domain_visibility, domain_name, domain_types, } = self; let canrun_mod = if *canrun_internal { format_ident!("crate") } else { format_ident!("canrun") }; let (fields, variants): (Vec<_>, Vec<_>) = (0..domain_types.len()) .map(|n| (format_ident!("t{}", n), format_ident!("T{}", n))) .unzip(); let value_name = format_ident!("{}Value", domain_name); let result = quote! { #[doc="A custom Domain generated by the domain! 
macro."] #[doc="TODO: Figure out how to interpolate something useful here"] #[derive(std::fmt::Debug)] #domain_visibility struct #domain_name { #(#fields: #canrun_mod::domains::DomainValues<#domain_types>),* } impl<'a> #canrun_mod::domains::Domain<'a> for #domain_name { type Value = #value_name; fn new() -> Self { #domain_name { #(#fields: #canrun_mod::domains::DomainValues::new(),)* } } fn unify_domain_values( state: #canrun_mod::state::State<'a, Self>, a: Self::Value, b: Self::Value, ) -> Option<#canrun_mod::state::State<Self>> { use #canrun_mod::value::{Val, IntoVal}; match (a, b) { #( (#value_name::#variants(a), #value_name::#variants(b)) => { state.unify::<#domain_types>(&a.into_val(), &b.into_val()) } ,)* _ => None, } } } #( impl<'a> #canrun_mod::domains::DomainType<'a, #domain_types> for #domain_name { fn values_as_ref( &self, ) -> &#canrun_mod::domains::DomainValues<#domain_types> { &self.#fields } fn values_as_mut( &mut self, ) -> &mut #canrun_mod::domains::DomainValues<#domain_types> { &mut self.#fields } fn into_domain_val(val: #canrun_mod::value::Val<#domain_types>) -> #value_name { #value_name::#variants(val) } } )* impl<'a> Clone for #domain_name { fn clone(&self) -> Self { #domain_name { #(#fields: self.#fields.clone()),* } } } #[doc(hidden)] #[derive(std::fmt::Debug)] #domain_visibility enum #value_name { #(#variants(#canrun_mod::value::Val<#domain_types>)),* } impl Clone for #value_name { fn clone(&self) -> Self { match self { #(#value_name::#variants(val) => #value_name::#variants(val.clone())),* } } } }; result.to_tokens(tokens); } } #[proc_macro] pub fn domain(item: TokenStream) -> TokenStream { let def = parse_macro_input!(item as DomainDef); quote!(#def).into() } #[proc_macro] #[doc(hidden)] pub fn canrun_internal_domain(item: TokenStream) -> TokenStream { let mut def = parse_macro_input!(item as DomainDef); def.canrun_internal = true; quote!(#def).into() }
extern crate proc_macro; use proc_macro::TokenStream; use quote::{format_ident, quote}; use syn::parse::{Parse, ParseStream}; use syn::punctuated::Punctuated; use syn::{parse_macro_input, Result, Token}; struct DomainDef { canrun_internal: bool, domain_visibility: syn::Visibility, domain_name: syn::Ident, domain_types: Vec<syn::Type>, } impl Parse for DomainDef { fn parse(input: ParseStream) -> Result<Self> { let domain_visibility = input.parse()?; let domain_name: syn::Ident = input.parse()?; let content; syn::braced!(content in input); let raw_types: Punctuated<syn::Type, Token![,]> = content.parse_terminated(syn::Type::parse)?; let domain_types: Vec<_> = raw_types.into_iter().collect();
} } impl quote::ToTokens for DomainDef { fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) { let DomainDef { canrun_internal, domain_visibility, domain_name, domain_types, } = self; let canrun_mod = if *canrun_internal { format_ident!("crate") } else { format_ident!("canrun") }; let (fields, variants): (Vec<_>, Vec<_>) = (0..domain_types.len()) .map(|n| (format_ident!("t{}", n), format_ident!("T{}", n))) .unzip(); let value_name = format_ident!("{}Value", domain_name); let result = quote! { #[doc="A custom Domain generated by the domain! macro."] #[doc="TODO: Figure out how to interpolate something useful here"] #[derive(std::fmt::Debug)] #domain_visibility struct #domain_name { #(#fields: #canrun_mod::domains::DomainValues<#domain_types>),* } impl<'a> #canrun_mod::domains::Domain<'a> for #domain_name { type Value = #value_name; fn new() -> Self { #domain_name { #(#fields: #canrun_mod::domains::DomainValues::new(),)* } } fn unify_domain_values( state: #canrun_mod::state::State<'a, Self>, a: Self::Value, b: Self::Value, ) -> Option<#canrun_mod::state::State<Self>> { use #canrun_mod::value::{Val, IntoVal}; match (a, b) { #( (#value_name::#variants(a), #value_name::#variants(b)) => { state.unify::<#domain_types>(&a.into_val(), &b.into_val()) } ,)* _ => None, } } } #( impl<'a> #canrun_mod::domains::DomainType<'a, #domain_types> for #domain_name { fn values_as_ref( &self, ) -> &#canrun_mod::domains::DomainValues<#domain_types> { &self.#fields } fn values_as_mut( &mut self, ) -> &mut #canrun_mod::domains::DomainValues<#domain_types> { &mut self.#fields } fn into_domain_val(val: #canrun_mod::value::Val<#domain_types>) -> #value_name { #value_name::#variants(val) } } )* impl<'a> Clone for #domain_name { fn clone(&self) -> Self { #domain_name { #(#fields: self.#fields.clone()),* } } } #[doc(hidden)] #[derive(std::fmt::Debug)] #domain_visibility enum #value_name { #(#variants(#canrun_mod::value::Val<#domain_types>)),* } impl Clone for #value_name { fn clone(&self) 
-> Self { match self { #(#value_name::#variants(val) => #value_name::#variants(val.clone())),* } } } }; result.to_tokens(tokens); } } #[proc_macro] pub fn domain(item: TokenStream) -> TokenStream { let def = parse_macro_input!(item as DomainDef); quote!(#def).into() } #[proc_macro] #[doc(hidden)] pub fn canrun_internal_domain(item: TokenStream) -> TokenStream { let mut def = parse_macro_input!(item as DomainDef); def.canrun_internal = true; quote!(#def).into() }
Ok(DomainDef { canrun_internal: false, domain_visibility, domain_name, domain_types, })
call_expression
[ { "content": "/// Resolve one [`Val`] or return an [`Err(VarWatch)`](VarWatch) in a\n\n/// [`Constraint`].\n\npub fn resolve_1<'a, A, D>(val: &Val<A>, state: &State<'a, D>) -> Result<Rc<A>, VarWatch>\n\nwhere\n\n A: Debug,\n\n D: DomainType<'a, A>,\n\n{\n\n let a = state.resolve_val(val);\n\n match a {\n\n Resolved(a) => Ok(a.clone()),\n\n Var(var) => Err(VarWatch::one(*var)),\n\n }\n\n}\n\n\n", "file_path": "canrun/src/state/constraints.rs", "rank": 3, "score": 68025.889432493 }, { "content": "#[derive(Debug)]\n\nstruct Either<'a, D>\n\nwhere\n\n D: Domain<'a>,\n\n{\n\n a: GoalEnum<'a, D>,\n\n b: GoalEnum<'a, D>,\n\n}\n\n\n\nimpl<'a, D> Fork<'a, D> for Either<'a, D>\n\nwhere\n\n D: Domain<'a>,\n\n{\n\n fn fork(&self, state: State<'a, D>) -> crate::state::StateIter<'a, D> {\n\n let a = self.a.clone().apply(state.clone()).into_iter();\n\n let b = self.b.clone().apply(state).into_iter();\n\n Box::new(a.chain(b))\n\n }\n\n}\n\n\n", "file_path": "canrun/src/goals/either.rs", "rank": 4, "score": 55150.15040253302 }, { "content": "#[derive(Debug)]\n\nstruct Member<I: Debug> {\n\n item: Val<I>,\n\n collection: Val<LVec<I>>,\n\n}\n\n\n\nimpl<'a, I, D> Constraint<'a, D> for Member<I>\n\nwhere\n\n I: UnifyIn<'a, D>,\n\n D: DomainType<'a, I> + DomainType<'a, LVec<I>>,\n\n{\n\n fn attempt(&self, state: &State<'a, D>) -> Result<ResolveFn<'a, D>, VarWatch> {\n\n let collection = resolve_1(&self.collection, state)?;\n\n let goals: Vec<_> = collection\n\n .vec\n\n .iter()\n\n .zip(repeat(self.item.clone()))\n\n .map(|(a, b)| unify::<I, &Val<I>, Val<I>, D>(a, b) as Goal<D>)\n\n .collect();\n\n Ok(Box::new(|state| Goal::any(goals).apply(state)))\n\n }\n", "file_path": "canrun/src/collections/lvec/member.rs", "rank": 5, "score": 54041.35270288444 }, { "content": "#[derive(Debug)]\n\nstruct Subset<I: Debug> {\n\n subset: Val<LVec<I>>,\n\n collection: Val<LVec<I>>,\n\n}\n\n\n\nimpl<'a, I, D> Constraint<'a, D> for Subset<I>\n\nwhere\n\n I: UnifyIn<'a, D>,\n\n D: DomainType<'a, I> + 
DomainType<'a, LVec<I>>,\n\n{\n\n fn attempt(&self, state: &State<'a, D>) -> Result<ResolveFn<'a, D>, VarWatch> {\n\n let (subset, collection) = resolve_2(&self.subset, &self.collection, state)?;\n\n\n\n let col_size = collection.len();\n\n let sub_size = subset.len();\n\n\n\n if col_size < sub_size {\n\n Ok(Box::new(|state| Goal::fail().apply(state)))\n\n } else {\n\n let subset: LVec<I> = subset.vec.iter().into();\n", "file_path": "canrun/src/collections/lvec/subset.rs", "rank": 6, "score": 54041.35270288444 }, { "content": "#[test]\n\nfn test_zebra() {\n\n assert_eq!(\n\n zebra(),\n\n Some(vec![\n\n (\"norwegian\", \"kools\", \"water\", \"fox\", \"yellow\"),\n\n (\"ukrainian\", \"chesterfields\", \"tea\", \"horse\", \"blue\"),\n\n (\"englishman\", \"oldgolds\", \"milk\", \"snails\", \"red\"),\n\n (\"spaniard\", \"luckystrikes\", \"oj\", \"dog\", \"ivory\"),\n\n (\"japanese\", \"parliaments\", \"coffee\", \"zebra\", \"green\"),\n\n ])\n\n )\n\n}\n", "file_path": "examples/src/zebra.rs", "rank": 7, "score": 51932.628036105794 }, { "content": "struct Assert<'a, T: Debug> {\n\n val: Val<T>,\n\n assert: Rc<dyn Fn(&T) -> bool + 'a>,\n\n}\n\n\n\nimpl<'a, T: fmt::Debug> fmt::Debug for Assert<'a, T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Assert({:?})\", self.val)\n\n }\n\n}\n\n\n\nimpl<'a, T, D> Constraint<'a, D> for Assert<'a, T>\n\nwhere\n\n T: Debug + 'a,\n\n D: DomainType<'a, T>,\n\n{\n\n fn attempt(&self, state: &State<'a, D>) -> Result<ResolveFn<'a, D>, VarWatch> {\n\n let resolved = state.resolve_val(&self.val).clone();\n\n match resolved {\n\n Resolved(val) => {\n", "file_path": "canrun/src/tests/test_constrain.rs", "rank": 8, "score": 51576.784094751754 }, { "content": "struct Either<'a, D: Domain<'a>>(\n\n Rc<dyn Fn(State<'a, D>) -> Option<State<'a, D>> + 'a>,\n\n Rc<dyn Fn(State<'a, D>) -> Option<State<'a, D>> + 'a>,\n\n);\n\n\n\nimpl<'a, D: Domain<'a>> fmt::Debug for Either<'a, D> {\n\n fn fmt(&self, f: &mut 
fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Either\")\n\n }\n\n}\n\n\n\nimpl<'a, D> Fork<'a, D> for Either<'a, D>\n\nwhere\n\n D: Domain<'a>,\n\n{\n\n fn fork(&self, state: State<'a, D>) -> crate::state::StateIter<'a, D> {\n\n let a = (self.0)(state.clone()).into_iter();\n\n let b = (self.1)(state).into_iter();\n\n Box::new(a.chain(b))\n\n }\n\n}\n\n\n", "file_path": "canrun/src/tests/test_fork.rs", "rank": 9, "score": 50302.23052947501 }, { "content": "#[test]\n\nfn multipart_unifying_vars() {\n\n let s: State<I32> = State::new();\n\n let x = var();\n\n let y = var();\n\n let s = s.apply(|s| {\n\n let s = s.unify(&val!(x), &val!(y))?;\n\n s.unify(&val!(1), &val!(y))\n\n });\n\n let results: Vec<_> = s.query((x, y)).collect();\n\n assert_eq!(results, vec![(1, 1)]);\n\n}\n", "file_path": "canrun/src/tests/test_unify.rs", "rank": 10, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn basic_unifying_vars() {\n\n let s: State<I32> = State::new();\n\n let x = var();\n\n\n\n assert_eq!(\n\n &val!(1),\n\n s.clone()\n\n .unify(&val!(x), &val!(1))\n\n .unwrap()\n\n .resolve_val(&val!(x))\n\n );\n\n assert_eq!(\n\n &val!(1),\n\n s.clone()\n\n .unify(&val!(1), &val!(x))\n\n .unwrap()\n\n .resolve_val(&val!(x))\n\n );\n\n}\n\n\n", "file_path": "canrun/src/tests/test_unify.rs", "rank": 11, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn unifying_var_success() {\n\n let s: State<I32> = State::new();\n\n let x = var();\n\n let s = s.apply(|s| s.unify(&val!(x), &val!(1))?.unify(&val!(1), &val!(x)));\n\n let results: Vec<i32> = s.query(x).collect();\n\n assert_eq!(results, vec![1]);\n\n}\n\n\n", "file_path": "canrun/src/tests/test_unify.rs", "rank": 12, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn basic_constrain_fails() {\n\n let x = var();\n\n let goals: Vec<Goal<I32>> = vec![\n\n unify(&val!(2), x.clone()),\n\n custom(|s| s.constrain(assert(x.clone(), |x| x > &1))),\n\n custom(|s| s.constrain(assert(x, |x| x > &3))),\n\n ];\n\n 
util::assert_permutations_resolve_to(goals, x, vec![]);\n\n}\n\n\n", "file_path": "canrun/src/tests/test_constrain.rs", "rank": 13, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn basic_constrain_succeeds() {\n\n let x = var();\n\n let goals: Vec<Goal<I32>> = vec![\n\n unify(2, x),\n\n custom(|s| s.constrain(assert(x, |x| x > &1))),\n\n custom(|s| s.constrain(assert(x, |x| x > &0))),\n\n ];\n\n util::assert_permutations_resolve_to(goals, x, vec![2]);\n\n}\n\n\n", "file_path": "canrun/src/tests/test_constrain.rs", "rank": 14, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn unsatisfied_constrain_fails() {\n\n let x = var();\n\n let y = var();\n\n let goals: Vec<Goal<I32>> = vec![\n\n unify(&val!(1), x),\n\n custom(|s| s.constrain(assert(y, |y| y < &3))),\n\n ];\n\n util::assert_permutations_resolve_to(goals.clone(), x, vec![]);\n\n util::assert_permutations_resolve_to(goals, y, vec![]);\n\n}\n", "file_path": "canrun/src/tests/test_constrain.rs", "rank": 15, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn basic_unifying_literals() {\n\n let s: State<I32> = State::new();\n\n assert!(s.clone().unify(&val!(1), &val!(1)).is_some());\n\n assert!(s.clone().unify(&val!(1), &val!(2)).is_none());\n\n}\n\n\n", "file_path": "canrun/src/tests/test_unify.rs", "rank": 16, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn unifying_var_fails() {\n\n let s: State<I32> = State::new();\n\n let s = s.apply(|s| {\n\n let x = var();\n\n s.unify(&x.into_val(), &val!(1))?\n\n .unify(&val!(2), &x.into_val())\n\n });\n\n assert!(s.is_none());\n\n}\n\n\n", "file_path": "canrun/src/tests/test_unify.rs", "rank": 17, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn basic_fork_both_success() {\n\n let state: State<I32> = State::new();\n\n let state = state.fork(Rc::new(Either(\n\n Rc::new(|s| s.unify(&val!(1), &val!(1))),\n\n Rc::new(|s| s.unify(&val!(2), &val!(2))),\n\n )));\n\n assert_eq!(2, state.iter_resolved().count());\n\n}\n", "file_path": 
"canrun/src/tests/test_fork.rs", "rank": 18, "score": 48693.141997757804 }, { "content": "#[test]\n\nfn basic_fork_second_success() {\n\n let state: State<I32> = State::new();\n\n let state = state.fork(Rc::new(Either(\n\n Rc::new(|s| s.unify(&val!(1), &val!(2))),\n\n Rc::new(|s| s.unify(&val!(2), &val!(2))),\n\n )));\n\n assert_eq!(1, state.iter_resolved().count());\n\n}\n\n\n", "file_path": "canrun/src/tests/test_fork.rs", "rank": 19, "score": 47758.89922825258 }, { "content": "#[test]\n\nfn basic_fork_first_success() {\n\n let state: State<I32> = State::new();\n\n let state = state.fork(Rc::new(Either(\n\n Rc::new(|s| s.unify(&val!(2), &val!(2))),\n\n Rc::new(|s| s.unify(&val!(1), &val!(2))),\n\n )));\n\n let results: Vec<_> = state.unwrap().iter_resolved().collect();\n\n assert_eq!(1, results.len());\n\n}\n\n\n", "file_path": "canrun/src/tests/test_fork.rs", "rank": 20, "score": 47758.89922825258 }, { "content": "fn get_id() -> LVarId {\n\n static COUNTER: AtomicUsize = AtomicUsize::new(1);\n\n COUNTER.fetch_add(1, Ordering::Relaxed)\n\n}\n\n\n\n/// A logical variable that represents a potential value of type `T`.\n\n///\n\n/// They are typically created with the [`var()`](crate::value::var) function.\n\n///\n\n/// `LVars` are are passed into [goals](crate::goals) to relate\n\n/// [values](crate::value) and other variables to each other. They can also be\n\n/// used to [query](crate::Query) for values in a\n\n/// [`ResolvedState`](crate::state::ResolvedState).\n\n///\n\n/// The identity of each `LVar` is tracked using an internal id. 
While this id\n\n/// is visible through the `Debug` implementation, it should only be used for\n\n/// debugging purposes as no guarantees are made about the type or generation of\n\n/// the id value.\n\n#[derive(Default)]\n\npub struct LVar<T: ?Sized> {\n\n pub(in super::super) id: LVarId,\n\n label: Option<&'static str>,\n\n t: PhantomData<T>,\n\n}\n\n\n", "file_path": "canrun/src/value/lvar.rs", "rank": 21, "score": 46367.536516821994 }, { "content": "pub fn benches(c: &mut Criterion) {\n\n goal_bench! {c \"one\" (x) {\n\n unify(x, 1) as Goal<I32>\n\n }}\n\n\n\n goal_bench! {c \"two\" (x, y) {\n\n all![unify(x, 1), unify(y, 1), unify(y, x)] as Goal<I32>\n\n }}\n\n\n\n goal_bench! {c \"three\" (x, y, z) {\n\n all![\n\n unify(x, 1),\n\n unify(y, 1),\n\n unify(z, 1),\n\n unify(y, x),\n\n unify(x, z),\n\n ] as Goal<I32>\n\n }}\n\n\n\n goal_bench! {c \"forking\" (a, b, c, d, e) {\n", "file_path": "canrun/benches/core.rs", "rank": 22, "score": 45053.16297329929 }, { "content": "pub fn zebra() -> Option<Vec<House>> {\n\n let houses: LVec<LHouse> = lvec![\n\n ltup!(\"norwegian\", var(), var(), var(), var()),\n\n var(),\n\n ltup!(var(), var(), \"milk\", var(), var()),\n\n var(),\n\n var(),\n\n ];\n\n let goal: Goal<Zebra> = all![\n\n member(ltup!(\"englishman\", var(), var(), var(), \"red\"), &houses),\n\n on_right(\n\n &ltup!(var(), var(), var(), var(), \"ivory\"),\n\n &ltup!(var(), var(), var(), var(), \"green\"),\n\n &houses\n\n ),\n\n next_to(\n\n &ltup!(\"norwegian\", var(), var(), var(), var()),\n\n &ltup!(var(), var(), var(), var(), \"blue\"),\n\n &houses\n\n ),\n", "file_path": "examples/src/zebra.rs", "rank": 23, "score": 44051.389042584415 }, { "content": "pub fn unify_lmaps(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"unify_lmaps\");\n\n group.sample_size(30);\n\n for size in (2..MAX_EXP).map(|n| BASE.pow(n)) {\n\n group.throughput(Throughput::Elements(size as u64));\n\n\n\n group.bench_with_input(\n\n BenchmarkId::new(\"matched/.nth(0)\", 
size),\n\n &size,\n\n |bench, size| {\n\n bench.iter(|| {\n\n let x = var();\n\n let goal: Goal<Collections> = both(\n\n unify(x, range_lmap(0..*size)),\n\n unify(x, range_lmap(0..*size)),\n\n );\n\n goal.query(x).nth(0)\n\n });\n\n },\n\n );\n", "file_path": "canrun/benches/collections.rs", "rank": 24, "score": 44051.389042584415 }, { "content": "pub fn zebra_benchmark(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"zebra\");\n\n group.sample_size(10);\n\n group.bench_function(\"zebra\", |b| b.iter(|| zebra()));\n\n group.finish();\n\n}\n\n\n\ncriterion_group!(benches, zebra_benchmark);\n\ncriterion_main!(benches);\n", "file_path": "examples/benches/main.rs", "rank": 25, "score": 44051.389042584415 }, { "content": "#[derive(Debug)]\n\nstruct LMapFork<K: Eq + Hash + Debug, V: Debug> {\n\n a_key: Val<K>,\n\n a_value: Val<V>,\n\n b_map: Rc<LMap<K, V>>,\n\n}\n\n\n\nimpl<'a, K: Eq + Hash + Debug, V: Debug, D> Fork<'a, D> for LMapFork<K, V>\n\nwhere\n\n K: UnifyIn<'a, D> + 'a,\n\n V: UnifyIn<'a, D> + 'a,\n\n D: DomainType<'a, K> + DomainType<'a, V>,\n\n{\n\n fn fork(&self, state: State<'a, D>) -> StateIter<'a, D> {\n\n let a_key = self.a_key.clone();\n\n let a_value = self.a_value.clone();\n\n let b_map = self.b_map.map.clone();\n\n Box::new(b_map.into_iter().filter_map(move |(b_key, b_value)| {\n\n state\n\n .clone()\n\n .unify(&a_key, &b_key)?\n", "file_path": "canrun/src/collections/lmap.rs", "rank": 26, "score": 43895.8863593455 }, { "content": "/// Resolve two [`Val`]s or return an [`Err(VarWatch)`](VarWatch) in a\n\n/// [`Constraint`].\n\npub fn resolve_2<'a, A, B, D>(\n\n a: &Val<A>,\n\n b: &Val<B>,\n\n state: &State<'a, D>,\n\n) -> Result<(Rc<A>, Rc<B>), VarWatch>\n\nwhere\n\n A: Debug,\n\n B: Debug,\n\n D: DomainType<'a, A> + DomainType<'a, B>,\n\n{\n\n let a = state.resolve_val(a);\n\n let b = state.resolve_val(b);\n\n match (a, b) {\n\n (Resolved(a), Resolved(b)) => Ok((a.clone(), b.clone())),\n\n (Var(var), _) => Err(VarWatch::one(*var)),\n\n 
(_, Var(var)) => Err(VarWatch::one(*var)),\n\n }\n\n}\n\n\n\n/// Resolve one out of two [`Val`]s or return an [`Err(VarWatch)`](VarWatch) in\n", "file_path": "canrun/src/state/constraints.rs", "rank": 27, "score": 43195.119727053534 }, { "content": "/// Test helper for ensuring that goals work no matter the order they are\n\n/// applied.\n\n///\n\n/// When building lower level goals, it can be easy to make mistakes where\n\n/// something appears to work fine but breaks when you reorder the goals. This\n\n/// is especially a problem with [projection goals](crate::goals::project).\n\n///\n\n/// This function takes a `Vec<Goal<_>>`, a [Query](crate::Query) and a `Vec`\n\n/// containing the expected values. It will try every permutation of the goals\n\n/// (wrapped in an [`all` goal](crate::goals::all!)) and panic if any of the\n\n/// results vary.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use canrun::{Goal, var, unify, assert_1, util};\n\n/// use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goals: Vec<Goal<I32>> = vec![\n\n/// unify(2, x),\n\n/// assert_1(x, |x| *x > 1),\n\n/// ];\n\n///\n\n/// util::assert_permutations_resolve_to(goals, x, vec![2]);\n\n/// ```\n\npub fn assert_permutations_resolve_to<'a, D, Q>(\n\n goals: Vec<Goal<'a, D>>,\n\n query: Q,\n\n expected: Vec<Q::Reified>,\n\n) where\n\n D: Domain<'a> + Debug + 'a,\n\n Q: ReifyIn<'a, D> + Clone + 'a,\n\n Q::Reified: PartialEq + Clone + Debug,\n\n{\n\n for permutation in all_permutations(goals) {\n\n let results: Vec<Q::Reified> = goals_resolve_to(&permutation, query.clone());\n\n if !expected\n\n .clone()\n\n .into_iter()\n\n .permutations(expected.len())\n\n .any(|e: Vec<Q::Reified>| e == results)\n\n {\n\n dbg!(permutation, results, expected);\n\n panic!(\"The permutation of the goals printed above failed!\");\n\n }\n\n }\n\n}\n", "file_path": "canrun/src/util.rs", "rank": 28, "score": 43127.07839033457 }, { "content": "fn unify_entries<'a, K, V, D>(\n\n mut state: State<'a, D>,\n\n a: 
Rc<LMap<K, V>>,\n\n b: Rc<LMap<K, V>>,\n\n) -> Option<State<'a, D>>\n\nwhere\n\n K: UnifyIn<'a, D> + Eq + Hash + fmt::Debug + 'a,\n\n V: UnifyIn<'a, D> + fmt::Debug + 'a,\n\n D: DomainType<'a, K> + DomainType<'a, V>,\n\n{\n\n for (a_key, a_value) in a.map.iter() {\n\n // In the best case, all of the keys in `a` exist in both maps\n\n if let Some(b_value) = b.map.get(a_key) {\n\n // So we can unify directly and continue or bail\n\n state = state.unify(a_value, b_value)?;\n\n } else {\n\n // Otherwise, we need to consider every possible match, which means\n\n // forking. The bad news is that this could blow up to a lot of\n\n // alternates if the map is large. The good news is that even if we\n\n // queue up a fork, any other matching keys that fail to unify will\n", "file_path": "canrun/src/collections/lmap.rs", "rank": 29, "score": 43117.14627307919 }, { "content": "/// Create a new [logical var](LVar).\n\n///\n\n/// This is simply a shorthand for [`LVar::new()`].\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{var, LVar};\n\n///\n\n/// let x: LVar<i32> = var();\n\n/// ```\n\npub fn var<T>() -> LVar<T> {\n\n LVar::new()\n\n}\n\n\n\nimpl<T> PartialEq for LVar<T> {\n\n fn eq(&self, other: &LVar<T>) -> bool {\n\n self.id == other.id\n\n }\n\n}\n\nimpl<T> Eq for LVar<T> {}\n\n\n\nimpl<T> LVar<T> {\n\n /// Create a new [logical var](LVar).\n\n ///\n\n /// The [`var()`](crate::value::var) function is typically used as a\n\n /// shorthand.\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// use canrun::{LVar};\n", "file_path": "canrun/src/value/lvar.rs", "rank": 30, "score": 42249.66405393835 }, { "content": "fn range_lmap(range: Range<i32>) -> lmap::LMap<i32, i32> {\n\n range.fold(lmap::LMap::new(), |mut map, n| {\n\n map.insert(var(), n);\n\n map\n\n })\n\n}\n\n\n\nstatic BASE: i32 = 2;\n\nstatic MAX_EXP: u32 = 7;\n\n\n", "file_path": "canrun/benches/collections.rs", "rank": 31, "score": 37610.10414367262 }, { "content": "/// Create a [goal](crate::goals::Goal) that 
gives access to the underlying\n\n/// [`State`](crate::state::State) struct.\n\n///\n\n/// Similar to [`lazy`](crate::goals::lazy()), the passed in callback is given\n\n/// access to the state so it can call the lower level [State] manipulation\n\n/// methods. This should approach should be used sparingly. Ideally most logic\n\n/// should be composable out of lower level primitive goals.\n\n///\n\n/// Because the [State] methods return an `Option<[State]>` the\n\n/// [question mark operator `?`](https://doc.rust-lang.org/edition-guide/rust-2018/error-handling-and-panics/the-question-mark-operator-for-easier-error-handling.html)\n\n/// can be used to allow chaining operations on the [State].\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// use canrun::{Goal, custom, val, var};\n\n/// use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = custom(|state| {\n\n/// let y = var();\n\n/// state.unify(&val!(y), &val!(1))?\n\n/// .unify(&val!(x), &val!(y))\n\n/// });\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1])\n\n/// ```\n\npub fn custom<'a, D, F>(func: F) -> Goal<'a, D>\n\nwhere\n\n D: Domain<'a>,\n\n F: Fn(State<'a, D>) -> Option<State<'a, D>> + 'a,\n\n{\n\n Goal(GoalEnum::Custom(Custom(Rc::new(func))))\n\n}\n\n\n\nimpl<'a, D: Domain<'a>> fmt::Debug for Custom<'a, D> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Custom ??\")\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::custom;\n\n use crate::example::I32;\n\n use crate::goals::Goal;\n\n use crate::util;\n", "file_path": "canrun/src/goals/custom.rs", "rank": 32, "score": 37131.14104179037 }, { "content": "/// Create a [goal](crate::goals::Goal) that is generated via callback just as\n\n/// it is about to be evaluated.\n\n///\n\n/// The primary uses for this function involve introducing new internal vars.\n\n/// The passed in callback function should return a valid goal to be evaluated.\n\n///\n\n/// 
# Examples\n\n///\n\n/// ```\n\n/// use canrun::{Goal, lazy, both, unify, var};\n\n/// use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = lazy(|| {\n\n/// let y = var();\n\n/// both(unify(y, 1), unify(x, y))\n\n/// });\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1])\n\n/// ```\n\npub fn lazy<'a, D, F>(func: F) -> Goal<'a, D>\n\nwhere\n\n D: Domain<'a>,\n\n F: Fn() -> Goal<'a, D> + 'a,\n\n{\n\n Goal(GoalEnum::Lazy(Lazy(Rc::new(func))))\n\n}\n\n\n\nimpl<'a, D: Domain<'a>> fmt::Debug for Lazy<'a, D> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Lazy ??\")\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::lazy;\n\n use crate::example::I32;\n\n use crate::goals::unify::unify;\n\n use crate::goals::Goal;\n", "file_path": "canrun/src/goals/lazy.rs", "rank": 33, "score": 37130.60282257776 }, { "content": "/// Create a [projection goal](super) that allows deriving one resolved value\n\n/// from the other.\n\n///\n\n/// Functions must be provided to derive in both directions. 
Whichever value is\n\n/// resolved first will be used to derive the other.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, all, unify, var, map_1};\n\n/// use canrun::example::I32;\n\n///\n\n/// let (x, y) = (var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(1, x),\n\n/// map_1(x, y, |x| x + 1, |y| y - 1),\n\n/// ];\n\n/// let result: Vec<_> = goal.query(y).collect();\n\n/// assert_eq!(result, vec![2])\n\n/// ```\n\npub fn map_1<'a, A, AV, B, BV, D, AtoB, BtoA>(\n\n a: AV,\n\n b: BV,\n\n a_to_b: AtoB,\n\n b_to_a: BtoA,\n\n) -> Goal<'a, D>\n\nwhere\n\n A: UnifyIn<'a, D> + Debug + 'a,\n\n B: UnifyIn<'a, D> + Debug + 'a,\n\n AV: IntoVal<A>,\n\n BV: IntoVal<B>,\n\n D: DomainType<'a, A> + DomainType<'a, B>,\n\n AtoB: Fn(&A) -> B + 'a,\n\n BtoA: Fn(&B) -> A + 'a,\n\n{\n\n Goal::constraint(Map1 {\n\n a: a.into_val(),\n\n b: b.into_val(),\n\n a_to_b: Rc::new(a_to_b),\n\n b_to_a: Rc::new(b_to_a),\n", "file_path": "canrun/src/goals/project/map_1.rs", "rank": 34, "score": 35725.35368796924 }, { "content": "/// Create a [goal](crate::goals::Goal) that only succeeds if both sub-goals\n\n/// succeed.\n\n///\n\n/// This is essentially an \"AND\" operation. 
The resulting state will be the\n\n/// result of the combining the two sub-goals.\n\n///\n\n/// If the first goal fails, the second goal will not be attempted.\n\n///\n\n/// # Examples\n\n///\n\n/// Two successful goals allow values to flow between vars:\n\n/// ```\n\n/// use canrun::{Goal, both, unify, var};\n\n/// use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let y = var();\n\n/// let goal: Goal<I32> = both(unify(y, x), unify(1, x));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1])\n\n/// ```\n\n///\n\n/// A failing goal will cause the entire goal to fail:\n\n/// ```\n\n/// # use canrun::{Goal, both, unify, var};\n\n/// # use canrun::example::I32;\n\n/// # let x = var();\n\n/// let goal: Goal<I32> = both(unify(2, x), unify(1, x));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![]) // Empty result\n\n/// ```\n\npub fn both<'a, D>(a: Goal<'a, D>, b: Goal<'a, D>) -> Goal<'a, D>\n\nwhere\n\n D: Domain<'a>,\n\n{\n\n Goal(GoalEnum::Both(Box::new(a.0), Box::new(b.0)))\n\n}\n", "file_path": "canrun/src/goals/both.rs", "rank": 35, "score": 35620.35557917631 }, { "content": "/// Create a [goal](crate::goals::Goal) that attempts to\n\n/// [unify](module@crate::unify) two values with each other.\n\n///\n\n/// If one of the values is an unbound [`LVar`](crate::value::LVar), it will be\n\n/// bound to the other value. If both values are able to be resolved, they will\n\n/// be compared with [`UnifyIn::unify_resolved`](crate::unify::UnifyIn#tymethod.\n\n/// unify_resolved). 
If this unification fails, the goal will fail.\n\n///\n\n/// # Examples\n\n///\n\n/// Unifying a fresh `LVar` will bind it to the other value:\n\n/// ```\n\n/// use canrun::{Goal, unify, var};\n\n/// use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = unify(1, x);\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1])\n\n/// ```\n\n///\n\n/// Attempting to unify two unequal values will fail:\n\n/// ```\n\n/// # use canrun::{Goal, unify, var};\n\n/// # use canrun::example::I32;\n\n/// # let x = var();\n\n/// let goal: Goal<I32> = unify(1, 2);\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![])\n\n/// ```\n\npub fn unify<'a, T, A, B, D>(a: A, b: B) -> Goal<'a, D>\n\nwhere\n\n T: UnifyIn<'a, D> + Debug,\n\n A: IntoVal<T>,\n\n B: IntoVal<T>,\n\n D: DomainType<'a, T>,\n\n{\n\n Goal(GoalEnum::UnifyIn(\n\n D::into_domain_val(a.into_val()),\n\n D::into_domain_val(b.into_val()),\n\n ))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n #[test]\n\n fn deeply_nested_vars() {\n\n let (w, x, y, z) = (var(), var(), var(), var());\n\n let goals: Vec<Goal<I32>> = vec![unify(w, 1), unify(w, x), unify(x, y), unify(y, z)];\n\n util::assert_permutations_resolve_to(goals, z, vec![1])\n\n }\n\n}\n", "file_path": "canrun/src/goals/unify.rs", "rank": 36, "score": 35241.151608118824 }, { "content": "/// Create a [goal](crate::goals::Goal) that succeeds if either sub-goal\n\n/// succeed.\n\n///\n\n/// This is essentially an \"OR\" operation, and will eventually lead to zero, one\n\n/// or two [resolved states](crate::state::ResolvedState), depending on the\n\n/// success or failure of the sub-goals.\n\n///\n\n/// # Examples\n\n///\n\n/// Two successful goals will yield up two different results:\n\n/// ```\n\n/// use canrun::value::var;\n\n/// use canrun::goals::{Goal, either, unify};\n\n/// use 
canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = either(unify(x, 1), unify(x, 2));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1, 2])\n\n/// ```\n\n///\n\n/// One failing goal will not cause the other to fail:\n\n/// ```\n\n/// # use canrun::{Goal, either, unify, var};\n\n/// # use canrun::example::I32;\n\n/// # let x = var();\n\n/// let goal: Goal<I32> = either(unify(1, 2), unify(x, 3));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![3])\n\n/// ```\n\n///\n\n/// Both goals can fail, leading to no results:\n\n/// ```\n\n/// # use canrun::{Goal, either, unify, var};\n\n/// # use canrun::example::I32;\n\n/// # let x = var();\n\n/// let goal: Goal<I32> = either(unify(6, 5), unify(1, 2));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![]) // Empty result\n\n/// ```\n\npub fn either<'a, D>(a: Goal<'a, D>, b: Goal<'a, D>) -> Goal<'a, D>\n\nwhere\n\n D: Domain<'a>,\n\n{\n\n Goal::fork(Either { a: a.0, b: b.0 })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::either;\n\n use crate::example::I32;\n\n use crate::goals::unify::unify;\n\n use crate::goals::Goal;\n\n use crate::util;\n\n use crate::value::var;\n\n\n\n #[test]\n\n fn either_both_succeeds() {\n\n let x = var();\n\n let goal = either::<I32>(unify(x, 5), unify(x, 7));\n", "file_path": "canrun/src/goals/either.rs", "rank": 37, "score": 34130.36530620088 }, { "content": "fn next_to(a: &LHouse, b: &LHouse, houses: &LVec<LHouse>) -> Goal<'static, Zebra> {\n\n either(on_right(a, b, houses), on_right(b, a, houses))\n\n}\n\n\n", "file_path": "examples/src/zebra.rs", "rank": 38, "score": 33933.581025540094 }, { "content": "/// Assert that [`LMap`] `a` is a subset of [`LMap`] `b`.\n\n///\n\n/// This means that all of the keys in `a` unify with keys in `b` AND the\n\n/// corresponding values also unify. 
This is the opposite of [`superset`].\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{var, Goal};\n\n/// use canrun::lmap::{lmap, subset};\n\n/// use canrun::example::Collections;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<Collections> = subset(lmap! {x => 2}, lmap! {1 => 2, 3 => 4});\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![1]);\n\n/// ```\n\npub fn subset<'a, K, V, A, B, D>(a: A, b: B) -> Goal<'a, D>\n\nwhere\n\n K: Debug + Eq + Hash + UnifyIn<'a, D> + 'a,\n\n V: Debug + UnifyIn<'a, D> + 'a,\n\n A: IntoVal<LMap<K, V>>,\n\n B: IntoVal<LMap<K, V>>,\n\n D: DomainType<'a, LMap<K, V>> + DomainType<'a, K> + DomainType<'a, V> + 'a,\n\n{\n\n project_2(a, b, |a, b| {\n\n custom(move |state| unify_entries(state, a.clone(), b.clone()))\n\n })\n\n}\n\n\n", "file_path": "canrun/src/collections/lmap/compare.rs", "rank": 39, "score": 33446.805044710185 }, { "content": "/// Assert that [`LMap`] `a` is a superset of [`LMap`] `b`.\n\n///\n\n/// This means that all of the keys in `b` unify with keys in `a` AND the\n\n/// corresponding values also unify. This is the opposite of [`subset`].\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{var, Goal};\n\n/// use canrun::lmap::{lmap, superset};\n\n/// use canrun::example::Collections;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<Collections> = superset(lmap! {x => 2, 3 => 4}, lmap! 
{1 => 2});\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![1]);\n\n/// ```\n\npub fn superset<'a, K, V, A, B, D>(a: A, b: B) -> Goal<'a, D>\n\nwhere\n\n K: Debug + Eq + Hash + UnifyIn<'a, D> + 'a,\n\n V: Debug + UnifyIn<'a, D> + 'a,\n\n A: IntoVal<LMap<K, V>>,\n\n B: IntoVal<LMap<K, V>>,\n\n D: DomainType<'a, LMap<K, V>> + DomainType<'a, K> + DomainType<'a, V> + 'a,\n\n{\n\n subset(b, a)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{subset, superset};\n\n use crate::example::Collections;\n\n use crate::lmap;\n\n use crate::{var, Goal, IterResolved};\n\n\n\n #[test]\n\n fn subset_should_succeed_on() {\n", "file_path": "canrun/src/collections/lmap/compare.rs", "rank": 40, "score": 33446.805044710185 }, { "content": "/// Create a [projection goal](super) that succeeds if the resolved value passes\n\n/// an assertion test.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, both, unify, var, assert_1};\n\n/// use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = both(unify(1, x), assert_1(x, |x| *x < 2));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1])\n\n/// ```\n\npub fn assert_1<'a, A, AV, D, F>(a: AV, func: F) -> Goal<'a, D>\n\nwhere\n\n A: Debug + 'a,\n\n AV: IntoVal<A>,\n\n D: DomainType<'a, A>,\n\n F: Fn(&A) -> bool + 'a,\n\n{\n\n Goal::constraint(Assert1 {\n\n a: a.into_val(),\n\n f: Rc::new(func),\n\n })\n\n}\n\n\n\nimpl<'a, A, Dom> Constraint<'a, Dom> for Assert1<'a, A>\n\nwhere\n\n A: Debug + 'a,\n\n Dom: DomainType<'a, A>,\n\n{\n\n fn attempt(&self, state: &State<'a, Dom>) -> Result<ResolveFn<'a, Dom>, VarWatch> {\n\n let a = resolve_1(&self.a, state)?;\n", "file_path": "canrun/src/goals/project/assert_1.rs", "rank": 41, "score": 33192.82828147633 }, { "content": "/// Create a [projection goal](super) that allows creating a new goal based on\n\n/// the resolved value.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, both, unify, var, project_1};\n\n/// 
use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = both(unify(1, x), project_1(x, |x| if *x < 2 { Goal::succeed() } else { Goal::fail() }));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1])\n\n/// ```\n\npub fn project_1<'a, A, AV, D, F>(a: AV, func: F) -> Goal<'a, D>\n\nwhere\n\n A: Debug + 'a,\n\n AV: IntoVal<A>,\n\n D: DomainType<'a, A>,\n\n F: Fn(&A) -> Goal<'a, D> + 'a,\n\n{\n\n Goal::constraint(Project1 {\n\n a: a.into_val(),\n\n f: Rc::new(func),\n\n })\n\n}\n\n\n\nimpl<'a, A, Dom> Constraint<'a, Dom> for Project1<'a, A, Dom>\n\nwhere\n\n A: Debug,\n\n Dom: DomainType<'a, A>,\n\n{\n\n fn attempt(&self, state: &State<'a, Dom>) -> Result<ResolveFn<'a, Dom>, VarWatch> {\n\n let a = resolve_1(&self.a, state)?;\n", "file_path": "canrun/src/goals/project/project_1.rs", "rank": 42, "score": 33192.45200480164 }, { "content": "use crate::domains::DomainType;\n\nuse crate::state::State;\n\nuse crate::value::{ReifyIn, Val};\n\nuse crate::ResolvedState;\n\nuse crate::UnifyIn;\n\nuse std::rc::Rc;\n\n\n\nmacro_rules! 
impl_for_tuple {\n\n ($($t:ident => $r:ident),+) => {\n\n impl<'a, $($t,)* D> UnifyIn<'a, D> for ($(Val<$t>),*)\n\n where\n\n $($t: UnifyIn<'a, D>, )*\n\n D: $(DomainType<'a, $t> +)* DomainType<'a, Self>\n\n {\n\n fn unify_resolved(\n\n state: State<'a, D>,\n\n l: Rc<Self>,\n\n r: Rc<Self>,\n\n ) -> Option<State<'a, D>> {\n\n #![allow(non_snake_case)]\n", "file_path": "canrun/src/impls/tuples.rs", "rank": 43, "score": 33077.773427011896 }, { "content": "use super::{Domain, State};\n\nuse std::fmt;\n\n\n\nimpl<'a, D: Domain<'a>> Default for State<'a, D> {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl<'a, D: Domain<'a> + 'a> fmt::Debug for State<'a, D> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"State {:?}\", self.domain)\n\n }\n\n}\n", "file_path": "canrun/src/state/impls.rs", "rank": 44, "score": 33076.42077491474 }, { "content": "use crate::{Domain, DomainType, ReifyIn, ResolvedState, State, UnifyIn};\n\nuse std::rc::Rc;\n\n\n\nmacro_rules! impl_unify_eq {\n\n ($($type:ty),+) => {\n\n $(\n\n impl <'a, D: DomainType<'a, $type>> UnifyIn<'a, D> for $type {\n\n fn unify_resolved(state: State<'a, D>, a: Rc<$type>, b: Rc<$type>) -> Option<State<'a, D>> {\n\n if a == b {\n\n Some(state)\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n )+\n\n };\n\n}\n\n\n\nmacro_rules! impl_reify_copy {\n", "file_path": "canrun/src/impls/primitive.rs", "rank": 45, "score": 33076.19208583697 }, { "content": "#[macro_export]\n\nmacro_rules! ltup {\n\n ($($item:expr),* $(,)?) => {\n\n ($($crate::value::IntoVal::into_val($item)),*)\n\n };\n\n}\n\n\n\n#[doc(inline)]\n\npub use ltup;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate as canrun;\n\n use crate::goals::unify;\n\n use crate::goals::Goal;\n\n use crate::util;\n\n use crate::value::{var, Val};\n\n use canrun_codegen::domain;\n\n\n\n domain! 
{\n", "file_path": "canrun/src/impls/tuples.rs", "rank": 46, "score": 33075.40187033782 }, { "content": " }\n\n )+\n\n }\n\n}\n\n\n\nimpl_unify_eq!(i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64);\n\nimpl_unify_eq!(String, &'static str, bool, char);\n\n\n\nimpl_reify_copy!(i8, i16, i32, i64, u8, u16, u32, u64, isize, usize, f32, f64);\n\nimpl_reify_clone!(String);\n\nimpl_reify_copy!(&'static str, bool, char);\n", "file_path": "canrun/src/impls/primitive.rs", "rank": 47, "score": 33074.67757944806 }, { "content": " };\n\n}\n\n\n\nimpl_for_tuple!(Av => Ar, Bv => Br);\n\nimpl_for_tuple!(Av => Ar, Bv => Br, Cv => Cr);\n\nimpl_for_tuple!(Av => Ar, Bv => Br, Cv => Cr, Dv => Dr);\n\nimpl_for_tuple!(Av => Ar, Bv => Br, Cv => Cr, Dv => Dr, Ev => Er);\n\n\n\n/// Create a tuple of [logical values](value::Val) with automatic [`IntoVal`\n\n/// wrapping](value::IntoVal).\n\n///\n\n/// The primary benefit is that it allows freely mixing resolved values and\n\n/// [`LVar`s](value::LVar).\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{var, ltup, Val};\n\n/// let x = var();\n\n/// let tuple: (Val<i32>, Val<i32>, Val<&'static str>) = ltup!(x, 1, \"two\");\n\n/// ```\n", "file_path": "canrun/src/impls/tuples.rs", "rank": 48, "score": 33072.38236724868 }, { "content": " ($($type:ty),+) => {\n\n $(\n\n impl<'a, D: Domain<'a>> ReifyIn<'a, D> for $type {\n\n type Reified = $type;\n\n fn reify_in(&self, _: &ResolvedState<D>) -> Option<$type> {\n\n Some(*self)\n\n }\n\n }\n\n )+\n\n }\n\n}\n\n\n\nmacro_rules! 
impl_reify_clone {\n\n ($($type:ty),+) => {\n\n $(\n\n impl<'a, D: Domain<'a>> ReifyIn<'a, D> for $type {\n\n type Reified = $type;\n\n fn reify_in(&self, _: &ResolvedState<D>) -> Option<$type> {\n\n Some(self.clone())\n\n }\n", "file_path": "canrun/src/impls/primitive.rs", "rank": 49, "score": 33071.165844373216 }, { "content": "mod primitive;\n\npub(crate) mod tuples;\n", "file_path": "canrun/src/impls/mod.rs", "rank": 50, "score": 33070.66029641013 }, { "content": " let ($($t),*) = &*l;\n\n // Abusing the \"reified\" ident as \"right\" since\n\n // it's available. If we did this as a proc-macro\n\n // we could actually make up our own names.\n\n let ($($r),*) = &*r;\n\n Some(\n\n state\n\n $(.unify(&$t, &$r)?)*\n\n )\n\n }\n\n }\n\n\n\n impl<'a, D: 'a, $($t: ReifyIn<'a, D, Reified = $r>, $r,)*> ReifyIn<'a, D> for ($($t),*) {\n\n type Reified = ($($t::Reified),*);\n\n fn reify_in(&self, state: &ResolvedState<D>) -> Option<Self::Reified> {\n\n #![allow(non_snake_case)]\n\n let ($($t),*) = self;\n\n Some(($($t.reify_in(state)?),*))\n\n }\n\n }\n", "file_path": "canrun/src/impls/tuples.rs", "rank": 51, "score": 33069.685465803304 }, { "content": " pub Tuples2 {\n\n i32,\n\n (Val<i32>, Val<i32>),\n\n }\n\n }\n\n domain! 
{\n\n pub Tuples3 {\n\n i32,\n\n (Val<i32>, Val<i32>, Val<i32>),\n\n }\n\n }\n\n\n\n #[test]\n\n fn tuple2_succeeds() {\n\n let x = var();\n\n let goals: Vec<Goal<Tuples2>> = vec![unify(x, ltup!(1, 2)), unify(x, ltup!(1, 2))];\n\n util::assert_permutations_resolve_to(goals, x, vec![(1, 2)]);\n\n }\n\n\n\n #[test]\n", "file_path": "canrun/src/impls/tuples.rs", "rank": 52, "score": 33067.03587997396 }, { "content": " fn tuple2_fails() {\n\n let x = var();\n\n let goals: Vec<Goal<Tuples2>> = vec![unify(x, ltup!(1, 3)), unify(x, ltup!(1, 2))];\n\n util::assert_permutations_resolve_to(goals, x, vec![]);\n\n }\n\n\n\n #[test]\n\n fn tuple2_nested_var() {\n\n let x = var();\n\n let y = var();\n\n let goals: Vec<Goal<Tuples2>> = vec![unify(x, ltup!(1, y)), unify(x, ltup!(1, 2))];\n\n util::assert_permutations_resolve_to(goals, y, vec![2]);\n\n }\n\n\n\n #[test]\n\n fn tuple3_succeeds() {\n\n let x = var();\n\n let goals: Vec<Goal<Tuples3>> = vec![unify(x, ltup!(1, 2, 3)), unify(x, ltup!(1, 2, 3))];\n\n util::assert_permutations_resolve_to(goals, x, vec![(1, 2, 3)]);\n\n }\n", "file_path": "canrun/src/impls/tuples.rs", "rank": 53, "score": 33067.03587997396 }, { "content": "\n\n #[test]\n\n fn tuple3_fails() {\n\n let x = var();\n\n let goals: Vec<Goal<Tuples3>> = vec![unify(x, ltup!(1, 2, 3)), unify(x, ltup!(1, 2, 4))];\n\n util::assert_permutations_resolve_to(goals, x, vec![]);\n\n }\n\n\n\n #[test]\n\n fn tuple3_nested_var() {\n\n let x = var();\n\n let y = var();\n\n let goals: Vec<Goal<Tuples3>> = vec![unify(x, ltup!(1, y, 3)), unify(x, ltup!(1, 2, 3))];\n\n util::assert_permutations_resolve_to(goals, y, vec![2]);\n\n }\n\n}\n", "file_path": "canrun/src/impls/tuples.rs", "rank": 54, "score": 33067.03587997396 }, { "content": "fn on_right<'a>(left: &LHouse, right: &LHouse, houses: &LVec<LHouse>) -> Goal<'a, Zebra> {\n\n subset(lvec![left, right], houses)\n\n}\n\n\n", "file_path": "examples/src/zebra.rs", "rank": 55, "score": 32869.23944266216 }, { "content": "/// Ensure 
that one value is less than another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::lt;\n\n///\n\n/// let (x, y) = (var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 1),\n\n/// unify(y, 2),\n\n/// lt(x, y)\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y)).collect();\n\n/// assert_eq!(results, vec![(1, 2)]);\n\n/// ```\n\npub fn lt<'a, A, AV, B, BV, D>(a: AV, b: BV) -> Goal<'a, D>\n\nwhere\n\n A: PartialOrd<B> + Debug + 'a,\n\n B: Debug + 'a,\n\n AV: IntoVal<A>,\n\n BV: IntoVal<B>,\n\n D: DomainType<'a, A> + DomainType<'a, B>,\n\n{\n\n assert_2(a, b, |a, b| a < b)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::lt;\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n #[test]\n\n fn succeeds() {\n\n let (x, y) = (var(), var());\n", "file_path": "canrun/src/goals/cmp/lt.rs", "rank": 56, "score": 32804.72508502056 }, { "content": "/// Ensure that one value is greater than another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::gt;\n\n///\n\n/// let (x, y) = (var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 2),\n\n/// unify(y, 1),\n\n/// gt(x, y)\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y)).collect();\n\n/// assert_eq!(results, vec![(2, 1)]);\n\n/// ```\n\npub fn gt<'a, A, AV, B, BV, D>(a: AV, b: BV) -> Goal<'a, D>\n\nwhere\n\n A: PartialOrd<B> + Debug + 'a,\n\n B: Debug + 'a,\n\n AV: IntoVal<A>,\n\n BV: IntoVal<B>,\n\n D: DomainType<'a, A> + DomainType<'a, B>,\n\n{\n\n assert_2(a, b, |a, b| a > b)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::gt;\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n #[test]\n\n fn succeeds() {\n\n let (x, y) = (var(), var());\n", "file_path": "canrun/src/goals/cmp/gt.rs", "rank": 57, "score": 32804.72508502056 }, { "content": "/// 
Ensure that one value is greater than or equal to another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::gte;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 2),\n\n/// unify(y, 1),\n\n/// unify(z, 1),\n\n/// gte(x, y),\n\n/// gte(y, z),\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y, z)).collect();\n\n/// assert_eq!(results, vec![(2, 1, 1)]);\n\n/// ```\n\npub fn gte<'a, A, AV, B, BV, D>(a: AV, b: BV) -> Goal<'a, D>\n\nwhere\n\n A: PartialOrd<B> + Debug + 'a,\n\n B: Debug + 'a,\n\n AV: IntoVal<A>,\n\n BV: IntoVal<B>,\n\n D: DomainType<'a, A> + DomainType<'a, B>,\n\n{\n\n assert_2(a, b, |a, b| a >= b)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::gte;\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n #[test]\n\n fn succeeds() {\n\n let (x, y, z) = (var(), var(), var());\n", "file_path": "canrun/src/goals/cmp/gte.rs", "rank": 58, "score": 32804.487710485264 }, { "content": "/// Ensure that one value is less than or equal to another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::lte;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 1),\n\n/// unify(y, 2),\n\n/// unify(z, 2),\n\n/// lte(x, y),\n\n/// lte(y, z),\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y, z)).collect();\n\n/// assert_eq!(results, vec![(1, 2, 2)]);\n\n/// ```\n\npub fn lte<'a, A, AV, B, BV, D>(a: AV, b: BV) -> Goal<'a, D>\n\nwhere\n\n A: PartialOrd<B> + Debug + 'a,\n\n B: Debug + 'a,\n\n AV: IntoVal<A>,\n\n BV: IntoVal<B>,\n\n D: DomainType<'a, A> + DomainType<'a, B>,\n\n{\n\n assert_2(a, b, |a, b| a <= b)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::lte;\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n 
#[test]\n\n fn succeeds() {\n\n let (x, y, z) = (var(), var(), var());\n", "file_path": "canrun/src/goals/cmp/lte.rs", "rank": 59, "score": 32804.487710485264 }, { "content": "/// Create a [`Goal`] that attempts to unify a `Val<T>` with\n\n/// any of the items in a `LVec<T>`.\n\n///\n\n/// This goal will fork the state for each match found.\n\n///\n\n/// # Examples:\n\n/// ```\n\n/// use canrun::{Goal, val, var, all, unify, lvec, example::Collections};\n\n///\n\n/// let x = var();\n\n/// let xs = var();\n\n/// let goal: Goal<Collections> = all![\n\n/// unify(x, 1),\n\n/// unify(xs, lvec![1, 2, 3]),\n\n/// lvec::member(x, xs),\n\n/// ];\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![1]);\n\n/// ```\n\n///\n\n/// ```\n\n/// # use canrun::{Goal, val, var, all, unify};\n\n/// use canrun::{lvec, example::Collections};\n\n/// #\n\n/// let x = var();\n\n/// let goal: Goal<Collections> = all![\n\n/// lvec::member(x, lvec![1, 2, 3]),\n\n/// ];\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![1, 2, 3]);\n\n/// ```\n\npub fn member<'a, I, IV, CV, D>(item: IV, collection: CV) -> Goal<'a, D>\n\nwhere\n\n I: UnifyIn<'a, D> + 'a,\n\n IV: IntoVal<I>,\n\n LVec<I>: UnifyIn<'a, D>,\n\n CV: IntoVal<LVec<I>>,\n\n D: DomainType<'a, I> + DomainType<'a, LVec<I>>,\n\n{\n\n Goal::constraint(Member {\n\n item: item.into_val(),\n\n collection: collection.into_val(),\n\n })\n\n}\n\n\n", "file_path": "canrun/src/collections/lvec/member.rs", "rank": 60, "score": 32585.38292317064 }, { "content": "/// Create a [`Goal`] that attempts to unify a `Val<T>` with\n\n/// any of the items in a `LVec<T>`.\n\n///\n\n/// This goal will fork the state for each match found.\n\n/// # Examples:\n\n/// ```\n\n/// use canrun::{Goal, val, var, all, unify, lvec, example::Collections};\n\n///\n\n/// let needle = var();\n\n/// let haystack = var();\n\n/// let goal: Goal<Collections> = all![\n\n/// unify(needle, lvec![1]),\n\n/// 
unify(haystack, lvec![1, 2, 3]),\n\n/// lvec::subset(needle, haystack),\n\n/// ];\n\n/// let results: Vec<_> = goal.query(needle).collect();\n\n/// assert_eq!(results, vec![vec![1]]);\n\n/// ```\n\npub fn subset<'a, I, SV, CV, D>(subset: SV, collection: CV) -> Goal<'a, D>\n\nwhere\n\n I: UnifyIn<'a, D> + 'a,\n\n SV: IntoVal<LVec<I>>,\n\n LVec<I>: UnifyIn<'a, D>,\n\n CV: IntoVal<LVec<I>>,\n\n D: DomainType<'a, I> + DomainType<'a, LVec<I>>,\n\n{\n\n Goal::constraint(Subset {\n\n subset: subset.into_val(),\n\n collection: collection.into_val(),\n\n })\n\n}\n\n\n", "file_path": "canrun/src/collections/lvec/subset.rs", "rank": 61, "score": 32583.64505674009 }, { "content": "/// Subtract one value from another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::ops::sub;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = sub(3, 2, x);\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![1]);\n\n/// ```\n\npub fn sub<'a, T, A, B, C, D>(a: A, b: B, c: C) -> Goal<'a, D>\n\nwhere\n\n T: Add<Output = T> + Sub<Output = T> + UnifyIn<'a, D> + Copy + 'a,\n\n A: IntoVal<T>,\n\n B: IntoVal<T>,\n\n C: IntoVal<T>,\n\n D: DomainType<'a, T>,\n\n{\n\n map_2(a, b, c, |a, b| *a - *b, |a, c| *a - *c, |b, c| *b + *c)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::sub;\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n #[test]\n\n fn succeeds() {\n\n let (x, y, z) = (var(), var(), var());\n", "file_path": "canrun/src/goals/ops/sub.rs", "rank": 62, "score": 32105.66769158037 }, { "content": "/// Add two values together.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::ops::add;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = add(1, 2, x);\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![3]);\n\n/// ```\n\npub 
fn add<'a, T, A, B, C, D>(a: A, b: B, c: C) -> Goal<'a, D>\n\nwhere\n\n T: Add<Output = T> + Sub<Output = T> + UnifyIn<'a, D> + Copy + 'a,\n\n A: IntoVal<T>,\n\n B: IntoVal<T>,\n\n C: IntoVal<T>,\n\n D: DomainType<'a, T>,\n\n{\n\n map_2(a, b, c, |a, b| *a + *b, |a, c| *c - *a, |b, c| *c - *b)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::add;\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n #[test]\n\n fn succeeds() {\n\n let (x, y, z) = (var(), var(), var());\n", "file_path": "canrun/src/goals/ops/add.rs", "rank": 63, "score": 32105.66769158037 }, { "content": "/// Multiply two values together.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::ops::mul;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = mul(2, 3, x);\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![6]);\n\n/// ```\n\npub fn mul<'a, T, A, B, C, D>(a: A, b: B, c: C) -> Goal<'a, D>\n\nwhere\n\n T: Mul<Output = T> + Div<Output = T> + UnifyIn<'a, D> + Copy + 'a,\n\n A: IntoVal<T>,\n\n B: IntoVal<T>,\n\n C: IntoVal<T>,\n\n D: DomainType<'a, T>,\n\n{\n\n map_2(a, b, c, |a, b| *a * *b, |a, c| *c / *a, |b, c| *c / *b)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::mul;\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n #[test]\n\n fn succeeds() {\n\n let (x, y, z) = (var(), var(), var());\n", "file_path": "canrun/src/goals/ops/mul.rs", "rank": 64, "score": 32105.66769158037 }, { "content": "/// Divide one value with another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::ops::div;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = div(3, 2, x);\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![1]);\n\n/// ```\n\npub fn div<'a, T, A, B, C, D>(a: A, b: B, c: C) -> Goal<'a, 
D>\n\nwhere\n\n T: Mul<Output = T> + Div<Output = T> + UnifyIn<'a, D> + Copy + 'a,\n\n A: IntoVal<T>,\n\n B: IntoVal<T>,\n\n C: IntoVal<T>,\n\n D: DomainType<'a, T>,\n\n{\n\n map_2(a, b, c, |a, b| *a / *b, |a, c| *a / *c, |b, c| *b * *c)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::div;\n\n use crate::example::I32;\n\n use crate::{unify, util, var, Goal};\n\n\n\n #[test]\n\n fn succeeds() {\n\n let (x, y, z) = (var(), var(), var());\n", "file_path": "canrun/src/goals/ops/div.rs", "rank": 65, "score": 32105.66769158037 }, { "content": "/// Get the lesser of two values according to [`std::cmp::min`].\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::min;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 1),\n\n/// unify(y, 2),\n\n/// unify(z, 1),\n\n/// min(x, y, z),\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y, z)).collect();\n\n/// assert_eq!(results, vec![(1, 2, 1)]);\n\n/// ```\n\npub fn min<'a, T, A, B, C, D>(a: A, b: B, c: C) -> Goal<'a, D>\n\nwhere\n\n T: PartialOrd + UnifyIn<'a, D> + 'a,\n\n A: IntoVal<T>,\n\n B: IntoVal<T>,\n\n C: IntoVal<T>,\n\n D: DomainType<'a, T>,\n\n{\n\n let a = val!(a);\n\n let b = val!(b);\n\n let c = val!(c);\n\n either(\n\n both(unify(a.clone(), c.clone()), lte(a.clone(), b.clone())),\n\n // Using lte above and just lt below avoids multiple states when they are equal\n\n // I'm not 100% sure this will be generally correct\n\n both(unify(b.clone(), c), lt(b, a)),\n\n )\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "canrun/src/goals/cmp/min.rs", "rank": 66, "score": 32105.124094078652 }, { "content": "/// Get the greater of two values according to [`std::cmp::max`].\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::max;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// 
let goal: Goal<I32> = all![\n\n/// unify(x, 1),\n\n/// unify(y, 2),\n\n/// unify(z, 2),\n\n/// max(x, y, z),\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y, z)).collect();\n\n/// assert_eq!(results, vec![(1, 2, 2)]);\n\n/// ```\n\npub fn max<'a, T, A, B, C, D>(a: A, b: B, c: C) -> Goal<'a, D>\n\nwhere\n\n T: PartialOrd + UnifyIn<'a, D> + 'a,\n\n A: IntoVal<T>,\n\n B: IntoVal<T>,\n\n C: IntoVal<T>,\n\n D: DomainType<'a, T>,\n\n{\n\n let a = val!(a);\n\n let b = val!(b);\n\n let c = val!(c);\n\n either(\n\n both(unify(a.clone(), c.clone()), gte(a.clone(), b.clone())),\n\n // Using gte above and just gt below avoids multiple states when they are equal\n\n // I'm not 100% sure this will be generally correct\n\n both(unify(b.clone(), c), gt(b, a)),\n\n )\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "canrun/src/goals/cmp/max.rs", "rank": 67, "score": 32105.124094078652 }, { "content": "/// Create a [projection goal](super) that allows deriving one resolved value\n\n/// from the other two.\n\n///\n\n/// Functions must be provided to derive from any combination of two values.\n\n/// Whichever two are resolved first will be used to derive the other.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, all, unify, var, map_2};\n\n/// use canrun::example::I32;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(1, x),\n\n/// unify(2, y),\n\n/// map_2(x, y, z, |x, y| x + y, |x, z| z - x, |y, z| z - y),\n\n/// ];\n\n/// let result: Vec<_> = goal.query(z).collect();\n\n/// assert_eq!(result, vec![3])\n\n/// ```\n\npub fn map_2<'a, A, AV, B, BV, C, CV, D, ABtoC, ACtoB, BCtoA>(\n\n a: AV,\n\n b: BV,\n\n c: CV,\n\n ab_to_c: ABtoC,\n\n ac_to_b: ACtoB,\n\n bc_to_a: BCtoA,\n\n) -> Goal<'a, D>\n\nwhere\n\n A: UnifyIn<'a, D> + Debug + 'a,\n\n AV: IntoVal<A>,\n\n B: UnifyIn<'a, D> + Debug + 'a,\n\n BV: IntoVal<B>,\n\n C: UnifyIn<'a, D> + Debug + 'a,\n\n CV: IntoVal<C>,\n\n D: DomainType<'a, A> + DomainType<'a, B> + DomainType<'a, C>,\n\n 
ABtoC: Fn(&A, &B) -> C + 'a,\n\n ACtoB: Fn(&A, &C) -> B + 'a,\n\n BCtoA: Fn(&B, &C) -> A + 'a,\n\n{\n", "file_path": "canrun/src/goals/project/map_2.rs", "rank": 68, "score": 31364.05316779286 }, { "content": "/// Create a [projection goal](super) that succeeds if the resolved values pass\n\n/// an assertion test.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, all, unify, var, assert_2};\n\n/// use canrun::example::I32;\n\n///\n\n/// let (x, y) = (var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(1, x),\n\n/// unify(2, y),\n\n/// assert_2(x, y, |x, y| x < y),\n\n/// ];\n\n/// let result: Vec<_> = goal.query((x, y)).collect();\n\n/// assert_eq!(result, vec![(1, 2)])\n\n/// ```\n\npub fn assert_2<'a, A, AV, B, BV, D, F>(a: AV, b: BV, func: F) -> Goal<'a, D>\n\nwhere\n\n A: Debug + 'a,\n\n AV: IntoVal<A>,\n\n B: Debug + 'a,\n\n BV: IntoVal<B>,\n\n D: DomainType<'a, A> + DomainType<'a, B>,\n\n F: Fn(&A, &B) -> bool + 'a,\n\n{\n\n Goal::constraint(Assert2 {\n\n a: a.into_val(),\n\n b: b.into_val(),\n\n f: Rc::new(func),\n\n })\n\n}\n\n\n\nimpl<'a, A, B, Dom> Constraint<'a, Dom> for Assert2<'a, A, B>\n\nwhere\n\n A: Debug + 'a,\n\n B: Debug + 'a,\n", "file_path": "canrun/src/goals/project/assert_2.rs", "rank": 69, "score": 30060.720595187926 }, { "content": "/// Create a [projection goal](super) that allows creating a new goal based on\n\n/// the resolved values.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, all, unify, var, project_2};\n\n/// use canrun::example::I32;\n\n///\n\n/// let (x, y) = (var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(1, x),\n\n/// unify(2, y),\n\n/// project_2(x, y, |x, y| if x < y { Goal::succeed() } else { Goal::fail() }),\n\n/// ];\n\n/// let result: Vec<_> = goal.query((x, y)).collect();\n\n/// assert_eq!(result, vec![(1, 2)])\n\n/// ```\n\npub fn project_2<'a, A, AV, B, BV, D, F>(a: AV, b: BV, func: F) -> Goal<'a, D>\n\nwhere\n\n A: Debug + 'a,\n\n AV: IntoVal<A>,\n\n B: Debug + 'a,\n\n BV: IntoVal<B>,\n\n D: 
DomainType<'a, A> + DomainType<'a, B>,\n\n F: Fn(Rc<A>, Rc<B>) -> Goal<'a, D> + 'a,\n\n{\n\n Goal::constraint(Project2 {\n\n a: a.into_val(),\n\n b: b.into_val(),\n\n f: Rc::new(func),\n\n })\n\n}\n\n\n\nimpl<'a, A, B, Dom> Constraint<'a, Dom> for Project2<'a, A, B, Dom>\n\nwhere\n\n A: Debug,\n\n B: Debug,\n", "file_path": "canrun/src/goals/project/project_2.rs", "rank": 70, "score": 30060.353481156926 }, { "content": "/// Fork a [`State`] into zero or more alternate states.\n\n///\n\n/// Added to a [`State`] with [`.fork()`](crate::state::State::fork()).\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{val, var, Fork, Query, State, StateIter, Val};\n\n/// use canrun::example::I32;\n\n/// use std::rc::Rc;\n\n///\n\n/// #[derive(Debug)]\n\n/// struct Is1or2 {\n\n/// x: Val<i32>,\n\n/// }\n\n///\n\n/// impl<'a> Fork<'a, I32> for Is1or2 {\n\n/// fn fork(&self, state: State<'a, I32>) -> StateIter<'a, I32> {\n\n/// let s1 = state.clone().unify(&self.x, &val!(1));\n\n/// let s2 = state.unify(&self.x, &val!(2));\n\n/// Box::new(s1.into_iter().chain(s2.into_iter()))\n\n/// }\n\n/// }\n\n///\n\n/// # fn main() {\n\n/// let x = var();\n\n/// let state: State<I32> = State::new();\n\n/// let state = state.fork(Rc::new(Is1or2 { x: val!(x) }));\n\n/// let results: Vec<i32> = state.query(x).collect();\n\n/// assert_eq!(results, vec![1, 2]);\n\n/// # }\n\n/// ```\n\npub trait Fork<'a, D: Domain<'a>>: Debug {\n\n /// Given a [`State`], return an iterator of states that result from the\n\n /// fork operation.\n\n fn fork(&self, state: State<'a, D>) -> StateIter<'a, D>;\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::example::I32;\n\n use crate::{val, var, Fork, Query, State, StateIter, Val};\n\n use std::rc::Rc;\n\n\n\n #[derive(Debug)]\n\n struct Is1or2 {\n\n x: Val<i32>,\n\n }\n\n\n\n impl<'a> Fork<'a, I32> for Is1or2 {\n\n fn fork(&self, state: State<'a, I32>) -> StateIter<'a, I32> {\n\n let s1 = state.clone().unify(&self.x, &val!(1));\n", "file_path": 
"canrun/src/state.rs", "rank": 72, "score": 16.25334952800974 }, { "content": "/// Update a [`State`] whenever one or more [`LVar`]s are resolved.\n\n///\n\n/// The [`Constraint::attempt`] function will be run when it is initially added.\n\n/// Returning a `Err([`VarWatch`])` signals that the constraint is not\n\n/// satisfied. It will be re-run when one of the specified variables is bound to\n\n/// another value.\n\n///\n\n/// You probably want the higher level [goal projection](crate::goals::project)\n\n/// functions.\n\n///\n\n/// # NOTE:\n\n/// The [`attempt`](Constraint::attempt) function must take care to [fully\n\n/// resolve](State::resolve_val) any variables before requesting that they be\n\n/// watched. The [`resolve_1`], [`resolve_2`], [`OneOfTwo`] and [`TwoOfThree`]\n\n/// helpers can simplify handling this (plus returning a [`VarWatch`]).\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{State, Query, Val, val, var, DomainType};\n\n/// use canrun::state::constraints::{Constraint, resolve_1, ResolveFn, VarWatch};\n\n/// use canrun::example::I32;\n\n/// use std::rc::Rc;\n\n/// use std::fmt;\n\n///\n\n/// struct Assert<'a, T: fmt::Debug> {\n\n/// val: Val<T>,\n\n/// assert: Rc<dyn Fn(&T) -> bool + 'a>,\n\n/// }\n\n///\n\n/// impl<'a, T: fmt::Debug> fmt::Debug for Assert<'a, T> {\n\n/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n/// write!(f, \"Assert({:?})\", self.val)\n\n/// }\n\n/// }\n\n///\n\n/// impl<'a, T, D> Constraint<'a, D> for Assert<'a, T>\n\n/// where\n\n/// T: fmt::Debug + 'a,\n\n/// D: DomainType<'a, T>,\n\n/// {\n\n/// fn attempt(&self, state: &State<'a, D>) -> Result<ResolveFn<'a, D>, VarWatch> {\n\n/// let resolved = resolve_1(&self.val, state)?;\n\n/// let assert = self.assert.clone();\n\n/// Ok(Box::new(\n\n/// move |state: State<'a, D>| if assert(&*resolved) { Some(state) } else { None },\n\n/// ))\n\n/// }\n\n/// }\n\n///\n\n/// # fn test() -> Option<()> {\n\n/// let x = var();\n\n///\n\n/// let state: 
State<I32> = State::new();\n\n/// let state = state.constrain(Rc::new(Assert {val: val!(x), assert: Rc::new(|x| x > &1)}));\n\n/// let state = state?.unify(&val!(x), &val!(2));\n\n///\n\n/// let results: Vec<i32> = state.query(x).collect();\n\n/// assert_eq!(results, vec![2]);\n\n/// # Some(())\n\n/// # }\n\n/// # test();\n\n/// ```\n\npub trait Constraint<'a, D>: Debug\n\nwhere\n\n D: Domain<'a>,\n\n{\n\n /// Resolve required variables in a state and resubscribe or request to\n\n /// update the state.\n\n fn attempt(&self, state: &State<'a, D>) -> Result<ResolveFn<'a, D>, VarWatch>;\n\n}\n\n\n\n/// A set of variables to watch on behalf of a [constraint\n\n/// object](crate::state::State::constrain()).\n\n///\n\n/// Consider generating this with the [`resolve_1`], [`resolve_2`], [`OneOfTwo`]\n\n/// or [`TwoOfThree`] helpers.\n\n#[derive(Debug)]\n\npub struct VarWatch(pub(crate) Vec<LVarId>);\n\n\n\nimpl VarWatch {\n\n /// Watch one [`LVar`] for changes in a [`Constraint`].\n\n pub fn one<A>(a: LVar<A>) -> Self {\n\n VarWatch(vec![a.id])\n\n }\n\n\n\n /// Watch two [`LVar`]s for changes in a [`Constraint`].\n\n pub fn two<A, B>(a: LVar<A>, b: LVar<B>) -> Self {\n\n VarWatch(vec![a.id, b.id])\n\n }\n\n}\n\n\n", "file_path": "canrun/src/state/constraints.rs", "rank": 73, "score": 15.097876361335356 }, { "content": "use crate::domains::DomainType;\n\nuse crate::goals::Goal;\n\nuse crate::state::constraints::{resolve_1, Constraint, ResolveFn, VarWatch};\n\nuse crate::state::State;\n\nuse crate::value::{IntoVal, Val};\n\nuse std::fmt;\n\nuse std::fmt::Debug;\n\nuse std::rc::Rc;\n\n\n\npub struct Assert1<'a, A: Debug> {\n\n a: Val<A>,\n\n f: Rc<dyn Fn(&A) -> bool + 'a>,\n\n}\n\n\n\n/// Create a [projection goal](super) that succeeds if the resolved value passes\n\n/// an assertion test.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, both, unify, var, assert_1};\n\n/// use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = both(unify(1, x), 
assert_1(x, |x| *x < 2));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1])\n\n/// ```\n", "file_path": "canrun/src/goals/project/assert_1.rs", "rank": 74, "score": 14.917905656083317 }, { "content": "use crate::domains::DomainType;\n\nuse crate::goals::Goal;\n\nuse crate::state::constraints::{resolve_2, Constraint, ResolveFn, VarWatch};\n\nuse crate::state::State;\n\nuse crate::value::{IntoVal, Val};\n\nuse std::fmt;\n\nuse std::fmt::Debug;\n\nuse std::rc::Rc;\n\n\n\npub struct Assert2<'a, A: Debug, B: Debug> {\n\n a: Val<A>,\n\n b: Val<B>,\n\n f: Rc<dyn Fn(&A, &B) -> bool + 'a>,\n\n}\n\n\n\n/// Create a [projection goal](super) that succeeds if the resolved values pass\n\n/// an assertion test.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, all, unify, var, assert_2};\n", "file_path": "canrun/src/goals/project/assert_2.rs", "rank": 75, "score": 12.896747727987604 }, { "content": "use crate::domains::DomainType;\n\nuse crate::state::constraints::{resolve_1, Constraint, ResolveFn, VarWatch};\n\nuse crate::value::{IntoVal, Val};\n\nuse crate::{Goal, State};\n\nuse std::fmt;\n\nuse std::fmt::Debug;\n\nuse std::rc::Rc;\n\n\n\npub struct Project1<'a, A: Debug, D: DomainType<'a, A>> {\n\n a: Val<A>,\n\n f: Rc<dyn Fn(&A) -> Goal<'a, D> + 'a>,\n\n}\n\n\n\n/// Create a [projection goal](super) that allows creating a new goal based on\n\n/// the resolved value.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, both, unify, var, project_1};\n\n/// use canrun::example::I32;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = both(unify(1, x), project_1(x, |x| if *x < 2 { Goal::succeed() } else { Goal::fail() }));\n\n/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![1])\n\n/// ```\n", "file_path": "canrun/src/goals/project/project_1.rs", "rank": 76, "score": 12.488327796967651 }, { "content": "use crate::goals::Goal;\n\nuse crate::state::constraints::{Constraint, OneOfTwo, ResolveFn, VarWatch};\n\nuse 
crate::DomainType;\n\nuse crate::State;\n\nuse crate::UnifyIn;\n\nuse crate::{IntoVal, Val};\n\nuse std::fmt;\n\nuse std::fmt::Debug;\n\nuse std::rc::Rc;\n\n\n\n/// Create a [projection goal](super) that allows deriving one resolved value\n\n/// from the other.\n\n///\n\n/// Functions must be provided to derive in both directions. Whichever value is\n\n/// resolved first will be used to derive the other.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, all, unify, var, map_1};\n\n/// use canrun::example::I32;\n\n///\n\n/// let (x, y) = (var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(1, x),\n\n/// map_1(x, y, |x| x + 1, |y| y - 1),\n\n/// ];\n\n/// let result: Vec<_> = goal.query(y).collect();\n\n/// assert_eq!(result, vec![2])\n\n/// ```\n", "file_path": "canrun/src/goals/project/map_1.rs", "rank": 77, "score": 12.24495383118683 }, { "content": "use crate::goals::{Goal, GoalEnum};\n\nuse crate::state::constraints::{Constraint, ResolveFn, TwoOfThree, VarWatch};\n\nuse crate::state::State;\n\nuse crate::value::{IntoVal, Val};\n\nuse crate::DomainType;\n\nuse crate::UnifyIn;\n\nuse std::fmt;\n\nuse std::fmt::Debug;\n\nuse std::rc::Rc;\n\n\n\n/// Create a [projection goal](super) that allows deriving one resolved value\n\n/// from the other two.\n\n///\n\n/// Functions must be provided to derive from any combination of two values.\n\n/// Whichever two are resolved first will be used to derive the other.\n\n///\n\n/// ```\n\n/// use canrun::{Goal, all, unify, var, map_2};\n\n/// use canrun::example::I32;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(1, x),\n\n/// unify(2, y),\n\n/// map_2(x, y, z, |x, y| x + y, |x, z| z - x, |y, z| z - y),\n\n/// ];\n\n/// let result: Vec<_> = goal.query(z).collect();\n\n/// assert_eq!(result, vec![3])\n\n/// ```\n", "file_path": "canrun/src/goals/project/map_2.rs", "rank": 78, "score": 11.920777881616699 }, { "content": "use super::{Goal, GoalEnum};\n\nuse 
crate::domains::Domain;\n\nuse crate::state::State;\n\nuse std::fmt;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n\npub struct Lazy<'a, D: Domain<'a>>(Rc<dyn Fn() -> Goal<'a, D> + 'a>);\n\n\n\nimpl<'a, D: Domain<'a>> Lazy<'a, D> {\n\n pub(crate) fn run(self, state: State<'a, D>) -> Option<State<'a, D>>\n\n where\n\n D: Domain<'a>,\n\n {\n\n let func = self.0;\n\n let goal = func();\n\n goal.apply(state)\n\n }\n\n}\n\n\n", "file_path": "canrun/src/goals/lazy.rs", "rank": 79, "score": 11.87422272568411 }, { "content": "/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![]) // Empty result\n\n/// ```\n\n#[macro_export]\n\nmacro_rules! any {\n\n ($($item:expr),* $(,)?) => {\n\n canrun::Goal::any(vec![$($item),*])\n\n };\n\n}\n\npub use any;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::any;\n\n use crate as canrun;\n\n use crate::example::I32;\n\n use crate::goals::unify::unify;\n\n use crate::goals::Goal;\n\n use crate::util;\n\n use crate::value::var;\n", "file_path": "canrun/src/goals/any.rs", "rank": 81, "score": 11.828576672134968 }, { "content": "/// let result: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(result, vec![]) // Empty result\n\n/// ```\n\n#[macro_export]\n\nmacro_rules! all {\n\n ($($item:expr),* $(,)?) 
=> {\n\n canrun::goals::Goal::all(vec![$($item),*])\n\n };\n\n}\n\npub use all;\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::all;\n\n use crate as canrun;\n\n use crate::example::I32;\n\n use crate::goals::unify::unify;\n\n use crate::goals::Goal;\n\n use crate::util;\n\n use crate::value::var;\n", "file_path": "canrun/src/goals/all.rs", "rank": 82, "score": 11.789997368665276 }, { "content": "use super::{Goal, GoalEnum};\n\nuse crate::domains::Domain;\n\nuse crate::state::State;\n\nuse std::fmt;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n\npub struct Custom<'a, D: Domain<'a>>(Rc<dyn Fn(State<'a, D>) -> Option<State<'a, D>> + 'a>);\n\n\n\nimpl<'a, D: Domain<'a>> Custom<'a, D> {\n\n pub(crate) fn run(self, state: State<'a, D>) -> Option<State<'a, D>>\n\n where\n\n D: Domain<'a>,\n\n {\n\n let func = self.0;\n\n func(state)\n\n }\n\n}\n\n\n\n/// Create a [goal](crate::goals::Goal) that gives access to the underlying\n", "file_path": "canrun/src/goals/custom.rs", "rank": 83, "score": 11.655732574919938 }, { "content": "use crate::goals::{unify, Goal};\n\nuse crate::lvec::LVec;\n\nuse crate::state::{\n\n constraints::{resolve_2, Constraint, ResolveFn, VarWatch},\n\n State,\n\n};\n\nuse crate::value::{val, IntoVal, Val};\n\nuse crate::{DomainType, UnifyIn};\n\nuse std::fmt::Debug;\n\nuse std::iter::repeat;\n\n\n\n/// Create a [`Goal`] that attempts to unify a `Val<T>` with\n\n/// any of the items in a `LVec<T>`.\n\n///\n\n/// This goal will fork the state for each match found.\n\n/// # Examples:\n\n/// ```\n\n/// use canrun::{Goal, val, var, all, unify, lvec, example::Collections};\n\n///\n\n/// let needle = var();\n\n/// let haystack = var();\n\n/// let goal: Goal<Collections> = all![\n\n/// unify(needle, lvec![1]),\n\n/// unify(haystack, lvec![1, 2, 3]),\n\n/// lvec::subset(needle, haystack),\n\n/// ];\n\n/// let results: Vec<_> = goal.query(needle).collect();\n\n/// assert_eq!(results, vec![vec![1]]);\n\n/// ```\n", "file_path": 
"canrun/src/collections/lvec/subset.rs", "rank": 84, "score": 11.51431662631282 }, { "content": "/// Helper for converting into [`Val<T>`](crate::value::Val).\n\n///\n\n/// In order to be able to mix [resolved values](crate::value::Val) and [logical\n\n/// variables](crate::value::LVar) in the same [state](crate::state), they need\n\n/// to be contained in the shared [`Val`](crate::value::Val) enum. This trait\n\n/// provides a standard way to convert various types of values into this\n\n/// container enum without manual wrapping.\n\n///\n\n/// # TLDR: If you see a function that takes `IntoVal<T>`\n\n/// ```\n\n/// # use canrun::{Val, IntoVal};\n\n/// # use std::fmt::Debug;\n\n/// fn foo<T: Debug, TV: IntoVal<T>>(bar: TV) -> Val<T> {\n\n/// bar.into_val()\n\n/// }\n\n/// ```\n\n/// That means it can take any of these types and will take care of converting\n\n/// them into a `Val<T>` for you:\n\n/// ```\n\n/// # use canrun::{Val, var, IntoVal};\n\n/// # use std::fmt::Debug;\n\n/// # fn foo<T: Debug, TV: IntoVal<T>>(bar: TV) -> Val<T> {\n\n/// # bar.into_val()\n\n/// # }\n\n/// let a: Val<i32> = foo(1); // a plain value of type `T`\n\n/// let b: Val<i32> = foo(var()); // an `LVar<T>`\n\n/// let c: Val<i32> = foo(a); // a `Val<T>`\n\n/// ```\n\npub trait IntoVal<T: Debug> {\n\n /// Convert various `T` related values into a [`Val<T>`](crate::value::Val).\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// use canrun::{var, IntoVal, Val, LVar};\n\n ///\n\n /// let x: LVar<i32> = var();\n\n /// let x_val: Val<i32> = x.into_val();\n\n ///\n\n /// let y: i32 = 1;\n\n /// let y_val: Val<i32> = y.into_val();\n\n /// ```\n\n fn into_val(self) -> Val<T>;\n\n}\n\n\n\nimpl<T: Debug> IntoVal<T> for T {\n\n fn into_val(self) -> Val<T> {\n\n Val::Resolved(Rc::new(self))\n\n }\n", "file_path": "canrun/src/value/into_val.rs", "rank": 85, "score": 10.984006108461607 }, { "content": " use crate::util;\n\n use crate::value::var;\n\n\n\n #[test]\n\n fn succeeds() {\n\n let x = var();\n\n let 
goal: Goal<I32> = lazy(|| unify(x, 1));\n\n let results = util::goal_resolves_to(goal, x);\n\n assert_eq!(results, vec![1]);\n\n }\n\n}\n", "file_path": "canrun/src/goals/lazy.rs", "rank": 86, "score": 10.943849872065254 }, { "content": "use crate::goals::Goal;\n\nuse crate::map_2;\n\nuse crate::value::IntoVal;\n\nuse crate::{DomainType, UnifyIn};\n\nuse std::ops::{Div, Mul};\n\n\n\n/// Divide one value with another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::ops::div;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = div(3, 2, x);\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![1]);\n\n/// ```\n", "file_path": "canrun/src/goals/ops/div.rs", "rank": 87, "score": 10.937297476595086 }, { "content": "use crate::goals::Goal;\n\nuse crate::map_2;\n\nuse crate::value::IntoVal;\n\nuse crate::{DomainType, UnifyIn};\n\nuse std::ops::{Div, Mul};\n\n\n\n/// Multiply two values together.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::ops::mul;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = mul(2, 3, x);\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![6]);\n\n/// ```\n", "file_path": "canrun/src/goals/ops/mul.rs", "rank": 88, "score": 10.937297476595086 }, { "content": "use crate::goals::Goal;\n\nuse crate::map_2;\n\nuse crate::value::IntoVal;\n\nuse crate::{DomainType, UnifyIn};\n\nuse std::ops::{Add, Sub};\n\n\n\n/// Add two values together.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::ops::add;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = add(1, 2, x);\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![3]);\n\n/// ```\n", "file_path": "canrun/src/goals/ops/add.rs", "rank": 
89, "score": 10.937297476595086 }, { "content": "use crate::goals::Goal;\n\nuse crate::map_2;\n\nuse crate::value::IntoVal;\n\nuse crate::{DomainType, UnifyIn};\n\nuse std::ops::{Add, Sub};\n\n\n\n/// Subtract one value from another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::ops::sub;\n\n///\n\n/// let x = var();\n\n/// let goal: Goal<I32> = sub(3, 2, x);\n\n/// let results: Vec<_> = goal.query(x).collect();\n\n/// assert_eq!(results, vec![1]);\n\n/// ```\n", "file_path": "canrun/src/goals/ops/sub.rs", "rank": 90, "score": 10.937297476595086 }, { "content": "use crate::assert_2;\n\nuse crate::goals::Goal;\n\nuse crate::value::IntoVal;\n\nuse crate::DomainType;\n\nuse std::fmt::Debug;\n\n\n\n/// Ensure that one value is less than another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::lt;\n\n///\n\n/// let (x, y) = (var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 1),\n\n/// unify(y, 2),\n\n/// lt(x, y)\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y)).collect();\n\n/// assert_eq!(results, vec![(1, 2)]);\n\n/// ```\n", "file_path": "canrun/src/goals/cmp/lt.rs", "rank": 91, "score": 10.894057867503538 }, { "content": "/// assert: Rc<dyn Fn(&T) -> bool + 'a>,\n\n/// }\n\n///\n\n/// impl<'a, T: fmt::Debug> fmt::Debug for Assert<'a, T> {\n\n/// fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n/// write!(f, \"Assert({:?})\", self.val)\n\n/// }\n\n/// }\n\n///\n\n/// impl<'a, T, D> Constraint<'a, D> for Assert<'a, T>\n\n/// where\n\n/// T: fmt::Debug + 'a,\n\n/// D: DomainType<'a, T>,\n\n/// {\n\n/// fn attempt(&self, state: &State<'a, D>) -> Result<ResolveFn<'a, D>, VarWatch> {\n\n/// let resolved = resolve_1(&self.val, state)?;\n\n/// let assert = self.assert.clone();\n\n/// Ok(Box::new(\n\n/// move |state: State<'a, D>| if assert(&*resolved) { 
Some(state) } else { None },\n\n/// ))\n", "file_path": "canrun/src/state/constraints.rs", "rank": 92, "score": 10.884642504167136 }, { "content": "use crate::domains::DomainType;\n\nuse crate::example::I32;\n\nuse crate::goals::custom;\n\nuse crate::goals::unify;\n\nuse crate::goals::Goal;\n\nuse crate::state::constraints::{Constraint, ResolveFn, VarWatch};\n\nuse crate::state::State;\n\nuse crate::util;\n\nuse crate::value::{\n\n val, var, IntoVal, Val,\n\n Val::{Resolved, Var},\n\n};\n\nuse std::fmt;\n\nuse std::fmt::Debug;\n\nuse std::rc::Rc;\n\n\n", "file_path": "canrun/src/tests/test_constrain.rs", "rank": 93, "score": 10.878146488951025 }, { "content": "use crate::goals::assert_2;\n\nuse crate::goals::Goal;\n\nuse crate::value::IntoVal;\n\nuse crate::DomainType;\n\nuse std::fmt::Debug;\n\n\n\n/// Ensure that one value is greater than another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::gt;\n\n///\n\n/// let (x, y) = (var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 2),\n\n/// unify(y, 1),\n\n/// gt(x, y)\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y)).collect();\n\n/// assert_eq!(results, vec![(2, 1)]);\n\n/// ```\n", "file_path": "canrun/src/goals/cmp/gt.rs", "rank": 94, "score": 10.85121274300454 }, { "content": "use crate::cmp::{lt, lte};\n\nuse crate::goals::Goal;\n\nuse crate::value::IntoVal;\n\nuse crate::{both, either, unify, val};\n\nuse crate::{DomainType, UnifyIn};\n\n\n\n/// Get the lesser of two values according to [`std::cmp::min`].\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::min;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 1),\n\n/// unify(y, 2),\n\n/// unify(z, 1),\n\n/// min(x, y, z),\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y, z)).collect();\n\n/// 
assert_eq!(results, vec![(1, 2, 1)]);\n\n/// ```\n", "file_path": "canrun/src/goals/cmp/min.rs", "rank": 95, "score": 10.78611354928911 }, { "content": "use crate::cmp::{gt, gte};\n\nuse crate::goals::Goal;\n\nuse crate::value::IntoVal;\n\nuse crate::{both, either, unify, val};\n\nuse crate::{DomainType, UnifyIn};\n\n\n\n/// Get the greater of two values according to [`std::cmp::max`].\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::max;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 1),\n\n/// unify(y, 2),\n\n/// unify(z, 2),\n\n/// max(x, y, z),\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y, z)).collect();\n\n/// assert_eq!(results, vec![(1, 2, 2)]);\n\n/// ```\n", "file_path": "canrun/src/goals/cmp/max.rs", "rank": 96, "score": 10.78611354928911 }, { "content": "use crate::assert_2;\n\nuse crate::goals::Goal;\n\nuse crate::value::IntoVal;\n\nuse crate::DomainType;\n\nuse std::fmt::Debug;\n\n\n\n/// Ensure that one value is greater than or equal to another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use canrun::example::I32;\n\n/// use canrun::cmp::gte;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 2),\n\n/// unify(y, 1),\n\n/// unify(z, 1),\n\n/// gte(x, y),\n\n/// gte(y, z),\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y, z)).collect();\n\n/// assert_eq!(results, vec![(2, 1, 1)]);\n\n/// ```\n", "file_path": "canrun/src/goals/cmp/gte.rs", "rank": 97, "score": 10.766682094879233 }, { "content": "use crate::assert_2;\n\nuse crate::goals::Goal;\n\nuse crate::value::IntoVal;\n\nuse crate::DomainType;\n\nuse std::fmt::Debug;\n\n\n\n/// Ensure that one value is less than or equal to another.\n\n///\n\n/// # Example:\n\n/// ```\n\n/// use canrun::{unify, util, var, all, Goal};\n\n/// use 
canrun::example::I32;\n\n/// use canrun::cmp::lte;\n\n///\n\n/// let (x, y, z) = (var(), var(), var());\n\n/// let goal: Goal<I32> = all![\n\n/// unify(x, 1),\n\n/// unify(y, 2),\n\n/// unify(z, 2),\n\n/// lte(x, y),\n\n/// lte(y, z),\n\n/// ];\n\n/// let results: Vec<_> = goal.query((x, y, z)).collect();\n\n/// assert_eq!(results, vec![(1, 2, 2)]);\n\n/// ```\n", "file_path": "canrun/src/goals/cmp/lte.rs", "rank": 98, "score": 10.766682094879233 }, { "content": "use crate::domains::Domain;\n\nuse crate::state::IterResolved;\n\nuse crate::ReifyIn;\n\n\n\n/// Derive [reified](crate::value::ReifyIn) [values](crate::value) potential\n\n/// resolved states.\n\n///\n\n/// [Query] is implemented for [`Goals`](crate::goals) and\n\n/// [`States`](crate::State) (and other types), meaning you can call\n\n/// [`.query()`](Query::query()) on them with any type that implements\n\n/// [ReifyIn](crate::value::ReifyIn) for a matching [Domain].\n\n///\n\n/// This is a convenient wrapper around the pattern of iterating over a sequence\n\n/// of [`ResolvedStates`](crate::ResolvedState) and calling\n\n/// [`state.reify(query)`](crate::ResolvedState::reify()) and returning only the\n\n/// valid, fully resolved results. Query is implemented on a variety of\n\n/// [`State`](crate::State) related types, allowing it to be used in many\n\n/// contexts.\n\n///\n\n/// A blanket impl covers anything that implements [`IterResolved`], so many\n\n/// types including [`Goal`](crate::goals) and [`State`](crate::State) are\n\n/// queryable.\n", "file_path": "canrun/src/query.rs", "rank": 99, "score": 10.599578742346274 } ]
Rust
src/str_utils.rs
anekos/eitaro
21b0c4355c7c994b9175e205ee9e19fb026633ab
use heck::SnakeCase; use kana::wide2ascii; use regex::Regex; #[derive(Clone, Copy)] pub enum WordType { English, Katakana, } pub fn simple_words_pattern() -> Regex { Regex::new(r"[a-zA-Z]+").unwrap() } pub fn fix_word(s: &str) -> Option<String> { let s = wide2ascii(s); let s = s.to_lowercase().replace('ー', ""); if s.is_empty() { None } else { Some(s) } } pub fn scan_words(word_type: WordType, s: &str) -> Vec<String> { let mut result = vec![]; let mut in_word = false; let mut index = 0; let mut left = 0; let mut right = 0; let is_word_char = get_is_word_char(word_type); for c in s.chars() { let space = c == ' '; if in_word ^ (is_word_char(c) || (in_word && space)) { in_word = !in_word; if in_word { left = index; } else if left < right { extract_patterns(&s[left..right], &mut result); } } index += c.len_utf8(); if in_word && !space { right = index; } } if in_word && left < right { extract_patterns(&s[left..right], &mut result); } result } pub fn shorten(s: &str) -> Vec<&str> { let mut result = vec![]; let mut index = 0; let mut left = 0; let mut in_word = false; let mut first = true; for c in s.chars() { if in_word ^ (c != ' ') { in_word = !in_word; if in_word { if first { left = index; first = false; } } else if left < index { result.push(&s[left..index]); } } index += c.len_utf8(); } if in_word && left < index { result.push(&s[left..index]); } result.reverse(); result } pub fn uncase(s: &str) -> String { s.to_snake_case().replace('_', " ") } fn extract_patterns(s: &str, result: &mut Vec<String>) { if let Some(l) = s.find('(') { if let Some(r) = s.find(')') { extract_patterns(&format!("{}{}", &s[0..l], &s[r+1..]), result); extract_patterns(&format!("{}{}{}", &s[0..l], &s[l+1..r], &s[r+1..]), result); } else { extract_patterns(&s[0..l].to_owned(), result); } } else { result.push(s.to_owned()); } } fn get_is_word_char(word_type: WordType) -> fn(char) -> bool { match word_type { WordType::English => is_word_char_english, WordType::Katakana => is_word_char_katakana, } } 
fn is_word_char_english(c: char) -> bool { c.is_ascii() && c.is_alphanumeric() || c == '-' || c == '\'' || c == '(' || c == ')' } fn is_word_char_katakana(c: char) -> bool { !c.is_ascii() && c.is_alphabetic() } #[cfg(test)]#[test] fn test_scan_words() { use self::WordType::*; assert_eq!(scan_words(English, " foo キャット bar 猫"), vec!["foo", "bar"]); assert_eq!(scan_words(English, " foo キャット bar "), vec!["foo", "bar"]); assert_eq!(scan_words(English, " foo、キャット・bar=猫 "), vec!["foo", "bar"]); assert_eq!(scan_words(English, " foo-bar "), vec!["foo-bar"]); assert_eq!(scan_words(English, "【変化】動 drives | driving | drove | driven"), vec!["drives", "driving", "drove", "driven"]); assert_eq!(scan_words(English, "【変化】動 foo bar | food bar | foolish bar"), vec!["foo bar", "food bar", "foolish bar"]); assert_eq!(scan_words(English, "【変化】 複 affairs、【文節】..."), vec!["affairs"]); assert_eq!(scan_words(Katakana, "アカムパニ、アカンパニ、アコンパニ、"), vec!["アカムパニ", "アカンパニ", "アコンパニ"]); assert_eq!(scan_words(Katakana, " foo-bar "), Vec::<&str>::new()); assert_eq!(scan_words(English, " f(o)o キャット bar 猫"), vec!["fo", "foo", "bar"]); } #[cfg(test)]#[test] fn test_patterns() { fn ps(s: &str) -> Vec<String> { let mut result = vec![]; extract_patterns(s, &mut result); result } assert_eq!(ps("ana(a)l nathrakh"), vec!["anal nathrakh".to_owned(), "anaal nathrakh".to_owned()]); assert_eq!( ps("ab(c)de(f)g"), vec![ "abdeg".to_owned(), "abdefg".to_owned(), "abcdeg".to_owned(), "abcdefg".to_owned()]); } #[cfg(test)]#[test] fn test_shortens() { assert_eq!( shorten("the cat of hell"), vec![ "the cat of hell".to_owned(), "the cat of".to_owned(), "the cat".to_owned(), "the".to_owned() ]); assert_eq!( shorten(" the cat of hell"), vec![ "the cat of hell".to_owned(), "the cat of".to_owned(), "the cat".to_owned(), "the".to_owned() ]); assert_eq!( shorten(" the cat of hell "), vec![ "the cat of hell".to_owned(), "the cat of".to_owned(), "the cat".to_owned(), "the".to_owned() ]); assert_eq!( shorten(" the cat of hell "), vec![ 
"the cat of hell".to_owned(), "the cat of".to_owned(), "the cat".to_owned(), "the".to_owned() ]); }
use heck::SnakeCase; use kana::wide2ascii; use regex::Regex; #[derive(Clone, Copy)] pub enum WordType { English, Katakana, } pub fn simple_words_pattern() -> Regex { Regex::new(r"[a-zA-Z]+").unwrap() } pub fn fix_word(s: &str) -> Option<String> { let s = wide2ascii(s); let s = s.to_lowercase().replace('ー', ""); if s.is_empty() { None } else { Some(s) } } pub fn scan_words(word_type: WordType, s: &str) -> Vec<String> { let mut result = vec![]; let mut in_word = false; let mut index = 0; let mut left = 0; let mut right = 0; let is_word_char = get_is_word_char(word_type); for c in s.chars() { let space = c == ' '; if in_word ^ (is_word_char(c) || (in_word && space)) { in_word = !in_word; if in_word { left = index; } else if left < right { extract_patterns(&s[left..right], &mut result); } } index += c.len_utf8(); if in_word && !space { right = index; } } if in_word && left < right { extract_patterns(&s[left..right], &mut result); } result } pub fn shorten(s: &str) -> Vec<&str> { let mut result = vec![]; let mut index = 0; let mut left = 0; let mut in_word = false; let mut first = true; for c in s.chars() { if in_word ^ (c != ' ') { in_word = !in_word; if in_word { if first { left = index; first = false; } } else if left < index { result.push(&s[left..index]); } } index += c.len_utf8(); } if in_word && left < index { result.push(&s[left..index]); } result.reverse(); result } pub fn uncase(s: &str) -> String { s.to_snake_case().replace('_', " ") } fn extract_patterns(s: &str, result: &mut Vec<String>) { if let Some(l) = s.find('(') { if let Some(r) = s.find(')') { extract_patterns(&format!("{}{}", &s[0..l], &s[r+1..]), result); extract_patterns(&format!("{}{}{}", &s[0..l], &s[l+1..r], &s[r+1..]), result); } else { extract_patterns(&s[0..l].to_owned(), result); } } else { result.push(s.to_owned()); } } fn get_is_word_char(word_type: WordType) -> fn(char) -> bool { match word_type { WordType::English => is_word_char_english, WordType::Katakana => is_word_char_katakana, } } 
fn is_word_char_english(c: char) -> bool { c.is_ascii() && c.is_alphanumeric() || c == '-' || c == '\'' || c == '(' || c == ')' } fn is_word_char_katakana(c: char) -> bool { !c.is_ascii() && c.is_alphabetic() } #[cfg(test)]#[test] fn test_scan_words() { use self::WordType::*; assert_eq!(scan_words(English, " foo キャット bar 猫"), vec!["foo", "bar"]); assert_eq!(scan_words(English, "
sert_eq!(scan_words(Katakana, " foo-bar "), Vec::<&str>::new()); assert_eq!(scan_words(English, " f(o)o キャット bar 猫"), vec!["fo", "foo", "bar"]); } #[cfg(test)]#[test] fn test_patterns() { fn ps(s: &str) -> Vec<String> { let mut result = vec![]; extract_patterns(s, &mut result); result } assert_eq!(ps("ana(a)l nathrakh"), vec!["anal nathrakh".to_owned(), "anaal nathrakh".to_owned()]); assert_eq!( ps("ab(c)de(f)g"), vec![ "abdeg".to_owned(), "abdefg".to_owned(), "abcdeg".to_owned(), "abcdefg".to_owned()]); } #[cfg(test)]#[test] fn test_shortens() { assert_eq!( shorten("the cat of hell"), vec![ "the cat of hell".to_owned(), "the cat of".to_owned(), "the cat".to_owned(), "the".to_owned() ]); assert_eq!( shorten(" the cat of hell"), vec![ "the cat of hell".to_owned(), "the cat of".to_owned(), "the cat".to_owned(), "the".to_owned() ]); assert_eq!( shorten(" the cat of hell "), vec![ "the cat of hell".to_owned(), "the cat of".to_owned(), "the cat".to_owned(), "the".to_owned() ]); assert_eq!( shorten(" the cat of hell "), vec![ "the cat of hell".to_owned(), "the cat of".to_owned(), "the cat".to_owned(), "the".to_owned() ]); }
foo キャット bar "), vec!["foo", "bar"]); assert_eq!(scan_words(English, " foo、キャット・bar=猫 "), vec!["foo", "bar"]); assert_eq!(scan_words(English, " foo-bar "), vec!["foo-bar"]); assert_eq!(scan_words(English, "【変化】動 drives | driving | drove | driven"), vec!["drives", "driving", "drove", "driven"]); assert_eq!(scan_words(English, "【変化】動 foo bar | food bar | foolish bar"), vec!["foo bar", "food bar", "foolish bar"]); assert_eq!(scan_words(English, "【変化】 複 affairs、【文節】..."), vec!["affairs"]); assert_eq!(scan_words(Katakana, "アカムパニ、アカンパニ、アコンパニ、"), vec!["アカムパニ", "アカンパニ", "アコンパニ"]); as
random
[ { "content": "pub fn v2s(s: Vec<char>) -> String {\n\n let s: String = s.into_iter().collect();\n\n s.trim().to_owned()\n\n}\n", "file_path": "src/parser/utils.rs", "rank": 4, "score": 249985.30886171234 }, { "content": "fn extract_text(dictionary: &mut Dictionary, s: &str) -> AppResult<Vec<String>> {\n\n let valid = Regex::new(r\"\\A[a-zA-Z]{2,}\\z\").unwrap();\n\n\n\n let mut words = HashSet::new();\n\n\n\n let chars = str_utils::simple_words_pattern();\n\n for word in chars.find_iter(s) {\n\n words.insert(word.as_str());\n\n }\n\n\n\n let mut result = Vec::<String>::new();\n\n\n\n for word in words {\n\n let lemmed = dictionary.lemmatize(word)?;\n\n if 2 < lemmed.len() && valid.is_match(&lemmed) {\n\n result.push(lemmed);\n\n }\n\n }\n\n\n\n result.sort();\n\n\n\n Ok(result)\n\n}\n", "file_path": "src/command/export.rs", "rank": 5, "score": 239787.80291750835 }, { "content": "fn color(out: &mut String, s: &str, fg: &str, bg: Option<&str>, bold: bool) {\n\n write!(out, r#\"<span foreground=\"{}\"\"#, fg).unwrap();\n\n if let Some(bg) = bg {\n\n write!(out, r#\" background=\"{}\"\"#, bg).unwrap();\n\n }\n\n if bold {\n\n write!(out, r#\" weight=\"bold\"\"#).unwrap();\n\n }\n\n write!(out, r#\">{}</span>\"#, markup_escape_text(s)).unwrap();\n\n}\n\n\n", "file_path": "src/screen/gui.rs", "rank": 8, "score": 225983.24951168534 }, { "content": "pub fn parse_line(input: &str) -> Result<Vec<Text>, pom::Error> {\n\n Ok(vec![Text::Definition(input.to_owned())])\n\n}\n", "file_path": "src/parser/gene.rs", "rank": 10, "score": 214773.46440308454 }, { "content": "pub fn parse_line(input: &str) -> Result<Vec<Text>, pom::Error> {\n\n let mut input = TextInput::new(input);\n\n text().parse(&mut input)\n\n}\n\n\n", "file_path": "src/parser/ejdic.rs", "rank": 11, "score": 214773.46440308454 }, { "content": "pub fn parse_line(input: &str) -> Result<Vec<Text>, pom::Error> {\n\n let mut input = TextInput::new(input);\n\n text().parse(&mut input)\n\n}\n\n\n", "file_path": 
"src/parser/eijiro.rs", "rank": 12, "score": 214773.46440308457 }, { "content": "fn stem(word: &str) -> Vec<String> {\n\n let pairs = [\n\n (\"ied\", \"y\"),\n\n (\"ier\", \"y\"),\n\n (\"ies\", \"y\"),\n\n (\"iest\", \"y\"),\n\n (\"nning\", \"n\"),\n\n (\"est\", \"\"),\n\n (\"ing\", \"\"),\n\n (\"'s\", \"\"),\n\n (\"ed\", \"\"),\n\n (\"ed\", \"e\"),\n\n (\"er\", \"\"),\n\n (\"es\", \"\"),\n\n (\"s\", \"\"),\n\n ];\n\n\n\n let mut result = vec![];\n\n let wlen = word.len();\n\n\n", "file_path": "src/dictionary.rs", "rank": 14, "score": 207019.1428929393 }, { "content": "fn untypo(dic: &mut Dictionary, word: &str) -> AppResult<Option<String>> {\n\n let candidates = dic.correct(word);\n\n\n\n if candidates.is_empty() {\n\n return Ok(None)\n\n }\n\n\n\n for (index, candidate) in candidates.iter().enumerate() {\n\n if 0 < index {\n\n print!(\" \");\n\n }\n\n print!(\"[{}] {} \", index, candidate);\n\n }\n\n println!(\"[x] Cancel\");\n\n\n\n loop {\n\n print!(\"Choose a word [0]: \");\n\n stdout().flush()?;\n\n let mut choosen = \"\".to_owned();\n\n stdin().read_line(&mut choosen).unwrap();\n", "file_path": "src/command/lookup.rs", "rank": 15, "score": 195034.98795477248 }, { "content": "fn lookup_and_print(dic: &mut Dictionary, word: &str, color: bool, limit: Option<usize>, correction: bool, pager: bool) -> AppResultU {\n\n let mut found = if word.starts_with('/') {\n\n dic.search(word[1..].trim())\n\n } else {\n\n dic.get_smart(word.trim())\n\n }?;\n\n\n\n if let Some(limit) = limit {\n\n found = found.map(|it| it.into_iter().take(limit + 1).collect());\n\n }\n\n\n\n if let Some(found) = found {\n\n if color {\n\n screen::color::print(found)?;\n\n } else {\n\n screen::plain::print(found)?;\n\n }\n\n return Ok(())\n\n }\n\n\n", "file_path": "src/command/lookup.rs", "rank": 17, "score": 181157.48431719496 }, { "content": "pub fn print(entries: Vec<Entry>) -> AppResultU {\n\n fn color_key<W: Write>(out: &mut W, key: &str) -> Result<(), IOError> {\n\n dwriteln!(out, [black 
on_yellow bold \"{}\" !] key)\n\n }\n\n\n\n fn color<W: Write>(out: &mut W, text: &Text) -> Result<(), IOError> {\n\n use self::Text::*;\n\n\n\n match text {\n\n Annot(s) => dwrite!(out, [yellow \"{}\" !] s),\n\n Class(s) => dwrite!(out, [blue \"{}\" !] s),\n\n Countability(c) => dwrite!(out, [yellow bold \"{}\" !] c),\n\n Definition(s) => dwrite!(out, [white bold \"{}\" !] s),\n\n Error(s) => dwrite!(out, [red bold \"{}\" !] s),\n\n Etymology(s) => dwrite!(out, [magenta bold \"語源\" ! \" {}\"] s),\n\n Example(s) => dwrite!(out, [green \"{}\" !] s),\n\n Information(s) => dwrite!(out, [cyan \"{}\" !] s),\n\n Note(s) => write!(out, \"{}\", s),\n\n Tag(s) => dwrite!(out, [red bold \"{}\" !] s),\n\n Word(s) => color_key(out, &s),\n", "file_path": "src/screen/color.rs", "rank": 18, "score": 163728.86444736045 }, { "content": "pub fn print(entries: Vec<Entry>) -> AppResultU {\n\n fn color<W: Write>(out: &mut W, text: &Text) -> Result<(), IOError> {\n\n use self::Text::*;\n\n\n\n match text {\n\n Annot(s) | Class(s) | Definition(s) | Example(s) | Information(s) | Note(s) | Tag(s) | Word(s) =>\n\n write!(out, \"{}\", s),\n\n Error(s) =>\n\n write!(out, \"!!{}!!\", s),\n\n Etymology(s) =>\n\n write!(out, \"【語源】{}\", s),\n\n Countability(c) =>\n\n write!(out, \"{}\", c),\n\n }\n\n }\n\n\n\n let out = stdout();\n\n let out = out.lock();\n\n let mut out = BufWriter::new(out);\n\n\n", "file_path": "src/screen/plain.rs", "rank": 19, "score": 163728.86444736042 }, { "content": "pub fn main(rx: &Receiver<Option<Vec<Entry>>>, opt: Opt, bind_to: &str) {\n\n use easycurses::Color::*;\n\n\n\n fn color_key(out: &mut EasyCurses, key: &str) {\n\n out.set_color_pair(colorpair!(Black on Yellow));\n\n out.set_bold(true);\n\n out.win.addstr(key);\n\n out.win.addstr(\"\\n\");\n\n out.set_bold(false);\n\n }\n\n\n\n fn color(out: &mut EasyCurses, text: &Text) {\n\n use self::Text::*;\n\n\n\n fn write(out: &mut EasyCurses, text: &str, color_pair: ColorPair, bold: bool) {\n\n 
out.set_color_pair(color_pair);\n\n if bold {\n\n out.set_bold(true);\n\n }\n\n out.win.addstr(text);\n", "file_path": "src/screen/curses.rs", "rank": 20, "score": 161525.77662303732 }, { "content": "fn lemmatize(connection: &SqliteConnection, word: &str) -> AppResult<String> {\n\n let mut lemmed = word.to_owned();\n\n let mut path = HashSet::<String>::new();\n\n\n\n while let Some(found) = lookup_lemmatized(connection, &lemmed)? {\n\n if !path.insert(found.clone()) {\n\n return Ok(lemmed)\n\n }\n\n lemmed = found;\n\n }\n\n\n\n if lookup_entry(connection, &lemmed)?.is_some() {\n\n return Ok(lemmed.to_owned());\n\n }\n\n\n\n for stemmed in stem(&lemmed) {\n\n if lookup_entry(connection, &stemmed)?.is_some() {\n\n return Ok(stemmed);\n\n }\n\n }\n\n\n\n Ok(lemmed.to_owned())\n\n}\n\n\n", "file_path": "src/dictionary.rs", "rank": 21, "score": 161100.27448642178 }, { "content": "fn load_line(writer: &mut DictionaryWriter, line: &str) -> AppResultU {\n\n if_let_some!(tab = line.find('\\t'), Ok(()));\n\n let keys = &line[0..tab];\n\n let definitions = &line[tab+1..];\n\n\n\n let mut keys = keys.split(',');\n\n let key = keys.next().unwrap();\n\n for definition in definitions.split(\" / \") {\n\n writer.define(key, parse_line(definition)?)?;\n\n }\n\n for alias in keys {\n\n writer.alias(&alias.trim(), key, false)?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/loader/ejdic.rs", "rank": 22, "score": 152903.32404564347 }, { "content": "fn load_line(writer: &mut DictionaryWriter, line: &str) -> AppResultU {\n\n fn extract_aliases(writer: &mut DictionaryWriter, key: &str, right: &str) -> AppResultU {\n\n fn extract(writer: &mut DictionaryWriter, key: &str, right: &str, word_type: WordType, pattern: &str, for_lemmatization: bool) -> AppResultU {\n\n if let Some(found) = right.find(pattern) {\n\n let right = &right[found + pattern.len()..];\n\n let right = read_until_symbols(&right);\n\n if !right.is_empty() {\n\n for it in scan_words(word_type, right) {\n\n 
writer.alias(&it, key, for_lemmatization)?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n let right = right.replace('(', \"\").replace(')', \"\");\n\n extract(writer, key, &right, WordType::English, \"【変化】\", true)?;\n\n // cat-o'-nine-tails\n\n extract(writer, key, &right, WordType::English, \"【同】\", false)?;\n\n extract(writer, key, &right, WordType::Katakana, \"【@】\", false)?;\n", "file_path": "src/loader/eijiro.rs", "rank": 23, "score": 152903.32404564347 }, { "content": "fn analyze_common(dic: &mut Dictionary, text: &str) -> AppResult<Common> {\n\n let mut words = HashMap::<&str, usize>::new();\n\n\n\n let chars = str_utils::simple_words_pattern();\n\n for word in chars.find_iter(&text) {\n\n let word = word.as_str();\n\n let count = words.entry(word).or_default();\n\n *count += 1;\n\n }\n\n\n\n let mut result = Vec::<Word>::new();\n\n\n\n for (word, count) in words {\n\n let word = word.to_lowercase();\n\n let level = if let Some(level) = dic.get_level(&word)? {\n\n Level::Leveled(level)\n\n } else if dic.get(&word)?.is_some() {\n\n Level::OutOf\n\n } else {\n\n Level::NotInDictionary\n", "file_path": "src/command/analyze.rs", "rank": 24, "score": 152761.22922982214 }, { "content": "fn lookup_unaliased(connection: &SqliteConnection, word: &str) -> AppResult<Option<String>> {\n\n diesel_query!(aliases, Alias [Q E R] {\n\n let found = d::aliases\n\n .filter(d::source.eq(word))\n\n .limit(1)\n\n .load::<Alias>(connection)?;\n\n\n\n Ok(found.get(0).map(|it| it.target.to_owned()))\n\n })\n\n}\n\n\n", "file_path": "src/dictionary.rs", "rank": 25, "score": 152588.45341048166 }, { "content": "fn lookup_lemmatized(connection: &SqliteConnection, word: &str) -> AppResult<Option<String>> {\n\n diesel_query!(lemmatizations, Lemmatization [Q E R] {\n\n let found = d::lemmatizations\n\n .filter(d::source.eq(word))\n\n .limit(1)\n\n .load::<Lemmatization>(connection)?;\n\n\n\n Ok(found.get(0).map(|it| it.target.to_owned()))\n\n })\n\n}\n\n\n", "file_path": "src/dictionary.rs", 
"rank": 26, "score": 152588.45341048166 }, { "content": "pub fn generate(opt: Opt, mut app: App) -> AppResultU {\n\n app.gen_completions(env!(\"CARGO_PKG_NAME\"), opt.shell, &opt.directory);\n\n Ok(())\n\n}\n", "file_path": "src/command/completions.rs", "rank": 27, "score": 152446.60474727055 }, { "content": "pub fn main(rx: Receiver<Option<Vec<Entry>>>) -> AppResultU {\n\n for entries in rx {\n\n if let Some(entries) = entries {\n\n print(entries)?\n\n } else {\n\n print_not_found();\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/screen/color.rs", "rank": 28, "score": 152215.4044041318 }, { "content": "pub fn main(rx: Receiver<Option<Vec<Entry>>>) -> AppResultU {\n\n for entries in rx {\n\n if let Some(entries) = entries {\n\n print(entries)?\n\n } else {\n\n print_not_found();\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/screen/plain.rs", "rank": 29, "score": 152215.4044041318 }, { "content": "fn text() -> Parser<char, Vec<Text>> {\n\n let p = annot() | class() | example() | etymology() | tag() | word() | information() | note() | definition();\n\n let p = with_spaces(p);\n\n p.repeat(0..)\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 30, "score": 150736.11022022655 }, { "content": "fn text() -> Parser<char, Vec<Text>> {\n\n // let p = annot() | class() | example() | tag() | word() | information() | note() | definition();\n\n let p = note() | annot() | countability() | definition();\n\n let p = with_spaces(p);\n\n p.repeat(0..)\n\n}\n\n\n", "file_path": "src/parser/ejdic.rs", "rank": 31, "score": 150736.11022022655 }, { "content": "fn markup_entries(out: &mut String, entries: &[Entry]) {\n\n for entry in entries {\n\n color(out, &entry.key, \"black\", Some(\"yellow\"), true);\n\n writeln!(out).unwrap();\n\n\n\n for definition in &entry.definitions {\n\n markup_definition(out, definition);\n\n writeln!(out).unwrap();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/screen/gui.rs", "rank": 32, "score": 145129.33097325344 }, { "content": "fn 
markup_text(out: &mut String, text: &Text) {\n\n use self::Text::*;\n\n\n\n match &text {\n\n Annot(s) => color(out, s, \"yellow\", None, false),\n\n Countability(c) => color(out, &c.to_string(), \"yellow\", None, false),\n\n Class(s) => color(out, s, \"lightblue\", None, false),\n\n Definition(s) => color(out, s, \"white\", None, true),\n\n Error(s) => color(out, s, \"red\", None, true),\n\n Etymology(s) => {\n\n color(out, \"語源 \", \"magenta\", None, true);\n\n color(out, s, \"white\", None, false);\n\n },\n\n Example(s) => color(out, s, \"lightgreen\", None, false),\n\n Information(s) => color(out, s, \"cyan\", None, false),\n\n Note(s) => color(out, s, \"white\", None, false),\n\n Tag(s) => color(out, s, \"orangered\", None, false),\n\n Word(s) => color(out, s, \"black\", Some(\"yellow\"), false),\n\n }\n\n\n\n}\n\n\n", "file_path": "src/screen/gui.rs", "rank": 33, "score": 145129.33097325344 }, { "content": "fn markup_definition(out: &mut String, definition: &Definition) {\n\n for (index, text) in definition.content.iter().enumerate() {\n\n if 0 < index {\n\n write!(out, \" \").unwrap();\n\n }\n\n markup_text(out, text);\n\n }\n\n}\n\n\n", "file_path": "src/screen/gui.rs", "rank": 34, "score": 145129.33097325344 }, { "content": "fn build_complex_candidates(word: &str) -> HashSet<String> {\n\n let mut result = HashSet::new();\n\n for candidate in build_simple_candidates(word) {\n\n let simple = build_simple_candidates(&candidate);\n\n result.insert(candidate);\n\n result.extend(simple);\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/correction.rs", "rank": 35, "score": 144881.92782847345 }, { "content": "fn build_simple_candidates(word: &str) -> HashSet<String> {\n\n let mut set = HashSet::new();\n\n\n\n let splits = (0..word.len()).map(|it| word.split_at(it)).collect::<Vec<_>>();\n\n\n\n for (l, r) in &splits {\n\n // deletion\n\n if !r.is_empty() {\n\n set.insert(format!(\"{}{}\", l, &r[1..]));\n\n // replacing\n\n for c in LETTERS.chars() {\n\n 
set.insert(format!(\"{}{}{}\", l, c, &r[1..]));\n\n }\n\n // transposition\n\n if 1 < r.len() {\n\n set.insert(format!(\"{}{}{}{}\", l, &r[1..2], &r[0..1], &r[2..]));\n\n }\n\n }\n\n // insertion\n\n for c in LETTERS.chars() {\n\n set.insert(format!(\"{}{}{}\", l, c, &r[..]));\n\n }\n\n }\n\n\n\n set\n\n}\n", "file_path": "src/correction.rs", "rank": 36, "score": 144881.92782847345 }, { "content": "fn print(entries: Vec<Entry>) -> Result<(), AppError> {\n\n fn span<W: Write>(out: &mut W, name: &'static str, text: &str) -> Result<(), IOError> {\n\n write!(out, \"<span class=\\\"eitaro-definition eitaro-def-{}\\\">{}</span>\", name, escape(text, Html))\n\n }\n\n\n\n fn color_key<W: Write>(out: &mut W, key: &str) -> Result<(), IOError> {\n\n span(out, \"key\", key)\n\n }\n\n\n\n fn color<W: Write>(out: &mut W, text: &Text) -> Result<(), IOError> {\n\n use self::Text::*;\n\n\n\n match text {\n\n Annot(s) => span(out, \"annotation\", s),\n\n Class(s) => span(out, \"class\", s),\n\n Countability(c) => span(out, \"countability\", &format!(\"{}\", c)),\n\n Definition(s) => span(out, \"definition\", s),\n\n Error(s) => span(out, \"error\", s),\n\n Etymology(s) => span(out, \"etymology\", s),\n\n Example(s) => span(out, \"example\", s),\n", "file_path": "src/command/html.rs", "rank": 37, "score": 142108.35136773429 }, { "content": "pub fn get_dictionary_path() -> Result<PathBuf, AppDirsError> {\n\n let mut result = app_dir(AppDataType::UserCache, &APP_INFO, \"dictionary\")?;\n\n result.push(\"db.sqlite\");\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/path.rs", "rank": 38, "score": 139748.4022304658 }, { "content": "pub fn get_history_path() -> Result<PathBuf, AppDirsError> {\n\n let mut path = app_dir(AppDataType::UserCache, &APP_INFO, \"history\")?;\n\n path.push(\"history.txt\");\n\n Ok(path)\n\n}\n", "file_path": "src/path.rs", "rank": 39, "score": 139748.4022304658 }, { "content": "pub fn analyze<T: AsRef<Path>>(mut opt: Opt, dictionary_path: &T) -> AppResultU {\n\n let 
mut dic = Dictionary::new(dictionary_path);\n\n\n\n let mut text = \"\".to_owned();\n\n stdin().read_to_string(&mut text)?;\n\n\n\n let common = analyze_common(&mut dic, &text)?;\n\n\n\n {\n\n let mut opt_to_check = opt.clone();\n\n opt_to_check.minimum_count = None;\n\n if opt_to_check == Opt::default() {\n\n opt = Opt { all: true, minimum_count: opt.minimum_count, ..Default::default() };\n\n }\n\n }\n\n\n\n if opt.count || opt.all {\n\n analyze_count(&common, &text)?;\n\n }\n\n if opt.svl_stats || opt.all {\n", "file_path": "src/command/analyze.rs", "rank": 40, "score": 135771.70808174467 }, { "content": "fn with_spaces(p: Parser<char, Text>) -> Parser<char, Text> {\n\n sym(' ').repeat(0..) * p - sym(' ').repeat(0..)\n\n}\n\n\n", "file_path": "src/parser/ejdic.rs", "rank": 41, "score": 132265.96323831048 }, { "content": "fn with_spaces(p: Parser<char, Text>) -> Parser<char, Text> {\n\n sym(' ').repeat(0..) * p - sym(' ').repeat(0..)\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 42, "score": 132265.9632383105 }, { "content": "pub fn start_server(opt: Opt, dictionary_path: PathBuf) -> Result<(), AppError> {\n\n let bind_to = opt.bind_to.unwrap_or_else(|| \"127.0.0.1:8116\".to_owned());\n\n let state = State {\n\n dictionary_path: dictionary_path.clone(),\n\n ignore_not_found: opt.ignore_not_found,\n\n screen: Screen::new(opt.screen, dictionary_path, bind_to.clone())\n\n };\n\n let server = HttpServer::new(move || {\n\n let state= state.clone();\n\n App::new()\n\n .wrap(\n\n Cors::new()\n\n .allowed_origin(\"http://localhost:8080\")\n\n .allowed_methods(vec![\"GET\", \"POST\"])\n\n .allowed_headers(vec![header::AUTHORIZATION, header::ACCEPT])\n\n .allowed_header(header::CONTENT_TYPE)\n\n .max_age(3600),\n\n )\n\n .route(\"/ack\", web::get().to(on_ack))\n\n .route(\"/word/{word}\", web::get().to(on_get_word))\n\n .data(state)\n\n });\n\n\n\n server\n\n .bind(bind_to)?\n\n .run()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/command/http.rs", "rank": 43, 
"score": 124724.51558165607 }, { "content": "fn append_history(line: &str) -> AppResultU {\n\n let path = get_history_path()?;\n\n let mut file = OpenOptions::new().write(true).append(true).create(true).open(path)?;\n\n writeln!(file, \"{}\", line)?;\n\n Ok(())\n\n}\n", "file_path": "src/command/lookup.rs", "rank": 44, "score": 121599.90624256924 }, { "content": "pub fn with_pager<F>(f: F) -> AppResultU\n\nwhere F: FnOnce(&mut ChildStdin) -> AppResultU {\n\n let mut c = Command::new(\"less\");\n\n c.args(&[\"--quit-if-one-screen\", \"--RAW-CONTROL-CHARS\", \"--no-init\"]);\n\n c.stdin(Stdio::piped());\n\n c.stdout(Stdio::inherit());\n\n\n\n let mut child = c.spawn()?;\n\n let stdin: &mut ChildStdin = child.stdin.as_mut().ok_or(AppError::Unexpect(\"Failed to open stdin for pager\"))?;\n\n f(stdin)?;\n\n child.wait()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/pager.rs", "rank": 45, "score": 119273.23610785302 }, { "content": "fn read_until_symbols(s: &str) -> &str {\n\n const SYMBOLS: &str = \"【{◆■〔\";\n\n\n\n let mut right = 0;\n\n\n\n for c in s.chars() {\n\n if SYMBOLS.find(c).is_some() {\n\n break;\n\n }\n\n right += c.len_utf8();\n\n }\n\n\n\n &s[0..right]\n\n}\n\n\n", "file_path": "src/loader/eijiro.rs", "rank": 46, "score": 117319.41839841784 }, { "content": "pub fn lookup<T: AsRef<Path>>(opt: Opt, dictionary_path: &T) -> Result<(), AppError> {\n\n let mut dic = Dictionary::new(dictionary_path);\n\n let found = dic.get_smart(opt.word.trim())?.ok_or(AppError::NotFound)?;\n\n print(found)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/command/html.rs", "rank": 47, "score": 115992.72574815722 }, { "content": "pub fn main(tx: SyncSender<Option<Vec<Entry>>>, rx: Receiver<Option<Vec<Entry>>>, opt: Opt, dictionary_path: PathBuf) {\n\n // Workaround - https://github.com/gtk-rs/gtk/issues/405#issuecomment-261809506\n\n // gtk::init().unwrap();\n\n unsafe {\n\n use std::ptr;\n\n use gtk_sys::gtk_init;\n\n let mut argc = 0;\n\n gtk_init(&mut argc, ptr::null_mut());\n\n 
gtk::set_initialized();\n\n }\n\n\n\n let window = gtk::Window::new(gtk::WindowType::Toplevel);\n\n WidgetExt::set_name(&window, \"application\");\n\n window.set_title(\"eitaro\");\n\n window.set_role(opt.role.as_ref().map(String::as_str).unwrap_or(\"eitaro\"));\n\n #[allow(deprecated)]\n\n window.set_wmclass(\"eitaro\", \"eitaro\");\n\n window.set_border_width(0);\n\n // window.set_position(gtk::WindowPosition::Center);\n\n window.add_events(EventMask::SCROLL_MASK.bits() as i32);\n", "file_path": "src/screen/gui.rs", "rank": 48, "score": 115917.37634975366 }, { "content": "pub fn build_dictionary<T: AsRef<Path>>(opt: Opt, dictionary_path: &T) -> Result<(), AppError> {\n\n use DictionaryFormat::*;\n\n\n\n let mut dictionary = Dictionary::new(dictionary_path);\n\n\n\n let stat = dictionary.write(move |mut writer| {\n\n for file in &opt.files {\n\n println!(\"[{}]\", file.to_str().unwrap_or(\"-\"));\n\n let format = guess(file)?;\n\n let mut file = File::open(file)?;\n\n match format {\n\n Csv => csv::CsvLoader::default().load(&mut file, &mut writer)?,\n\n Eijiro => eijiro::EijiroLoader::default().load(&mut file, &mut writer)?,\n\n Ejdic => ejdic::EjdicLoader::default().load(&mut file, &mut writer)?,\n\n Gene => gene::GeneLoader::default().load(&mut file, &mut writer)?,\n\n };\n\n }\n\n println!(\"[Finalize]\");\n\n Ok(())\n\n })?;\n\n\n\n println!(\"Finished: {} words, {} aliases\", stat.words.separated_string(), stat.aliases.separated_string());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/command/builder.rs", "rank": 50, "score": 114333.11007164625 }, { "content": "fn analyze_count(common: &Common, text: &str) -> AppResultU {\n\n let mut sentences = 0;\n\n let mut words = 0;\n\n let mut prev = 'X';\n\n\n\n for c in text.chars() {\n\n if prev != '.' && c == '.' 
{\n\n sentences += 1;\n\n }\n\n prev = c;\n\n }\n\n\n\n for word in &common.words {\n\n words += word.count;\n\n }\n\n\n\n println!(\"Count:\");\n\n println!(\"{}{:<17}{:>6}\", INDENT, \"Sentence\", sentences.separated_string());\n\n println!(\"{}{:<17}{:>6}\", INDENT, \"Word\", words.separated_string());\n\n println!(\"{}{:<17}{:>6}\", INDENT, \"Word (unique)\", common.words.len().separated_string());\n\n println!();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/command/analyze.rs", "rank": 51, "score": 112591.6879943594 }, { "content": "fn extract_tag_name(s: &str) -> Option<&str> {\n\n let mut left = 0;\n\n let mut in_tag = false;\n\n let mut index = 0;\n\n\n\n for c in s.chars() {\n\n if in_tag {\n\n if !c.is_alphabetic() {\n\n return Some(&s[left..index]);\n\n }\n\n } else if c.is_alphabetic() {\n\n left = index;\n\n in_tag = true;\n\n }\n\n\n\n index += c.len_utf8();\n\n }\n\n\n\n if in_tag {\n\n Some(&s[left..index])\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/loader/eijiro.rs", "rank": 52, "score": 111772.83999730588 }, { "content": "pub fn print_not_found() {\n\n dprintln!([black on_red \"{}\" !] 
\"Not Found\");\n\n}\n", "file_path": "src/screen/color.rs", "rank": 53, "score": 108280.4135315966 }, { "content": "pub fn print_not_found() {\n\n println!(\"Not Found\");\n\n}\n", "file_path": "src/screen/plain.rs", "rank": 54, "score": 108280.4135315966 }, { "content": "fn lookup_entry(connection: &SqliteConnection, word: &str) -> AppResult<Option<Entry>> {\n\n let found = diesel_query!(definitions, Definition [Q E R] {\n\n d::definitions\n\n .filter(d::term.eq(word))\n\n .load::<Definition>(connection)?\n\n });\n\n\n\n if found.is_empty() {\n\n return Ok(None)\n\n }\n\n\n\n let defs: serde_json::Result<Vec<Definition>> = found.iter().map(|it| serde_json::from_str::<Definition>(&it.definition)).collect();\n\n\n\n Ok(Some(Entry {\n\n key: word.to_owned(),\n\n definitions: defs?,\n\n }))\n\n}\n\n\n", "file_path": "src/dictionary.rs", "rank": 55, "score": 106848.61331158818 }, { "content": "pub fn path<T: AsRef<Path>>(dictionary_path: &T) -> AppResultU {\n\n let history = get_history_path()?;\n\n println!(\"dictionary: {}\", dictionary_path.as_ref().to_str().unwrap());\n\n println!(\"history: {}\", history.to_str().unwrap());\n\n Ok(())\n\n}\n", "file_path": "src/command/path.rs", "rank": 56, "score": 104578.14049917212 }, { "content": "fn analyze_usage(dictionary: &mut Dictionary, common: &Common, n: usize) -> AppResultU {\n\n println!(\"Usage ranking:\");\n\n let mut words: Vec<(&str, usize)> = common.words.iter().map(|it| (it.word.as_ref(), it.count)).collect();\n\n words.sort_by(|(_, a), (_, b)| b.cmp(a));\n\n let mut results = 0;\n\n let width = (n as f64).log(10.0) as usize + 1;\n\n for (word, count) in words.iter() {\n\n if word.len() < 3 {\n\n continue;\n\n }\n\n if dictionary.get_level(word)? == Some(1) {\n\n continue;\n\n }\n\n\n\n results += 1;\n\n println!(\"{}{:width$}. 
{:16} {:>7}\", INDENT, results, word, count.separated_string(), width = width);\n\n if n <= results {\n\n break;\n\n }\n\n }\n", "file_path": "src/command/analyze.rs", "rank": 57, "score": 104241.05676013601 }, { "content": "fn to_key_string(ev: &gdk::EventKey) -> String {\n\n let keyval = ev.as_ref().keyval;\n\n gdk::keyval_name(keyval).unwrap_or_else(|| format!(\"{}\", keyval))\n\n}\n", "file_path": "src/screen/gui.rs", "rank": 58, "score": 103723.38501970525 }, { "content": "fn example() -> Parser<char, Text> {\n\n let p1 = sym('■');\n\n let p2 = sym('・') * none_of(SPECIALS).repeat(1..);\n\n let p2 = p2.map(|it| Text::Example(v2s(it)));\n\n let p3 = none_of(SPECIALS).repeat(1..);\n\n let p3 = p3.map(|it| Text::Definition(format!(\"■{}\", v2s(it))));\n\n p1 * (p2 | p3)\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 59, "score": 101899.3197712851 }, { "content": "fn etymology() -> Parser<char, Text> {\n\n let p = seq(\"【語源】\") * none_of(SPECIALS).repeat(1..);\n\n p.map(|it| Text::Etymology(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 60, "score": 101899.3197712851 }, { "content": "fn information() -> Parser<char, Text> {\n\n let p = sym('◆') * none_of(SPECIALS).repeat(1..);\n\n p.map(|it| Text::Information(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 61, "score": 101899.3197712851 }, { "content": "fn definition() -> Parser<char, Text> {\n\n let p = none_of(SPECIALS).repeat(1..);\n\n p.map(|it| Text::Definition(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 62, "score": 101899.3197712851 }, { "content": "fn countability() -> Parser<char, Text> {\n\n let q1 = (sym('U') | sym('C')).map(Text::Countability);\n\n let q2 = none_of(\"〈〉\").repeat(1..).map(|it| Text::Note(v2s(it)));\n\n sym('〈') * (q1 | q2) - sym('〉')\n\n}\n\n\n", "file_path": "src/parser/ejdic.rs", "rank": 63, "score": 101899.3197712851 }, { "content": "fn tag() -> Parser<char, Text> {\n\n let p = sym('{') * 
none_of(\"{}\").repeat(1..) - sym('}');\n\n p.map(|it| Text::Tag(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 64, "score": 101899.3197712851 }, { "content": "fn note() -> Parser<char, Text> {\n\n let p = sym('〔') * none_of(\"〔〕\").repeat(1..) - sym('〕');\n\n p.map(|it| Text::Note(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 65, "score": 101899.3197712851 }, { "content": "fn word() -> Parser<char, Text> {\n\n let p = seq(\"#\") * sym(' ').repeat(0..) * none_of(\"\\n\").repeat(1..) - seq(\"\\n\");\n\n p.map(|it| Text::Word(v2s(it)))\n\n}\n\n\n\n\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 66, "score": 101899.3197712851 }, { "content": "fn annot() -> Parser<char, Text> {\n\n let p = sym('〈') * none_of(\"〈〉\").repeat(1..) - sym('〉');\n\n p.map(|it| Text::Annot(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 67, "score": 101899.3197712851 }, { "content": "fn class() -> Parser<char, Text> {\n\n let p = sym('《') * none_of(\"《》\").repeat(1..) - sym('》');\n\n p.map(|it| Text::Class(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/eijiro.rs", "rank": 68, "score": 101899.3197712851 }, { "content": "fn note() -> Parser<char, Text> {\n\n let p = sym('〔') * none_of(\"〔〕\").repeat(1..) - sym('〕');\n\n p.map(|it| Text::Note(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/ejdic.rs", "rank": 69, "score": 101899.3197712851 }, { "content": "fn annot() -> Parser<char, Text> {\n\n let p = sym('《') * none_of(\"《》\").repeat(1..) 
- sym('》');\n\n p.map(|it| Text::Annot(v2s(it)))\n\n}\n\n\n", "file_path": "src/parser/ejdic.rs", "rank": 70, "score": 101899.3197712851 }, { "content": "fn definition() -> Parser<char, Text> {\n\n let p = none_of(SPECIALS).repeat(1..);\n\n p.map(|it| Text::Definition(v2s(it)))\n\n}\n", "file_path": "src/parser/ejdic.rs", "rank": 71, "score": 101899.3197712851 }, { "content": "pub fn shell<T: AsRef<Path>>(opt: Opt, path: &T) -> AppResultU {\n\n let path = path.as_ref().to_str().ok_or(AppError::Unexpect(\"Invalid path string\"))?;\n\n Command::new(\"sqlite3\")\n\n .arg(&path)\n\n .args(&opt.args)\n\n .exec();\n\n\n\n Ok(())\n\n}\n\n\n\n\n\n\n", "file_path": "src/command/database.rs", "rank": 72, "score": 99690.84863402604 }, { "content": "fn scroll(window: &ScrolledWindow, up: bool) {\n\n if let Some(adj) = window.get_vadjustment() {\n\n let mut page_size = adj.get_page_size();\n\n if up {\n\n page_size *= -1.0;\n\n }\n\n adj.set_value(page_size + adj.get_value());\n\n }\n\n}\n\n\n", "file_path": "src/screen/gui.rs", "rank": 73, "score": 98615.41025201892 }, { "content": "fn print_error(mut fail: &dyn Fail) {\n\n let mut message = fail.to_string();\n\n\n\n while let Some(cause) = fail.cause() {\n\n message.push_str(&format!(\"\\n\\tcaused by: {}\", cause));\n\n fail = cause;\n\n }\n\n\n\n eprintln!(\"Error: {}\", message);\n\n\n\n exit(1);\n\n}\n", "file_path": "src/main.rs", "rank": 74, "score": 98037.64361093772 }, { "content": "pub fn export<T: AsRef<Path>>(opt: Opt, dictionary_path: &T) -> AppResultU {\n\n let mut dictionary = Dictionary::new(dictionary_path);\n\n let exporter = csv::CsvExporter();\n\n\n\n let out = stdout();\n\n let out = out.lock();\n\n let mut out = BufWriter::new(out);\n\n\n\n let input = stdin();\n\n let input = input.lock();\n\n let mut reader = BufReader::new(input);\n\n if opt.as_text {\n\n let mut buffer = \"\".to_owned();\n\n reader.read_to_string(&mut buffer)?;\n\n let words = extract_text(&mut dictionary, &buffer)?;\n\n let words = 
words.iter().map(String::as_ref).collect::<Vec<&str>>();\n\n exporter.export(&mut dictionary, &words, &mut out)?;\n\n } else {\n\n let words = reader.lines().collect::<Result<Vec<String>, _>>()?;\n\n let words: Vec<&str> = words.iter().map(String::as_ref).map(str::trim).collect();\n\n exporter.export(&mut dictionary, &words, &mut out)?;\n\n }\n\n\n\n out.flush()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/command/export.rs", "rank": 75, "score": 97940.44368989547 }, { "content": "pub fn lemmatize<T: AsRef<Path>>(opt: Opt, dictionary_path: &T) -> AppResultU {\n\n let mut dic = Dictionary::new(dictionary_path);\n\n println!(\"{}\", dic.lemmatize(&opt.word)?);\n\n Ok(())\n\n}\n", "file_path": "src/command/lemmatize.rs", "rank": 76, "score": 97940.44368989547 }, { "content": "pub fn level<T: AsRef<Path>>(opt: Opt, dictionary_path: &T) -> AppResultU {\n\n let mut dic = Dictionary::new(dictionary_path);\n\n if let Some(found) = dic.get_level(&opt.word)? {\n\n println!(\"{}\", found);\n\n } else {\n\n eprintln!(\"Not available\");\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/command/level.rs", "rank": 77, "score": 97940.44368989547 }, { "content": "pub fn untypo<T: AsRef<Path>>(opt: Opt, dictionary_path: &T) -> AppResultU {\n\n let mut dic = Dictionary::new(dictionary_path);\n\n for candidate in dic.correct(&opt.word) {\n\n println!(\"{}\", candidate);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/command/untypo.rs", "rank": 78, "score": 97940.44368989547 }, { "content": "pub fn extract<T: AsRef<Path>>(_opt: Opt, dictionary_path: &T) -> AppResultU {\n\n let mut dic = Dictionary::new(dictionary_path);\n\n\n\n let mut text = \"\".to_owned();\n\n stdin().read_to_string(&mut text)?;\n\n\n\n let mut lt = HashMap::new();\n\n\n\n let chars = str_utils::simple_words_pattern();\n\n for word in chars.find_iter(&text) {\n\n let word = word.as_str();\n\n if let Ok(lem) = lt.entry(word).or_insert_with(|| dic.lemmatize(&word.to_lowercase())) {\n\n println!(\"{}\", lem);\n\n }\n\n 
}\n\n\n\n Ok(())\n\n}\n", "file_path": "src/command/words.rs", "rank": 79, "score": 97940.44368989547 }, { "content": "pub fn lemmas<T: AsRef<Path>>(opt: Opt, dictionary_path: &T) -> AppResultU {\n\n let mut dic = Dictionary::new(dictionary_path);\n\n let keys = dic.keys()?;\n\n let keys: HashSet<&String> = keys.iter().collect();\n\n let keys: Vec<&String> = keys.into_iter().collect();\n\n let mut keys: Vec<&String> = keys.into_iter().filter(|it| !opt.only_words || it.find(' ') == None).collect();\n\n keys.sort();\n\n for key in keys {\n\n println!(\"{}\", key);\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/command/lemmas.rs", "rank": 80, "score": 97940.44368989547 }, { "content": "pub fn lookup<T: AsRef<Path>>(opt: LookupOpt, dictionary_path: &T) -> AppResultU {\n\n let mut dic = Dictionary::new(dictionary_path);\n\n lookup_and_print(&mut dic, &opt.word, opt.color, opt.n, opt.correction, true)\n\n}\n\n\n", "file_path": "src/command/lookup.rs", "rank": 81, "score": 96280.82801338451 }, { "content": "pub fn shell<T: AsRef<Path>>(opt: ShellOpt, dictionary_path: &T) -> AppResultU {\n\n let config = rustyline::config::Builder::new()\n\n .auto_add_history(true)\n\n .build();\n\n let mut editor = rustyline::Editor::<()>::with_config(config);\n\n let history_path = get_history_path()?;\n\n if history_path.exists() {\n\n editor.load_history(&history_path)?;\n\n }\n\n\n\n let mut dic = Dictionary::new(dictionary_path);\n\n let prompt = opt.prompt.unwrap_or_else(|| DEFAULT_PROMPT.to_owned());\n\n loop {\n\n match editor.readline(&prompt) {\n\n Ok(ref input) => {\n\n let input = input.trim();\n\n if input.is_empty() {\n\n continue;\n\n }\n\n lookup_and_print(&mut dic, input, true, None, true, true)?;\n", "file_path": "src/command/lookup.rs", "rank": 82, "score": 96280.82801338451 }, { "content": "fn analyze_only_given_level<F>(common: &Common, name: &str, valid_level: F, minimum: Option<usize>) -> AppResultU\n\nwhere F: Fn(Level) -> bool {\n\n println!(\"{}:\", name);\n\n let 
mut words: Vec<&Word> = common.words.iter()\n\n .filter(|it| valid_level(it.level))\n\n .filter(|it| 2 < it.word.len())\n\n .collect();\n\n words.sort_by(|a, b| {\n\n let c1 = b.count.cmp(&a.count);\n\n if c1 == Ordering::Equal {\n\n return a.word.cmp(&b.word);\n\n }\n\n c1\n\n });\n\n let width = (words.len() as f64).log(10.0) as usize + 1;\n\n let mut results = 0;\n\n for word in words {\n\n if let Some(min) = minimum {\n\n if word.count < min {\n\n continue;\n", "file_path": "src/command/analyze.rs", "rank": 83, "score": 89471.88060642072 }, { "content": "fn guess<T: AsRef<Path>>(source_path: &T) -> Result<DictionaryFormat, AppError> {\n\n let mut file = File::open(source_path)?;\n\n let mut head = [0u8;100];\n\n let size = file.read(&mut head)?;\n\n let head = &head[0..size];\n\n\n\n if head.starts_with(b\" / This book describes Jpan and its kaisha at the cutting edge.\") {\n\n return Ok(DictionaryFormat::Gene)\n\n }\n\n\n\n // 81a1 == ■\n\n if head.starts_with(b\"\\x81\\xa1\") {\n\n return Ok(DictionaryFormat::Eijiro);\n\n }\n\n\n\n if head.contains(&b'\\t') {\n\n return Ok(DictionaryFormat::Ejdic);\n\n }\n\n\n\n if head.contains(&b',') {\n\n return Ok(DictionaryFormat::Csv)\n\n }\n\n\n\n Err(AppError::Eitaro(\"Unknown format\"))\n\n}\n", "file_path": "src/command/builder.rs", "rank": 86, "score": 79774.99139907336 }, { "content": "fn stat(connection: &SqliteConnection) -> AppResult<Stat> {\n\n // FIXME\n\n let words = diesel_query!(definitions [Q R] {\n\n d::definitions\n\n .select(d::term)\n\n .distinct()\n\n .load::<String>(connection)\n\n })?.len();\n\n let aliases = diesel_query!(aliases [Q R] {\n\n use diesel::dsl::count;\n\n d::aliases\n\n .select(count(d::id))\n\n .first::<i64>(connection)\n\n })? 
as usize;\n\n\n\n Ok(Stat { aliases, words })\n\n}\n\n\n", "file_path": "src/dictionary.rs", "rank": 87, "score": 70411.78301114099 }, { "content": "fn analyze_svl(common: &Common) -> AppResultU {\n\n fn pct(v: usize, total: usize) -> f64 {\n\n v as f64 / total as f64 * 100.0\n\n }\n\n\n\n let mut unique_counts = HashMap::<Level, usize>::new();\n\n let mut unique_total = 0;\n\n let mut cumulative_counts = HashMap::<Level, usize>::new();\n\n let mut cumulative_total = 0;\n\n\n\n for word in &common.words {\n\n if word.word.len() <= 3 {\n\n continue;\n\n }\n\n\n\n let unique_count = unique_counts.entry(word.level).or_default();\n\n *unique_count += 1;\n\n unique_total += 1;\n\n\n\n let cumulative_count = cumulative_counts.entry(word.level).or_default();\n", "file_path": "src/command/analyze.rs", "rank": 88, "score": 69780.10571888497 }, { "content": "-- This file should undo anything in `up.sql`\n\nDROP INDEX definitions_term_index;\n", "file_path": "migrations/2020-02-17-054821_create_definitions/down.sql", "rank": 89, "score": 68574.171300057 }, { "content": "#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]\n\nenum Level {\n\n Leveled(u8),\n\n OutOf,\n\n NotInDictionary,\n\n}\n\n\n", "file_path": "src/command/analyze.rs", "rank": 90, "score": 65457.72484742971 }, { "content": "fn _main<T: AsRef<Path>>(dictionary_path: &T) -> AppResultU {\n\n use self::Command::*;\n\n\n\n let opt = Opt::from_args();\n\n\n\n if let Some(command) = opt.command {\n\n match command {\n\n Analyze(opt) =>\n\n command::analyze::analyze(opt, dictionary_path),\n\n Build(opt) =>\n\n command::builder::build_dictionary(opt, dictionary_path),\n\n Completions(opt) =>\n\n command::completions::generate(opt, Opt::clap()),\n\n Database(opt) =>\n\n command::database::shell(opt, dictionary_path),\n\n Export(opt) =>\n\n command::export::export(opt, dictionary_path),\n\n Html(opt) =>\n\n command::html::lookup(opt, dictionary_path),\n\n Lemmas(opt) =>\n", "file_path": "src/main.rs", "rank": 91, 
"score": 64037.53309535516 }, { "content": "CREATE INDEX tags_tag_index ON tags(tag);\n", "file_path": "migrations/2020-02-18-065155_create_tags/up.sql", "rank": 92, "score": 62204.960265651935 }, { "content": "CREATE INDEX levels_level_index ON levels(level);\n", "file_path": "migrations/2020-02-17-072411_create_levels/up.sql", "rank": 93, "score": 62204.960265651935 }, { "content": "CREATE INDEX definitions_term_index ON definitions(term);\n", "file_path": "migrations/2020-02-17-054821_create_definitions/up.sql", "rank": 94, "score": 62204.960265651935 }, { "content": "CREATE INDEX lemmatizations_source_index ON lemmatizations(source);\n", "file_path": "migrations/2020-02-17-071809_create_lemmatizations/up.sql", "rank": 95, "score": 62204.960265651935 }, { "content": "CREATE INDEX tags_term_index ON tags(term);\n", "file_path": "migrations/2020-02-18-065155_create_tags/up.sql", "rank": 96, "score": 62204.960265651935 }, { "content": "CREATE INDEX aliases_key_index ON aliases(source);\n", "file_path": "migrations/2020-02-17-070434_create_aliases/up.sql", "rank": 97, "score": 62204.960265651935 }, { "content": "fn connect_events(window: gtk::Window, scroller: &gtk::ScrolledWindow, entry: gtk::Entry, dictionary_path: PathBuf, tx: SyncSender<Option<Vec<Entry>>>) {\n\n let delay = Delay::new(Duration::from_millis(250));\n\n\n\n window.connect_delete_event(|_, _| {\n\n exit(0);\n\n });\n\n\n\n entry.connect_key_press_event(clone_army!([window, scroller] move |entry, ev| {\n\n let empty = entry.get_text().map(|it| it.is_empty()).unwrap_or(true);\n\n let key = to_key_string(&ev);\n\n match &*key {\n\n \"Return\" | \"Escape\" => {\n\n if empty {\n\n entry.hide();\n\n window.set_focus(Some(&scroller));\n\n } else {\n\n entry.set_text(\"\");\n\n }\n\n return Inhibit(true);\n\n },\n", "file_path": "src/screen/gui.rs", "rank": 98, "score": 61299.22042714102 }, { "content": "CREATE INDEX definitions_term_index ON definitions(term);\n", "file_path": 
"migrations/2020-02-20-235758_add_text_column_to_definitions/down.sql", "rank": 99, "score": 60664.51406251129 } ]
Rust
src/day3.rs
arturh85/adventofcode-rust-2021
dddcdb3901fec5fce6d317c0b12ff79d44e4f3bf
use bitlab::*;

/// Parses the puzzle input: one binary number per line.
#[aoc_generator(day3)]
fn parse_input(input: &str) -> Vec<u32> {
    input
        .lines()
        .map(|line| u32::from_str_radix(line, 2).unwrap())
        .collect()
}

/// Part 1: power consumption = gamma rate * epsilon rate.
#[aoc(day3, part1)]
fn part1(input: &[u32]) -> u64 {
    gamma(input) as u64 * epsilon(input) as u64
}

/// Part 2: life support rating = oxygen generator rating * CO2 scrubber rating.
#[aoc(day3, part2)]
fn part2(input: &[u32]) -> u64 {
    oxygen(input) as u64 * co2(input) as u64
}

/// Oxygen generator rating: repeatedly keep the values whose bit matches
/// the MOST common bit at each position (ties favour 1).
fn oxygen(input: &[u32]) -> u32 {
    filter_by_bit_criteria(input, true)
}

/// CO2 scrubber rating: repeatedly keep the values whose bit matches
/// the LEAST common bit at each position (ties favour 0).
fn co2(input: &[u32]) -> u32 {
    filter_by_bit_criteria(input, false)
}

/// Shared filter loop behind `oxygen` and `co2`.
///
/// Scans bit positions from the most significant set bit of the largest
/// input value down to position 31 (bitlab counts bit positions from the
/// MSB side), discarding candidates at each step until exactly one value
/// remains. Returns 0 if the filter never narrows the list to a single
/// value (does not occur on valid puzzle input).
fn filter_by_bit_criteria(input: &[u32], keep_most_common: bool) -> u32 {
    let max = *input.iter().max().unwrap();
    let start = significant_bitcount(max).unwrap();
    let mut remaining = Vec::from(input);
    for i in start..32 {
        let (count_zeros, count_ones) = count_ones_zeros_at(&remaining, i);
        // Which bit value survives this round.
        let keep_ones = if keep_most_common {
            count_ones >= count_zeros
        } else {
            count_ones < count_zeros
        };
        remaining.retain(|v| v.get_bit(i).unwrap() == keep_ones);
        if remaining.len() == 1 {
            return remaining[0];
        }
    }
    0
}

/// Counts how many values have a 0 / 1 at bitlab position `pos`
/// (position 0 = most significant bit of the u32).
fn count_ones_zeros_at(input: &[u32], pos: u32) -> (u32, u32) {
    let mut count_zeros = 0;
    let mut count_ones = 0;
    for v in input {
        if v.get_bit(pos).unwrap() {
            count_ones += 1;
        } else {
            count_zeros += 1;
        }
    }
    (count_zeros, count_ones)
}

/// Gamma rate: for every bit position, the most common bit across all
/// inputs (ties favour 1). Positions where no input has a 1 stay 0 for
/// non-empty input, so scanning all 32 positions is safe.
fn gamma(input: &[u32]) -> u32 {
    let mut ret = 0;
    for i in 0..32 {
        let (count_zeros, count_ones) = count_ones_zeros_at(input, i);
        if count_ones >= count_zeros {
            ret = ret.set_bit(i).unwrap();
        }
    }
    ret
}

/// Bitlab position of the first (most significant) set bit, or `None` for 0.
fn significant_bitcount(value: u32) -> Option<u32> {
    for i in 0..32 {
        if value.get_bit(i).unwrap() {
            return Some(i);
        }
    }
    None
}

/// Epsilon rate: bitwise complement of the gamma rate, restricted to the
/// significant bit range of the largest input (leading zero bits stay 0).
fn epsilon(input: &[u32]) -> u32 {
    let gamma = gamma(input);
    let max = *input.iter().max().unwrap();
    let start = significant_bitcount(max).unwrap();
    let mut ret = 0;
    for i in start..32 {
        if !gamma.get_bit(i).unwrap() {
            ret = ret.set_bit(i).unwrap()
        }
    }
    ret
}

#[cfg(test)]
mod tests {
    use super::*;

    /// Sample diagnostic report from the puzzle description.
    const EXAMPLE: &str =
        "00100\n11110\n10110\n10111\n10101\n01111\n00111\n11100\n10000\n11001\n00010\n01010";

    #[test]
    fn part1_examples() {
        assert_eq!(gamma(&parse_input(EXAMPLE)), 0b1_0110);
        assert_eq!(gamma(&parse_input(EXAMPLE)), 22);
        assert_eq!(epsilon(&parse_input(EXAMPLE)), 0b1001);
        assert_eq!(epsilon(&parse_input(EXAMPLE)), 9);
    }

    #[test]
    fn part2_examples() {
        assert_eq!(oxygen(&parse_input(EXAMPLE)), 0b1_0111);
        assert_eq!(oxygen(&parse_input(EXAMPLE)), 23);
        assert_eq!(co2(&parse_input(EXAMPLE)), 0b0_1010);
        assert_eq!(co2(&parse_input(EXAMPLE)), 10);
    }
}
use bitlab::*; #[aoc_generator(day3)] fn parse_input(input: &str) -> Vec<u32> { input .lines() .map(|line| u32::from_str_radix(line, 2).unwrap()) .collect() } #[aoc(day3, part1)] fn part1(input: &[u32]) -> u64 { gamma(input) as u64 * epsilon(input) as u64 } #[aoc(day3, part2)] fn part2(input: &[u32]) -> u64 { oxygen(input) as u64 * co2(input) as u64 } fn oxygen(input: &[u32]) -> u32 { let max = *input.iter().max().unwrap(); let start = significant_bitcount(max).unwrap(); let mut remaining = Vec::from(input); for i in start..32 { let (count_zeros, count_ones) = count_ones_zeros_at(&remaining, i); if count_ones >= count_zeros { remaining = remaining .into_iter() .filter(|v| v.get_bit(i).unwrap()) .collect(); } else { remaining = remaining .into_iter() .filter(|v| !v.get_bit(i).unwrap()) .collect(); } if remaining.len() == 1 { return remaining[0]; } } 0 } fn co2(input: &[u32]) -> u32 { let max = *input.iter().max().unwrap(); let start = significant_bitcount(max).unwrap(); let mut remaining = Vec::from(input); for i in start..32 { let (count_zeros, count_ones) = count_ones_zeros_at(&remaining, i); if count_zeros <= count_ones { remaining = remaining .into_iter() .filter(|v| !v.get_bit(i).unwrap()) .collect(); } else { remaining = remaining .into_iter() .filter(|v| v.get_bit(i).unwrap()) .collect(); } if remaining.len() == 1 { return remaining[0]; } } 0 } fn count_ones_zeros_at(input: &[u32], pos: u32) -> (u32, u32) { let mut count_zeros = 0; let mut count_ones = 0; for v in input { if v.get_bit(pos).unwrap() { count_ones += 1; } else { count_zeros += 1; } } (count_zeros, count_ones) } fn gamma(input: &[u32]) -> u32 { let mut ret = 0; for i in 0..32 { let (count_zeros, count_ones) = count_ones_zeros_at(input, i); if count_ones >= count_zeros { ret = ret.set_bit(i).unwrap(); } } ret } fn significant_bitcount(value: u32) -> Option<u32> { for i in 0..32 { if value.get_bit(i).unwrap() { return Some(i); } } None }
#[cfg(test)] mod tests { use super::*; const EXAMPLE: &str = "00100 11110 10110 10111 10101 01111 00111 11100 10000 11001 00010 01010"; #[test] fn part1_examples() { assert_eq!(gamma(&parse_input(EXAMPLE)), 0b1_0110); assert_eq!(gamma(&parse_input(EXAMPLE)), 22); assert_eq!(epsilon(&parse_input(EXAMPLE)), 0b1001); assert_eq!(epsilon(&parse_input(EXAMPLE)), 9); } #[test] fn part2_examples() { assert_eq!(oxygen(&parse_input(EXAMPLE)), 0b1_0111); assert_eq!(oxygen(&parse_input(EXAMPLE)), 23); assert_eq!(co2(&parse_input(EXAMPLE)), 0b0_1010); assert_eq!(co2(&parse_input(EXAMPLE)), 10); } }
fn epsilon(input: &[u32]) -> u32 { let gamma = gamma(input); let max = *input.iter().max().unwrap(); let start = significant_bitcount(max).unwrap(); let mut ret = 0; for i in start..32 { if !gamma.get_bit(i).unwrap() { ret = ret.set_bit(i).unwrap() } } ret }
function_block-full_function
[ { "content": "#[aoc(day7, part2)]\n\nfn part2(input: &[u32]) -> u32 {\n\n (0..input.len())\n\n .map(|target| fuel2(input, target as u32))\n\n .min()\n\n .unwrap()\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 2, "score": 218867.65510229085 }, { "content": "#[aoc(day7, part1)]\n\nfn part1(input: &[u32]) -> u32 {\n\n (0..input.len())\n\n .map(|target| fuel1(input, target as u32))\n\n .min()\n\n .unwrap()\n\n}\n\n\n\n/// Part 2:\n\n/// Determine the horizontal position that the crabs can align to using the least fuel possible so\n\n/// they can make you an escape route!\n\n/// How much fuel must they spend to align to that position?\n", "file_path": "src/day7.rs", "rank": 3, "score": 218777.23005528972 }, { "content": "#[aoc(day5, part2)]\n\nfn part2(input: &[Line]) -> usize {\n\n let grid = build_grid(input);\n\n grid.iter().filter(|v| **v >= 2).count()\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 4, "score": 203577.8666416348 }, { "content": "#[aoc(day5, part1)]\n\nfn part1(input: &[Line]) -> usize {\n\n let non_diagonals: Vec<Line> = Vec::from(input)\n\n .into_iter()\n\n .filter(|line| !line.is_diagonal())\n\n .collect();\n\n let grid = build_grid(&non_diagonals);\n\n grid.iter().filter(|v| **v >= 2).count()\n\n}\n\n\n\n/// Part 2: At how many points do at least two lines overlap?\n", "file_path": "src/day5.rs", "rank": 5, "score": 203487.44159463368 }, { "content": "#[aoc(day6, part2)]\n\nfn part2(input: &[u8]) -> u64 {\n\n evolve(input, 256)\n\n}\n\n\n", "file_path": "src/day6.rs", "rank": 6, "score": 202798.97193566273 }, { "content": "#[aoc(day6, part1)]\n\nfn part1(input: &[u8]) -> u64 {\n\n evolve(input, 80)\n\n}\n\n\n\n/// Part 2: How many lanternfish would there be after 256 days?\n", "file_path": "src/day6.rs", "rank": 7, "score": 202708.54688866163 }, { "content": "#[aoc(day10, part2)]\n\nfn part2(input: &str) -> usize {\n\n let mut scores: Vec<usize> = input\n\n .lines()\n\n .map(incomplete_score)\n\n .filter(|score| *score > 0)\n\n 
.collect();\n\n scores.sort_unstable();\n\n scores[scores.len() / 2]\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 8, "score": 202002.65588025522 }, { "content": "#[aoc(dayX, part2)]\n\nfn part2(input: &str) -> usize {\n\n todo!();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"\";\n\n\n\n #[test]\n\n fn part1_examples() {\n\n assert_eq!(0, part1(&parse_input(EXAMPLE)));\n\n }\n\n\n\n #[test]\n\n fn part2_examples() {\n\n assert_eq!(0, part2(&parse_input(EXAMPLE)));\n\n }\n\n}\n", "file_path": "src/_day0.rs", "rank": 9, "score": 202002.58248601964 }, { "content": "#[aoc(day10, part1)]\n\nfn part1(input: &str) -> usize {\n\n input.lines().map(syntax_error_score).sum()\n\n}\n\n\n\n/// Part 2:\n\n/// Find the completion string for each incomplete line, score the completion strings, and sort\n\n/// the scores. What is the middle score?\n", "file_path": "src/day10.rs", "rank": 10, "score": 201912.2308332541 }, { "content": "#[aoc(dayX, part1)]\n\nfn part1(input: &str) -> usize {\n\n todo!();\n\n}\n\n\n\n/// Part 2\n", "file_path": "src/_day0.rs", "rank": 11, "score": 201912.1576943284 }, { "content": "#[aoc_generator(day5)]\n\nfn parse_input(input: &str) -> Vec<Line> {\n\n input.lines().map(Line::parse).collect()\n\n}\n\n\n\n/// Part 1: At how many points do at least two lines overlap?\n", "file_path": "src/day5.rs", "rank": 12, "score": 199126.0418733291 }, { "content": "#[aoc_generator(day7)]\n\nfn parse_input(input: &str) -> Vec<u32> {\n\n input.split(',').map(|line| line.parse().unwrap()).collect()\n\n}\n\n\n\n/// Part 1:\n\n/// Determine the horizontal position that the crabs can align to using the least fuel possible.\n\n/// How much fuel must they spend to align to that position?\n", "file_path": "src/day7.rs", "rank": 14, "score": 198561.8300832179 }, { "content": "#[aoc_generator(day15)]\n\nfn parse_input(input: &str) -> Array2<u64> {\n\n parse_array2(input)\n\n}\n\n\n\n/// Part 1: What is the lowest total risk of 
any path from the top left to the bottom right?\n", "file_path": "src/day15.rs", "rank": 15, "score": 198387.89555464737 }, { "content": "#[aoc(day8, part2)]\n\nfn part2(input: &[(Vec<String>, Vec<String>)]) -> u64 {\n\n input\n\n .iter()\n\n .map(|(unique_signal_patterns, value)| decode(unique_signal_patterns, value))\n\n .sum()\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 16, "score": 179401.57314952815 }, { "content": "#[aoc_generator(day13)]\n\nfn parse_input(input: &str) -> Input {\n\n let re = Regex::new(r\"^fold along (?P<x_or_y>[xy])=(?P<value>\\d+)$\").unwrap();\n\n let mut dots = Vec::new();\n\n let mut folds = Vec::new();\n\n let mut width = 0;\n\n let mut height = 0;\n\n for line in input.lines() {\n\n if let Some(matches) = re.captures(line) {\n\n let value = matches.name(\"value\").unwrap().as_str().parse().unwrap();\n\n match matches.name(\"x_or_y\").unwrap().as_str() {\n\n \"x\" => folds.push(Fold::FoldLeft(value)),\n\n \"y\" => folds.push(Fold::FoldUp(value)),\n\n _ => panic!(\"should not happen\"),\n\n }\n\n } else if !line.is_empty() {\n\n let parts: Vec<usize> = line.split(',').map(|c| c.parse().unwrap()).collect();\n\n let x = parts[0];\n\n let y = parts[1];\n\n dots.push((x, y));\n\n if y > height {\n", "file_path": "src/day13.rs", "rank": 21, "score": 163005.74563274844 }, { "content": "#[aoc(day13, part2)]\n\nfn part2(input: &Input) -> String {\n\n let mut grid = input.grid.clone();\n\n for fold in &input.folds {\n\n grid = execute_fold(&grid, fold);\n\n }\n\n println!(\"{}\", grid_str(&grid));\n\n \"BCZRCEAB\".into()\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 22, "score": 162710.89218812983 }, { "content": "#[aoc(day13, part1)]\n\nfn part1(input: &Input) -> usize {\n\n let mut grid = input.grid.clone();\n\n if let Some(fold) = input.folds.get(0) {\n\n grid = execute_fold(&grid, fold);\n\n }\n\n grid.map(|f| if *f { 1 } else { 0 }).sum()\n\n}\n\n\n\n/// Part 2: What code do you use to activate the infrared thermal imaging camera 
system?\n", "file_path": "src/day13.rs", "rank": 23, "score": 162620.46714112876 }, { "content": "#[aoc(day15, part2)]\n\nfn part2(grid: &Array2<u64>) -> u64 {\n\n let (height, width) = shape2(grid);\n\n let mut full_grid: Array2<u64> = Array2::zeros((height * 5, width * 5));\n\n for (y, rows) in grid.rows().into_iter().enumerate() {\n\n for (x, col) in rows.iter().enumerate() {\n\n for ny in 0..5usize {\n\n for nx in 0..5usize {\n\n let mut val = *col as usize + nx + ny;\n\n while val > 9 {\n\n val -= 9;\n\n }\n\n full_grid[(ny * height + y, nx * width + x)] = val as u64;\n\n }\n\n }\n\n }\n\n }\n\n part1(&full_grid)\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/day15.rs", "rank": 24, "score": 161954.43870698934 }, { "content": "#[aoc(day15, part1)]\n\nfn part1(grid: &Array2<u64>) -> u64 {\n\n let (graph, node_grid) = array2_to_graph4(grid);\n\n let (height, width) = shape2(grid);\n\n let start: NodeIndex = node_grid[(0, 0)];\n\n let goal: NodeIndex = node_grid[(height - 1, width - 1)];\n\n let (cost, _) = petgraph::algo::astar(\n\n &graph,\n\n start,\n\n |finish| finish == goal,\n\n |e| *e.weight(),\n\n |_| 0,\n\n )\n\n .unwrap();\n\n cost\n\n}\n\n\n\n/// Part 2: Using the full map, what is the lowest total risk of any path from the\n\n/// top left to the bottom right?\n", "file_path": "src/day15.rs", "rank": 25, "score": 161868.08110224723 }, { "content": "fn fuel1(input: &[u32], target: u32) -> u32 {\n\n let mut fuel = 0;\n\n for v in input {\n\n fuel += abs_diff(*v, target);\n\n }\n\n fuel\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 26, "score": 160857.12354762905 }, { "content": "fn fuel2(input: &[u32], target: u32) -> u32 {\n\n let mut fuel = 0;\n\n for v in input {\n\n fuel += triangle_number(abs_diff(*v, target));\n\n }\n\n fuel\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 27, "score": 160857.12354762905 }, { "content": "#[aoc_generator(dayX)]\n\nfn parse_input(input: &str) -> String {\n\n input.to_string()\n\n}\n\n\n\n/// Part 1\n", "file_path": 
"src/_day0.rs", "rank": 29, "score": 152679.67288515987 }, { "content": "#[aoc_generator(day14)]\n\nfn parse_input(input: &str) -> Polymer {\n\n let mut start = String::new();\n\n let mut rules: Vec<(char, char, char)> = Vec::new();\n\n\n\n for (idx, line) in input.lines().enumerate() {\n\n match idx {\n\n 0 => start = line.to_string(),\n\n 1 => {}\n\n _ => {\n\n let parts: Vec<String> = line.split(\" -> \").map(|c| c.to_string()).collect();\n\n rules.push((\n\n parts[0].chars().next().unwrap(),\n\n parts[0].chars().nth(1).unwrap(),\n\n parts[1].chars().next().unwrap(),\n\n ));\n\n }\n\n }\n\n }\n\n Polymer { rules, start }\n\n}\n\n\n\n/// Part 1: What do you get if you take the quantity of the most common element and subtract the\n\n/// quantity of the least common element?\n", "file_path": "src/day14.rs", "rank": 30, "score": 152679.67288515987 }, { "content": "fn evolve(input: &[u8], days: u64) -> u64 {\n\n let mut count = [0u64; 9];\n\n for i in input {\n\n count[*i as usize] += 1;\n\n }\n\n for _ in 0..days {\n\n let growth = count[0];\n\n for i in 1..=8 {\n\n count[i - 1] = count[i];\n\n }\n\n count[8] = growth;\n\n count[6] += growth;\n\n }\n\n count.iter().sum()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/day6.rs", "rank": 31, "score": 152416.4984977417 }, { "content": "#[aoc_generator(day4)]\n\nfn parse_input(input: &str) -> BingoGame {\n\n let mut boards = Vec::new();\n\n let mut numbers = Vec::new();\n\n let mut buffer = String::new();\n\n const HEADER_LINES: usize = 2;\n\n const FOOTER_LINES: usize = 1;\n\n for (nr, line) in input.lines().enumerate() {\n\n if nr == 0 {\n\n numbers = line.split(',').map(|c| c.parse().unwrap()).collect();\n\n } else if nr >= HEADER_LINES {\n\n buffer += &*line.replace(\" \", \" \").trim();\n\n buffer += \"\\n\";\n\n if (nr - HEADER_LINES) % (BOARD_SIZE + FOOTER_LINES) == BOARD_SIZE {\n\n boards.push(BingoBoard::new(&buffer));\n\n buffer = String::new();\n\n }\n\n }\n\n }\n\n 
boards.push(BingoBoard::new(&buffer));\n\n\n\n BingoGame { boards, numbers }\n\n}\n\n\n\n/// Part 1: Figure out which board will win first. What will your final score be if you choose that board?\n", "file_path": "src/day4.rs", "rank": 32, "score": 149799.591756984 }, { "content": "#[aoc(day4, part2)]\n\nfn part2(game: &BingoGame) -> u64 {\n\n game.play_win_last().expect(\"no winner\")\n\n}\n\n\n\nconst BOARD_SIZE: usize = 5;\n\n\n", "file_path": "src/day4.rs", "rank": 33, "score": 148831.64440058963 }, { "content": "#[aoc(day4, part1)]\n\nfn part1(game: &BingoGame) -> u64 {\n\n game.play_win_first().expect(\"no winner\")\n\n}\n\n\n\n/// Part 2: Figure out which board will win last. Once it wins, what would its final score be?\n", "file_path": "src/day4.rs", "rank": 34, "score": 148741.9755124342 }, { "content": "/// Did you know that autocomplete tools also have contests? It's true! The score is determined by\n\n/// considering the completion string character-by-character. Start with a total score of `0`.\n\n/// Then, for each character, multiply the total score by 5 and then increase the total score by\n\n/// the point value given for the character in the following table:\n\n///\n\n/// - `)`: `1` point.\n\n/// - `]`: `2` points.\n\n/// - `}`: `3` points.\n\n/// - `>`: `4` points.\n\nfn incomplete_score(line: &str) -> usize {\n\n match check_line(line) {\n\n Ok(complete) => match complete {\n\n LineStatus::Complete => 0,\n\n LineStatus::Incomplete(stack) => {\n\n let mut score = 0;\n\n for c in stack.iter().rev() {\n\n score *= 5;\n\n score += match c {\n\n ')' => 1,\n\n ']' => 2,\n\n '}' => 3,\n\n '>' => 4,\n\n _ => 0,\n\n }\n\n }\n\n score\n\n }\n\n },\n\n Err(_) => 0,\n\n }\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 35, "score": 148542.01085358852 }, { "content": "#[aoc(day2, part2)]\n\nfn part2(input: &[Instr]) -> i64 {\n\n let (x, y) = execute2(input);\n\n x * y\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 36, "score": 147999.49866602602 }, { 
"content": "#[aoc(day14, part2)]\n\nfn part2(input: &Polymer) -> usize {\n\n let frequencies = evolve2(input, 40);\n\n let min = *frequencies.values().min().unwrap();\n\n let max = *frequencies.values().max().unwrap();\n\n max - min\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 37, "score": 147999.49866602602 }, { "content": "#[aoc(day1, part2)]\n\nfn part2(input: &[i64]) -> usize {\n\n let threesome_sum: Vec<i64> = input.windows(3).map(|n| n.iter().sum()).collect();\n\n count_increases(&threesome_sum)\n\n}\n\n\n", "file_path": "src/day1.rs", "rank": 38, "score": 147999.49866602602 }, { "content": "#[aoc(day2, part1)]\n\nfn part1(input: &[Instr]) -> i64 {\n\n let (x, y) = execute1(input);\n\n x * y\n\n}\n\n\n\n/// Part 2:\n\n/// Using this new interpretation of the commands, calculate the horizontal position and depth\n\n/// you would have after following the planned course.\n\n/// What do you get if you multiply your final horizontal position by your final depth?\n", "file_path": "src/day2.rs", "rank": 39, "score": 147909.07361902492 }, { "content": "#[aoc(day14, part1)]\n\nfn part1(input: &Polymer) -> usize {\n\n let str = evolve1(input, 10);\n\n let frequencies = map_char_frequences(&str);\n\n let min = *frequencies.values().min().unwrap();\n\n let max = *frequencies.values().max().unwrap();\n\n max - min\n\n}\n\n\n\n/// Part 2\n", "file_path": "src/day14.rs", "rank": 40, "score": 147909.07361902492 }, { "content": "#[aoc(day1, part1)]\n\nfn part1(input: &[i64]) -> usize {\n\n count_increases(input)\n\n}\n\n\n\n/// Part 2: How many sums are larger than the previous sum?\n", "file_path": "src/day1.rs", "rank": 41, "score": 147909.07361902492 }, { "content": "#[aoc_generator(day9)]\n\nfn parse_input(input: &str) -> Array2<u8> {\n\n parse_array2(input)\n\n}\n\n\n\n/// Part 1: What is the sum of the risk levels of all low points on your heightmap?\n", "file_path": "src/day9.rs", "rank": 42, "score": 146916.8686208667 }, { "content": "#[aoc_generator(day1)]\n\nfn 
parse_input(input: &str) -> Vec<i64> {\n\n input.lines().map(|line| line.parse().unwrap()).collect()\n\n}\n\n\n\n/// Part 1: How many measurements are larger than the previous measurement?\n", "file_path": "src/day1.rs", "rank": 43, "score": 146916.8686208667 }, { "content": "#[aoc_generator(day2)]\n\nfn parse_input(input: &str) -> Vec<Instr> {\n\n input\n\n .lines()\n\n .map(|line| {\n\n let parts: Vec<&str> = line.split(' ').collect();\n\n match parts[0] {\n\n \"forward\" => Instr::Forward(parts[1].parse().unwrap()),\n\n \"up\" => Instr::Up(parts[1].parse().unwrap()),\n\n \"down\" => Instr::Down(parts[1].parse().unwrap()),\n\n _ => {\n\n panic!(\"invalid format\")\n\n }\n\n }\n\n })\n\n .collect()\n\n}\n\n\n\n/// Part 1:\n\n/// Calculate the horizontal position and depth you would have after following the planned course.\n\n/// What do you get if you multiply your final horizontal position by your final depth?\n", "file_path": "src/day2.rs", "rank": 44, "score": 146916.8686208667 }, { "content": "#[aoc_generator(day11)]\n\nfn parse_input(input: &str) -> Array2<u8> {\n\n parse_array2(input)\n\n}\n\n\n\n/// Part 1: Given the starting energy levels of the dumbo octopuses in your cavern,\n\n/// simulate 100 steps. 
How many total flashes are there after 100 steps?\n", "file_path": "src/day11.rs", "rank": 45, "score": 146916.8686208667 }, { "content": "#[aoc_generator(day6)]\n\nfn parse_input(input: &str) -> Vec<u8> {\n\n input.split(',').map(|line| line.parse().unwrap()).collect()\n\n}\n\n\n\n/// Part 1: How many lanternfish would there be after 80 days?\n", "file_path": "src/day6.rs", "rank": 46, "score": 146916.8686208667 }, { "content": "#[aoc(day9, part2)]\n\nfn part2(grid: &Array2<u8>) -> u64 {\n\n let mut basins = find_basins(grid);\n\n basins.sort_unstable();\n\n basins.reverse();\n\n basins[0..3].iter().product()\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 47, "score": 145753.14412159266 }, { "content": "#[aoc(day9, part1)]\n\nfn part1(grid: &Array2<u8>) -> u64 {\n\n find_low_points(grid).iter().sum()\n\n}\n\n\n\n/// Part 2: What do you get if you multiply together the sizes of the three largest basins?\n", "file_path": "src/day9.rs", "rank": 48, "score": 145666.78651685055 }, { "content": "/// To calculate the syntax error score for a line, take\n\n/// the first illegal character on the line and look it up in the following table:\n\n/// - `)`: `3` points.\n\n/// - `]`: `57` points.\n\n/// - `}`: `1197` points.\n\n/// - `>`: `25137` points.\n\nfn syntax_error_score(line: &str) -> usize {\n\n match check_line(line) {\n\n Ok(_) => 0,\n\n Err((_, found)) => match found {\n\n ')' => 3,\n\n ']' => 57,\n\n '}' => 1197,\n\n '>' => 25137,\n\n _ => 0,\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 49, "score": 145520.68457185212 }, { "content": "#[aoc_generator(day12)]\n\nfn parse_input(input: &str) -> Graph<CaveNode, ()> {\n\n let mut graph = Graph::new();\n\n let mut node_by_name: HashMap<String, NodeIndex> = HashMap::new();\n\n for line in input.lines() {\n\n let parts: Vec<&str> = line.split('-').collect();\n\n let from_name = parts[0];\n\n let to_name = parts[1];\n\n if !node_by_name.contains_key(from_name) {\n\n let idx = 
graph.add_node(CaveNode::parse(from_name));\n\n node_by_name.insert(from_name.to_string(), idx);\n\n }\n\n if !node_by_name.contains_key(to_name) {\n\n let idx = graph.add_node(CaveNode::parse(to_name));\n\n node_by_name.insert(to_name.to_string(), idx);\n\n }\n\n let from_idx = node_by_name[from_name];\n\n let to_idx = node_by_name[to_name];\n\n graph.add_edge(from_idx, to_idx, ());\n\n graph.add_edge(to_idx, from_idx, ());\n\n }\n\n graph\n\n}\n\n\n\n/// Part 1: How many paths through this cave system are there that visit small caves at most once?\n", "file_path": "src/day12.rs", "rank": 50, "score": 144218.3254671421 }, { "content": "fn check_line(line: &str) -> Result<LineStatus, (char, char)> {\n\n let mut stack: Vec<char> = Vec::new();\n\n for char in line.chars() {\n\n match char {\n\n '(' => stack.push(')'),\n\n '[' => stack.push(']'),\n\n '{' => stack.push('}'),\n\n '<' => stack.push('>'),\n\n ')' | ']' | '}' | '>' => {\n\n if let Some(last) = stack.last() {\n\n if char == *last {\n\n stack.pop();\n\n } else {\n\n return Err((*last, char));\n\n }\n\n } else {\n\n return Err(('?', char));\n\n }\n\n }\n\n _ => {\n", "file_path": "src/day10.rs", "rank": 51, "score": 143866.93432887743 }, { "content": "#[aoc(day11, part2)]\n\nfn part2(input: &Array2<u8>) -> usize {\n\n let mut step = 0;\n\n let mut state = (*input).clone();\n\n loop {\n\n let (_, new_state) = evolve(&state, 1);\n\n state = new_state;\n\n step += 1;\n\n if state.iter().map(|s| *s as u64).sum::<u64>() == 0 {\n\n break;\n\n }\n\n }\n\n\n\n step\n\n}\n\n\n", "file_path": "src/day11.rs", "rank": 52, "score": 141844.26037515816 }, { "content": "#[aoc(day11, part1)]\n\nfn part1(input: &Array2<u8>) -> usize {\n\n let (flashes, _grid) = evolve(input, 100);\n\n flashes\n\n}\n\n\n\n/// Part 2: What is the first step during which all octopuses flash?\n", "file_path": "src/day11.rs", "rank": 53, "score": 141757.90277041603 }, { "content": "#[aoc(day12, part1)]\n\nfn part1(input: &Graph<CaveNode, ()>) -> 
usize {\n\n build_paths(input, 1).len()\n\n}\n\n\n\n// /// Part 2: Given these new rules, how many paths through this cave system are there?\n\n// #[aoc(day12, part2)]\n\n// fn part2(input: &Graph<CaveNode, ()>) -> usize {\n\n// build_paths(input, 2).len()\n\n// }\n\n\n", "file_path": "src/day12.rs", "rank": 54, "score": 138759.37422963942 }, { "content": "#[aoc_generator(day8)]\n\nfn parse_input(input: &str) -> Vec<(Vec<String>, Vec<String>)> {\n\n input\n\n .lines()\n\n .map(|line| {\n\n let parts: Vec<&str> = line.split(\" | \").collect();\n\n (\n\n parts[0].split(' ').map(sort).collect(),\n\n parts[1].split(' ').map(sort).collect(),\n\n )\n\n })\n\n .collect()\n\n}\n\n\n\n/// Part 1: In the output values, how many times do digits `1`, `4`, `7`, or `8` appear?\n", "file_path": "src/day8.rs", "rank": 55, "score": 132078.99744108977 }, { "content": "#[aoc(day8, part1)]\n\nfn part1(input: &[(Vec<String>, Vec<String>)]) -> usize {\n\n let mut cnt = 0;\n\n // 1: 2 segments\n\n // 7: 3 segments\n\n // 4: 4 segments\n\n // 8: 7 segments\n\n for (_, outputs) in input {\n\n for output in outputs {\n\n match output.len() {\n\n 2 | 3 | 4 | 7 => cnt += 1,\n\n _ => {}\n\n }\n\n }\n\n }\n\n cnt\n\n}\n\n\n\n/// Part 2: What do you get if you add up all of the output values?\n", "file_path": "src/day8.rs", "rank": 56, "score": 130887.83054299577 }, { "content": "fn explore_basin(grid: &Array2<u8>, start: (usize, usize)) -> u64 {\n\n let mut already_visited = vec![start];\n\n let mut to_explore = get_neighbors4(grid, &start);\n\n let mut to_add: Vec<(usize, usize)> = Vec::new();\n\n\n\n while !to_explore.is_empty() {\n\n for (y, x) in &to_explore {\n\n let pos = (*y, *x);\n\n if grid[(*y, *x)] != 9 && !already_visited.contains(&pos) && !to_add.contains(&pos) {\n\n to_add.append(&mut get_neighbors4(grid, &pos));\n\n already_visited.push(pos);\n\n }\n\n }\n\n to_explore = to_add;\n\n to_add = Vec::new();\n\n }\n\n\n\n already_visited.len() as u64\n\n}\n\n\n", "file_path": 
"src/day9.rs", "rank": 57, "score": 128123.90817477088 }, { "content": "/// parses ascii based grids into Array2\n\npub fn parse_array2<T>(input: &str) -> Array2<T>\n\nwhere\n\n T: FromStr + Default + Debug,\n\n <T as FromStr>::Err: Debug,\n\n{\n\n let mut grid: Array2<T> =\n\n Array2::default((input.lines().count(), input.lines().next().unwrap().len()));\n\n for (y, line) in input.lines().enumerate() {\n\n for (x, digit) in line.chars().enumerate() {\n\n let val = digit.to_string().parse().unwrap();\n\n grid[(y, x)] = val;\n\n }\n\n }\n\n grid\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 58, "score": 127431.75253887453 }, { "content": "fn map_char_frequences(input: &str) -> HashMap<char, usize> {\n\n let mut map = HashMap::new();\n\n for char in input.chars() {\n\n map.entry(char).or_insert(0);\n\n *map.get_mut(&char).unwrap() += 1;\n\n }\n\n map\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 59, "score": 127405.59380692136 }, { "content": "// named after nightly-only u32::abs_diff:\n\n// https://doc.rust-lang.org/std/primitive.u32.html#method.abs_diff\n\nfn abs_diff(a: u32, b: u32) -> u32 {\n\n if a > b {\n\n a - b\n\n } else {\n\n b - a\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"16,1,2,0,4,2,7,1,2,14\";\n\n\n\n #[test]\n\n fn part1_examples() {\n\n // This costs a total of `37` fuel. This is the cheapest possible outcome\n\n assert_eq!(part1(&parse_input(EXAMPLE)), 37);\n\n }\n\n\n\n #[test]\n\n fn part2_examples() {\n\n // This costs a total of `168` fuel. 
This is the new cheapest possible outcome\n\n assert_eq!(part2(&parse_input(EXAMPLE)), 168);\n\n }\n\n}\n", "file_path": "src/day7.rs", "rank": 60, "score": 115959.72980494016 }, { "content": "// https://en.wikipedia.org/wiki/Triangular_number\n\nfn triangle_number(n: u32) -> u32 {\n\n n * (n + 1) / 2\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 61, "score": 113085.28862136383 }, { "content": "// returns true if every char of needles if present in the haystack\n\nfn string_contains_chars(haystack: &str, needles: &str) -> bool {\n\n for c in needles.chars() {\n\n if !haystack.contains(c) {\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 63, "score": 101384.70122234127 }, { "content": "fn grid_size(lines: &[Line]) -> (usize, usize) {\n\n let mut cols = 0;\n\n let mut rows = 0;\n\n\n\n for line in lines {\n\n if line.begin.x > cols {\n\n cols = line.begin.x;\n\n }\n\n if line.end.x > cols {\n\n cols = line.end.x;\n\n }\n\n if line.begin.y > rows {\n\n rows = line.begin.y;\n\n }\n\n if line.end.y > rows {\n\n rows = line.end.y;\n\n }\n\n }\n\n\n\n (cols + 1, rows + 1)\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 64, "score": 101112.68728348508 }, { "content": "fn build_grid(lines: &[Line]) -> Grid<u8> {\n\n let (cols, rows) = grid_size(lines);\n\n let mut grid = Grid::new(rows, cols);\n\n for line in lines {\n\n for point in line.build_points() {\n\n grid[point.y][point.x] += 1;\n\n }\n\n }\n\n grid\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"0,9 -> 5,9\n\n8,0 -> 0,8\n\n9,4 -> 3,4\n\n2,2 -> 2,1\n\n7,0 -> 7,4\n", "file_path": "src/day5.rs", "rank": 65, "score": 101112.68728348508 }, { "content": "fn sort(s: &str) -> String {\n\n let mut chars: Vec<char> = s.chars().collect();\n\n chars.sort_by(|a, b| b.cmp(a));\n\n String::from_iter(chars)\n\n}\n\n\n", "file_path": "src/day8.rs", "rank": 66, "score": 98541.29807370127 }, { "content": "fn find_basins(grid: &Array2<u8>) -> 
Vec<u64> {\n\n let mut basins = Vec::new();\n\n for (y, row) in grid.rows().into_iter().enumerate() {\n\n for (x, col) in row.iter().enumerate() {\n\n let neighbors = get_neighbors4(grid, &(y, x));\n\n let mut higher_found = false;\n\n for (ny, nx) in &neighbors {\n\n if *col >= grid[(*ny, *nx)] {\n\n higher_found = true;\n\n break;\n\n }\n\n }\n\n if higher_found {\n\n continue;\n\n }\n\n basins.push(explore_basin(grid, (y, x)));\n\n }\n\n }\n\n basins\n\n}\n", "file_path": "src/day9.rs", "rank": 67, "score": 87430.69610511909 }, { "content": "fn find_low_points(grid: &Array2<u8>) -> Vec<u64> {\n\n let mut lows = Vec::new();\n\n for (y, row) in grid.rows().into_iter().enumerate() {\n\n for (x, col) in row.iter().enumerate() {\n\n let neighbors = get_neighbors4(grid, &(y, x));\n\n let mut higher_found = false;\n\n for (ny, nx) in &neighbors {\n\n if *col >= grid[(*ny, *nx)] {\n\n higher_found = true;\n\n break;\n\n }\n\n }\n\n if higher_found {\n\n continue;\n\n }\n\n lows.push((*col + 1) as u64)\n\n }\n\n }\n\n lows\n\n}\n\n\n", "file_path": "src/day9.rs", "rank": 68, "score": 85698.79568598318 }, { "content": "fn decode(unique_signal_patterns: &[String], value: &[String]) -> u64 {\n\n let map = build_map(unique_signal_patterns);\n\n let mut output = String::new();\n\n for digit in value {\n\n output += &*map.get(digit).unwrap().to_string();\n\n }\n\n output.parse().unwrap()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str =\n\n \"be cfbegad cbdgef fgaecd cgeb fdcge agebfd fecdb fabcd edb | fdgacbe cefdb cefbgd gcbe\n\nedbfga begcd cbg gc gcadebf fbgde acbgfd abcde gfcbed gfec | fcgedb cgb dgebacf gc\n\nfgaebd cg bdaec gdafb agbcfd gdcbef bgcad gfac gcb cdgabef | cg cg fdcagb cbg\n\nfbegcd cbd adcefb dageb afcb bc aefdc ecdab fgdeca fcdbega | efabcd cedba gadfec cb\n\naecbfdg fbg gf bafeg dbefa fcge gcbea fcaegb dgceab fcbdga | gecf egdcabf bgf bfgea\n\nfgeab ca afcebg bdacfeg cfaedg gcfdb baec bfadeg bafgc acf | gebdcfa ecba 
ca fadegcb\n", "file_path": "src/day8.rs", "rank": 69, "score": 85698.79568598318 }, { "content": "fn evolve1(input: &Polymer, steps: usize) -> String {\n\n let mut state: Vec<char> = input.start.chars().collect();\n\n for _step in 0..steps {\n\n let mut next_state = state.clone();\n\n let mut idx = 1;\n\n let mut inserts: Vec<(usize, char)> = Vec::new();\n\n for w in state.windows(2) {\n\n for rule in &input.rules {\n\n if w[0] == rule.0 && w[1] == rule.1 {\n\n inserts.push((idx, rule.2));\n\n }\n\n }\n\n idx += 1;\n\n }\n\n inserts.reverse();\n\n for (idx, char) in &inserts {\n\n next_state.insert(*idx, *char);\n\n }\n\n\n\n state = next_state\n\n }\n\n String::from_iter(state)\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 70, "score": 85548.70790609092 }, { "content": "fn build_map(unique_signal_patterns: &[String]) -> HashMap<String, u64> {\n\n let mut output = HashMap::new();\n\n\n\n let signal1 = unique_signal_patterns\n\n .iter()\n\n .find(|s| s.len() == 2)\n\n .expect(\"failed to find 1\");\n\n output.insert(signal1.to_string(), 1); // ab: 1\n\n\n\n let signal7 = unique_signal_patterns\n\n .iter()\n\n .find(|s| s.len() == 3)\n\n .expect(\"failed to find 7\");\n\n output.insert(signal7.to_string(), 7); // dab: 7\n\n\n\n let signal4 = unique_signal_patterns\n\n .iter()\n\n .find(|s| s.len() == 4)\n\n .expect(\"failed to find 4\");\n\n output.insert(signal4.to_string(), 4); // eafb: 4\n", "file_path": "src/day8.rs", "rank": 71, "score": 82564.01647631256 }, { "content": "fn evolve2(input: &Polymer, steps: usize) -> HashMap<char, usize> {\n\n let mut frequencies = map_char_frequences(&input.start);\n\n let chars: Vec<char> = input.start.chars().collect();\n\n let windows: Vec<(char, char)> = chars.windows(2).map(|w| (w[0], w[1])).collect();\n\n let mut window_map: HashMap<(char, char), usize> = HashMap::new();\n\n for w in windows {\n\n window_map.entry(w).or_insert(0);\n\n *window_map.get_mut(&w).unwrap() += 1;\n\n }\n\n for _ in 0..steps {\n\n let mut 
next_state = window_map.clone();\n\n\n\n for (a, b, c) in &input.rules {\n\n // AB -> C\n\n let ab = (*a, *b);\n\n let ac = (*a, *c);\n\n let cb = (*c, *b);\n\n if window_map.contains_key(&ab) && window_map[&ab] > 0 {\n\n let cnt = window_map[&ab];\n\n *next_state.get_mut(&ab).unwrap() -= cnt;\n", "file_path": "src/day14.rs", "rank": 72, "score": 77673.02294130801 }, { "content": "/// Counts how many values in collection increase compared to previous\n\nfn count_increases(collection: &[i64]) -> usize {\n\n collection\n\n .windows(2)\n\n .filter(|window| window[1] > window[0])\n\n .count()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"199\n\n200\n\n208\n\n210\n\n200\n\n207\n\n240\n\n269\n\n260\n", "file_path": "src/day1.rs", "rank": 73, "score": 69813.34618815793 }, { "content": "fn grid_str(grid: &Array2<bool>) -> String {\n\n let mut output = String::new();\n\n for row in grid.rows() {\n\n let mut line = String::new();\n\n for col in row {\n\n if *col {\n\n line += \"#\";\n\n } else {\n\n line += \".\";\n\n }\n\n }\n\n output += &line;\n\n output += \"\\n\";\n\n }\n\n output.trim().to_string()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/day13.rs", "rank": 74, "score": 66635.69701791224 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Line {\n\n begin: Point,\n\n end: Point,\n\n}\n\n\n\nimpl Line {\n\n fn parse(input: &str) -> Line {\n\n let parts: Vec<&str> = input.split(\" -> \").collect();\n\n Line {\n\n begin: Point::parse(parts[0]),\n\n end: Point::parse(parts[1]),\n\n }\n\n }\n\n\n\n fn is_diagonal(&self) -> bool {\n\n !(self.begin.x == self.end.x || self.begin.y == self.end.y)\n\n }\n\n\n\n fn build_points(&self) -> Vec<Point> {\n\n let mut arr = Vec::new();\n", "file_path": "src/day5.rs", "rank": 75, "score": 64033.68651959562 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Input {\n\n grid: Array2<bool>,\n\n folds: Vec<Fold>,\n\n}\n\n\n", "file_path": "src/day13.rs", 
"rank": 76, "score": 58349.944115446575 }, { "content": "fn build_paths(graph: &Graph<CaveNode, ()>, max_small_caves: u8) -> Vec<Vec<NodeIndex>> {\n\n let mut paths = Vec::new();\n\n let history = Arc::new(RefCell::new(Vec::new()));\n\n\n\n for node_idx in graph.node_indices() {\n\n if let CaveNode::Start = graph.node_weight(node_idx).unwrap() {\n\n for edge in graph.edges(node_idx) {\n\n let target_idx = edge.target();\n\n let small_caves = HashMap::new();\n\n paths.append(&mut decend(\n\n graph,\n\n history.clone(),\n\n small_caves,\n\n max_small_caves,\n\n vec![node_idx, target_idx],\n\n ));\n\n }\n\n\n\n // let mut dfs = Dfs::new(&graph, node_idx);\n\n // while let Some(nx) = dfs.next(&graph) {}\n", "file_path": "src/day12.rs", "rank": 77, "score": 52941.14227575198 }, { "content": "fn decend(\n\n graph: &Graph<CaveNode, ()>,\n\n visited: Arc<RefCell<Vec<Vec<NodeIndex>>>>,\n\n mut small_caves: HashMap<NodeIndex, u8>,\n\n max_small_caves: u8,\n\n path: Vec<NodeIndex>,\n\n) -> Vec<Vec<NodeIndex>> {\n\n let mut paths = Vec::new();\n\n let last_idx = path[path.len() - 1];\n\n let last_node = graph.node_weight(last_idx).unwrap();\n\n if let CaveNode::SmallCave(_) = last_node {\n\n small_caves.entry(last_idx).or_insert(0);\n\n *small_caves.get_mut(&last_idx).unwrap() += 1;\n\n }\n\n for edge in graph.edges(last_idx) {\n\n let target_idx = edge.target();\n\n let target_node = graph.node_weight(target_idx).unwrap();\n\n\n\n match target_node {\n\n CaveNode::Start => {}\n", "file_path": "src/day12.rs", "rank": 78, "score": 48418.96658709609 }, { "content": "fn execute2(instructions: &[Instr]) -> (i64, i64) {\n\n let mut pos = (0, 0);\n\n let mut aim = 0;\n\n for instr in instructions {\n\n match instr {\n\n Instr::Forward(amount) => {\n\n pos.0 += amount;\n\n pos.1 += aim * amount;\n\n }\n\n Instr::Up(amount) => aim -= amount,\n\n Instr::Down(amount) => aim += amount,\n\n }\n\n }\n\n pos\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 79, "score": 40492.58517526435 }, 
{ "content": "fn execute1(instructions: &[Instr]) -> (i64, i64) {\n\n let mut pos = (0, 0);\n\n for instr in instructions {\n\n match instr {\n\n Instr::Forward(amount) => pos.0 += amount,\n\n Instr::Up(amount) => pos.1 -= amount,\n\n Instr::Down(amount) => pos.1 += amount,\n\n }\n\n }\n\n pos\n\n}\n\n\n", "file_path": "src/day2.rs", "rank": 80, "score": 40492.58517526435 }, { "content": "fn range(a: usize, b: usize) -> Vec<usize> {\n\n if a < b {\n\n (a..=b).collect()\n\n } else {\n\n (b..=a).rev().collect()\n\n }\n\n}\n\n\n", "file_path": "src/day5.rs", "rank": 81, "score": 39311.87727352992 }, { "content": "/// shape of given array2 as tuple\n\npub fn shape2<T>(grid: &Array2<T>) -> (usize, usize) {\n\n let shape = grid.shape();\n\n (shape[0], shape[1])\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 82, "score": 36079.188377682374 }, { "content": "fn execute_fold(grid: &Array2<bool>, fold: &Fold) -> Array2<bool> {\n\n match *fold {\n\n Fold::FoldUp(fold_y) => {\n\n let shape = grid.shape();\n\n let mut new_grid: Array2<bool> = Array2::default((fold_y, shape[1]));\n\n for y in 0..fold_y {\n\n for x in 0..shape[1] {\n\n let mirror_y = fold_y + (fold_y - y);\n\n let mirror_side = if mirror_y < shape[0] {\n\n grid[(mirror_y, x)]\n\n } else {\n\n false\n\n };\n\n new_grid[(y, x)] = grid[(y, x)] || mirror_side;\n\n }\n\n }\n\n new_grid\n\n }\n\n Fold::FoldLeft(fold_x) => {\n\n let shape = grid.shape();\n", "file_path": "src/day13.rs", "rank": 83, "score": 35292.43482222625 }, { "content": "/// You can model the energy levels and flashes of light in steps. During a single step,\n\n/// the following occurs:\n\n///\n\n/// - First, the energy level of each octopus increases by `1`.\n\n/// - Then, any octopus with an energy level greater than `9` flashes. 
This increases the energy\n\n/// level of all adjacent octopuses by `1`, including octopuses that are diagonally adjacent.\n\n/// If this causes an octopus to have an energy level greater than `9`, it also flashes.\n\n/// This process continues as long as new octopuses keep having their energy level\n\n/// increased beyond `9`. (An octopus can only flash at most once per step.)\n\n/// - Finally, any octopus that flashed during this step has its energy level set to `0`,\n\n/// as it used all of its energy to flash.\n\n///\n\n/// Adjacent flashes can cause an octopus to flash on a step even if it begins that\n\n/// step with very little energy.\n\nfn evolve(energy_map: &Array2<u8>, steps: usize) -> (usize, Array2<u8>) {\n\n let mut state = (*energy_map).clone();\n\n let mut flashes = 0;\n\n for _ in 0..steps {\n\n let shape = energy_map.shape();\n\n let mut next_state = Array2::zeros((shape[0], shape[1]));\n\n let mut step_flashes: Vec<(usize, usize)> = Vec::new();\n\n for (y, row) in state.rows().into_iter().enumerate() {\n\n for (x, col) in row.iter().enumerate() {\n\n if *col < 9 {\n\n next_state[(y, x)] = *col + 1;\n\n } else {\n\n next_state[(y, x)] = 0;\n\n step_flashes.push((y, x));\n\n }\n\n }\n\n }\n\n let mut todo_flashes = step_flashes.clone();\n\n while !todo_flashes.is_empty() {\n\n let flash = todo_flashes.pop().unwrap();\n", "file_path": "src/day11.rs", "rank": 84, "score": 34080.869235937964 }, { "content": "#[derive(Debug, Clone, PartialEq)]\n\nenum LineStatus {\n\n Complete,\n\n Incomplete(Vec<char>),\n\n}\n\n\n", "file_path": "src/day10.rs", "rank": 85, "score": 32087.8284516903 }, { "content": "/// gets 8 neighbor positions of given pos in grid\n\npub fn get_neighbors8(grid: &Array2<u8>, pos: &(usize, usize)) -> Vec<(usize, usize)> {\n\n let (height, width) = shape2(grid);\n\n let mut list = get_neighbors4(grid, pos);\n\n // up/left\n\n if pos.0 > 0 && pos.1 > 0 {\n\n list.push((pos.0 - 1, pos.1 - 1));\n\n }\n\n // up/right\n\n if pos.0 > 0 && 
pos.1 < width - 1 {\n\n list.push((pos.0 - 1, pos.1 + 1));\n\n }\n\n // down/right\n\n if pos.0 < height - 1 && pos.1 > 0 {\n\n list.push((pos.0 + 1, pos.1 - 1));\n\n }\n\n // down/left\n\n if pos.0 < height - 1 && pos.1 < width - 1 {\n\n list.push((pos.0 + 1, pos.1 + 1));\n\n }\n\n list\n\n}\n", "file_path": "src/util.rs", "rank": 86, "score": 31889.413911826858 }, { "content": "/// gets 4 neighbor positions of given pos in grid\n\npub fn get_neighbors4<T>(grid: &Array2<T>, pos: &(usize, usize)) -> Vec<(usize, usize)> {\n\n let (height, width) = shape2(grid);\n\n let mut list = Vec::new();\n\n // up\n\n if pos.0 > 0 {\n\n list.push((pos.0 - 1, pos.1));\n\n }\n\n // right\n\n if pos.1 > 0 {\n\n list.push((pos.0, pos.1 - 1));\n\n }\n\n // down\n\n if pos.0 < height - 1 {\n\n list.push((pos.0 + 1, pos.1));\n\n }\n\n // left\n\n if pos.1 < width - 1 {\n\n list.push((pos.0, pos.1 + 1));\n\n }\n\n list\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 87, "score": 30897.75485428229 }, { "content": "/// converts given Array2 to a Graph\n\npub fn array2_to_graph4<T: Clone>(grid: &Array2<T>) -> (Graph<T, T>, Array2<NodeIndex>) {\n\n let mut graph: Graph<T, T> = Graph::new();\n\n let (height, width) = shape2(grid);\n\n let mut node_grid: Array2<NodeIndex> = Array2::default((height, width));\n\n for (y, row) in grid.rows().into_iter().enumerate() {\n\n for (x, col) in row.iter().enumerate() {\n\n node_grid[(y, x)] = graph.add_node(col.clone());\n\n }\n\n }\n\n for (y, row) in grid.rows().into_iter().enumerate() {\n\n for (x, col) in row.iter().enumerate() {\n\n for (ny, nx) in get_neighbors4(grid, &(y, x)) {\n\n let from_node = node_grid[(y, x)];\n\n let to_node = node_grid[(ny, nx)];\n\n graph.add_edge(from_node, to_node, grid[(ny, nx)].clone());\n\n graph.add_edge(to_node, from_node, col.clone());\n\n }\n\n }\n\n }\n\n (graph, node_grid)\n\n}\n\n\n", "file_path": "src/util.rs", "rank": 88, "score": 30319.319684308386 }, { "content": "// use super::*;\n\n//\n\n// const 
EXAMPLE: &str = \"\";\n\n//\n\n// #[test]\n\n// fn part1_examples() {\n\n// assert_eq!(0, part1(&parse_input(EXAMPLE)));\n\n// }\n\n//\n\n// // #[test]\n\n// // fn part2_examples() {\n\n// // assert_eq!(0, part2(&parse_input(EXAMPLE)));\n\n// // }\n\n// }\n", "file_path": "src/day16.rs", "rank": 89, "score": 16.85151784196904 }, { "content": "mod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"1163751742\n\n1381373672\n\n2136511328\n\n3694931569\n\n7463417111\n\n1319128137\n\n1359912421\n\n3125421639\n\n1293138521\n\n2311944581\";\n\n\n\n #[test]\n\n fn part1_examples() {\n\n assert_eq!(40, part1(&parse_input(EXAMPLE)));\n\n }\n\n\n\n #[test]\n\n fn part2_examples() {\n\n assert_eq!(315, part2(&parse_input(EXAMPLE)));\n\n }\n\n}\n", "file_path": "src/day15.rs", "rank": 90, "score": 16.494317750433705 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const EXAMPLE: &str = \"2199943210\n\n3987894921\n\n9856789892\n\n8767896789\n\n9899965678\";\n\n\n\n #[test]\n\n fn part1_examples() {\n\n assert_eq!(part1(&parse_input(EXAMPLE)), 15);\n\n }\n\n\n\n #[test]\n\n fn part2_examples() {\n\n assert_eq!(part2(&parse_input(EXAMPLE)), 1134);\n\n }\n\n}\n", "file_path": "src/day9.rs", "rank": 91, "score": 16.152560358241256 }, { "content": "// /// Part 1: what do you get if you add up the version numbers in all packets?\n\n// #[aoc(day16, part1)]\n\n// fn part1(_input: &Transmission) -> usize {\n\n// todo!();\n\n// }\n\n//\n\n// // fn decode(hex: &str) -> u64 {\n\n// // // D2FE28\n\n// // }\n\n//\n\n// /// Part 2\n\n// // #[aoc(day16, part2)]\n\n// // fn part2(input: &str) -> usize {\n\n// // todo!();\n\n// // }\n\n//\n\n// struct Transmission {}\n\n//\n\n// #[cfg(test)]\n\n// mod tests {\n", "file_path": "src/day16.rs", "rank": 92, "score": 15.913512289028711 }, { "content": "6,4 -> 2,0\n\n0,9 -> 2,9\n\n3,4 -> 1,4\n\n0,0 -> 8,8\n\n5,5 -> 8,2\";\n\n\n\n #[test]\n\n fn part1_examples() {\n\n // In the above example, this is anywhere in the 
diagram with a `2` or\n\n // larger - a total of `5` points.\n\n let lines = parse_input(EXAMPLE);\n\n assert_eq!(part1(&lines), 5);\n\n }\n\n\n\n #[test]\n\n fn part2_examples() {\n\n // In the above example, this is still anywhere in the diagram with a `2` or larger - now a\n\n // total of `12` points.\n\n let lines = parse_input(EXAMPLE);\n\n assert_eq!(part2(&lines), 12);\n\n }\n\n}\n", "file_path": "src/day5.rs", "rank": 93, "score": 13.98986338639478 }, { "content": " assert_eq!(folded1.shape(), [7, 11]);\n\n assert_eq!(grid_str(&folded1), EXAMPLE_GRID_FOLD_UP);\n\n assert_eq!(part1(&input), 17);\n\n let folded2 = execute_fold(&folded1, &Fold::FoldLeft(5));\n\n assert_eq!(folded2.shape(), [7, 5]);\n\n assert_eq!(grid_str(&folded2), EXAMPLE_GRID_FOLD_LEFT);\n\n // println!(\"input {:?}\", input);\n\n // assert_eq!(0, part1(&input));\n\n }\n\n\n\n // #[test]\n\n // fn part2_examples() {\n\n // assert_eq!(0, part2(&parse_input(EXAMPLE)));\n\n // }\n\n}\n", "file_path": "src/day13.rs", "rank": 94, "score": 12.767095109292416 }, { "content": " const EXAMPLE: &str = \"start-A\n\nstart-b\n\nA-c\n\nA-b\n\nb-d\n\nA-end\n\nb-end\";\n\n\n\n #[test]\n\n fn part1_examples() {\n\n let expected_paths = vec![\n\n \"start,A,b,A,c,A,end\",\n\n \"start,A,b,A,end\",\n\n \"start,A,b,end\",\n\n \"start,A,c,A,b,A,end\",\n\n \"start,A,c,A,b,end\",\n\n \"start,A,c,A,end\",\n\n \"start,A,end\",\n\n \"start,b,A,c,A,end\",\n\n \"start,b,A,end\",\n", "file_path": "src/day12.rs", "rank": 95, "score": 10.952360521194883 }, { "content": "263\";\n\n\n\n #[test]\n\n fn part1_examples() {\n\n // In this example, there are `7` measurements that are larger than the previous measurement.\n\n assert_eq!(part1(&parse_input(EXAMPLE)), 7);\n\n }\n\n\n\n #[test]\n\n fn part2_examples() {\n\n // In this example, there are `5` sums that are larger than the previous sum.\n\n assert_eq!(part2(&parse_input(EXAMPLE)), 5);\n\n }\n\n}\n", "file_path": "src/day1.rs", "rank": 96, "score": 10.563923975205341 }, 
{ "content": " // After step 4: NBBNBNBBCCNBCNCCNBBNBBNBBBNBBNBBCBHCBHHNHCBBCBHCB\n\n assert_eq!(\n\n \"NBBNBNBBCCNBCNCCNBBNBBNBBBNBBNBBCBHCBHHNHCBBCBHCB\",\n\n evolve1(&parse_input(EXAMPLE), 4)\n\n );\n\n assert_eq!(1588, part1(&parse_input(EXAMPLE)));\n\n }\n\n\n\n #[test]\n\n fn part2_examples() {\n\n assert_eq!(2188189693529, part2(&parse_input(EXAMPLE)));\n\n }\n\n}\n", "file_path": "src/day14.rs", "rank": 97, "score": 10.494940460498468 }, { "content": " fn play(&mut self, n: u8) -> Option<u64> {\n\n for x in 0..BOARD_SIZE {\n\n for y in 0..BOARD_SIZE {\n\n if self.values[y][x] == n {\n\n self.marked[y][x] = true;\n\n if self.has_win() {\n\n return Some(self.score());\n\n }\n\n }\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn score(&self) -> u64 {\n\n let mut score = 0;\n\n for x in 0..BOARD_SIZE {\n\n for y in 0..BOARD_SIZE {\n\n if !self.marked[y][x] {\n\n score += self.values[y][x] as u64\n", "file_path": "src/day4.rs", "rank": 98, "score": 10.476639000418643 }, { "content": "#...#......\n\n......#...#\n\n#...#......\n\n.#.#..#.###\n\n...........\n\n...........\";\n\n const EXAMPLE_GRID_FOLD_LEFT: &str = \"#####\n\n#...#\n\n#...#\n\n#...#\n\n#####\n\n.....\n\n.....\";\n\n\n\n #[test]\n\n fn part1_examples() {\n\n let input = parse_input(EXAMPLE);\n\n assert_eq!(grid_str(&input.grid), EXAMPLE_GRID);\n\n assert_eq!(input.grid.shape(), [15, 11]);\n\n let folded1 = execute_fold(&input.grid, &Fold::FoldUp(7));\n", "file_path": "src/day13.rs", "rank": 99, "score": 10.26230912210411 } ]
Rust
src/rng/splitmix64simd.rs
tommyettinger/heh
a2a52c8ebd692d3e74222579f4f8ca04f4256b5d
use rand_core::{Error, RngCore, SeedableRng}; use rand_core::block::{BlockRngCore, BlockRng}; use faster::Transmute; use faster::vecs::{u64x4}; use byteorder::{LittleEndian, ByteOrder}; use super::Linnorm64; #[allow(missing_copy_implementations)] #[derive(Debug, Clone)] pub struct SplitMix64x4Core { x: u64x4, } impl SplitMix64x4Core { #[inline] pub fn next_u64x4(&mut self) -> u64x4 { const INC : u64x4 = u64x4::new(0xabdcdadb7e86b08bu64, 0x575bdce3dd69b537u64, 0x765ff07dee64eac9u64, 0x9e3779b97f4a7c15u64); const A_MUL : u64x4 = u64x4::new(0xbf58476d1ce4e5b9u64, 0xbf58476d1ce4e5b9u64, 0xbf58476d1ce4e5b9u64, 0xbf58476d1ce4e5b9u64); const B_MUL : u64x4 = u64x4::new(0x94d049bb133111ebu64, 0x94d049bb133111ebu64, 0x94d049bb133111ebu64, 0x94d049bb133111ebu64); self.x += INC; let mut z = self.x; z = (z ^ (z >> 30)) * A_MUL; z = (z ^ (z >> 27)) * B_MUL; z ^ (z >> 31) } #[inline] pub fn from_seed_u64(seed: u64) -> SplitMix64x4Core { let mut rng = Linnorm64::from_seed_u64(seed); SplitMix64x4Core::from_seed(SplitMix64x4Seed::from_rng(&mut rng)) } } pub struct SplitMix64x4Seed([u8; 32]); impl SplitMix64x4Seed { #[inline] pub fn new(seed: [u8; 32]) -> SplitMix64x4Seed { SplitMix64x4Seed(seed) } pub fn from_rng<R: RngCore>(rng: &mut R) -> SplitMix64x4Seed { let mut seed = [0; 32]; rng.fill_bytes(&mut seed); SplitMix64x4Seed(seed) } } impl ::std::convert::AsMut<[u8]> for SplitMix64x4Seed { fn as_mut(&mut self) -> &mut [u8] { &mut self.0 } } impl ::std::default::Default for SplitMix64x4Seed { fn default() -> SplitMix64x4Seed { SplitMix64x4Seed([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) } } impl SeedableRng for SplitMix64x4Core { type Seed = SplitMix64x4Seed; #[inline] fn from_seed(seed: SplitMix64x4Seed) -> SplitMix64x4Core { let seed = seed.0; SplitMix64x4Core { x: u64x4::new( LittleEndian::read_u64(&seed[0..8]), LittleEndian::read_u64(&seed[8..16]), LittleEndian::read_u64(&seed[16..24]), 
LittleEndian::read_u64(&seed[24..32]), ), } } } impl BlockRngCore for SplitMix64x4Core { type Item = u32; type Results = [u32; 8]; #[inline] fn generate(&mut self, results: &mut Self::Results) { let r = self.next_u64x4().be_u32s(); r.store(results, 0); } } #[derive(Clone, Debug)] pub struct SplitMix64x4(BlockRng<SplitMix64x4Core>); impl SplitMix64x4 { #[inline] pub fn from_seed_u64(seed: u64) -> Self { SplitMix64x4(BlockRng::<SplitMix64x4Core>::new(SplitMix64x4Core::from_seed_u64(seed))) } } impl RngCore for SplitMix64x4 { #[inline(always)] fn next_u32(&mut self) -> u32 { self.0.next_u32() } #[inline(always)] fn next_u64(&mut self) -> u64 { self.0.next_u64() } #[inline] fn fill_bytes(&mut self, dest: &mut [u8]) { self.0.fill_bytes(dest); } #[inline] fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> { self.0.try_fill_bytes(dest) } } impl SeedableRng for SplitMix64x4 { type Seed = <SplitMix64x4Core as SeedableRng>::Seed; fn from_seed(seed: Self::Seed) -> Self { SplitMix64x4(BlockRng::<SplitMix64x4Core>::from_seed(seed)) } fn from_rng<R: RngCore>(rng: R) -> Result<Self, Error> { BlockRng::<SplitMix64x4Core>::from_rng(rng).map(|rng| SplitMix64x4(rng)) } } #[test] fn test_vs_non_simd() { use ::rand_core::SeedableRng; use super::SplitMix64; let mut seed = [0; 32]; LittleEndian::write_u64(&mut seed[0..8], 0); LittleEndian::write_u64(&mut seed[8..16], 1); LittleEndian::write_u64(&mut seed[16..24], 2); LittleEndian::write_u64(&mut seed[24..32], 3); let mut rng_simd = SplitMix64x4Core::from_seed( SplitMix64x4Seed::new(seed)); fn splitmix_from_slice(slice: &[u8]) -> SplitMix64 { let mut seed = [0; 8]; for (x, y) in slice.iter().zip(seed.iter_mut()) { *y = *x; } SplitMix64::from_seed(seed) } let mut rngs = [ splitmix_from_slice(&seed[0..8]), splitmix_from_slice(&seed[8..16]), splitmix_from_slice(&seed[16..24]), splitmix_from_slice(&seed[24..32]), ]; let r_simd = rng_simd.next_u64x4(); let rs = [ rngs[0].next_u64(), rngs[1].next_u64(), rngs[2].next_u64(), 
rngs[3].next_u64(), ]; assert_eq!(r_simd.extract(0), rs[0]); assert_eq!(r_simd.extract(1), rs[1]); assert_eq!(r_simd.extract(2), rs[2]); assert_eq!(r_simd.extract(3), rs[3]); }
use rand_core::{Error, RngCore, SeedableRng}; use rand_core::block::{BlockRngCore, BlockRng}; use faster::Transmute; use faster::vecs::{u64x4}; use byteorder::{LittleEndian, ByteOrder}; use super::Linnorm64; #[allow(missing_copy_implementations)] #[derive(Debug, Clone)] pub struct SplitMix64x4Core { x: u64x4, } impl SplitMix64x4Core { #[inline]
z = (z ^ (z >> 30)) * A_MUL; z = (z ^ (z >> 27)) * B_MUL; z ^ (z >> 31) } #[inline] pub fn from_seed_u64(seed: u64) -> SplitMix64x4Core { let mut rng = Linnorm64::from_seed_u64(seed); SplitMix64x4Core::from_seed(SplitMix64x4Seed::from_rng(&mut rng)) } } pub struct SplitMix64x4Seed([u8; 32]); impl SplitMix64x4Seed { #[inline] pub fn new(seed: [u8; 32]) -> SplitMix64x4Seed { SplitMix64x4Seed(seed) } pub fn from_rng<R: RngCore>(rng: &mut R) -> SplitMix64x4Seed { let mut seed = [0; 32]; rng.fill_bytes(&mut seed); SplitMix64x4Seed(seed) } } impl ::std::convert::AsMut<[u8]> for SplitMix64x4Seed { fn as_mut(&mut self) -> &mut [u8] { &mut self.0 } } impl ::std::default::Default for SplitMix64x4Seed { fn default() -> SplitMix64x4Seed { SplitMix64x4Seed([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31]) } } impl SeedableRng for SplitMix64x4Core { type Seed = SplitMix64x4Seed; #[inline] fn from_seed(seed: SplitMix64x4Seed) -> SplitMix64x4Core { let seed = seed.0; SplitMix64x4Core { x: u64x4::new( LittleEndian::read_u64(&seed[0..8]), LittleEndian::read_u64(&seed[8..16]), LittleEndian::read_u64(&seed[16..24]), LittleEndian::read_u64(&seed[24..32]), ), } } } impl BlockRngCore for SplitMix64x4Core { type Item = u32; type Results = [u32; 8]; #[inline] fn generate(&mut self, results: &mut Self::Results) { let r = self.next_u64x4().be_u32s(); r.store(results, 0); } } #[derive(Clone, Debug)] pub struct SplitMix64x4(BlockRng<SplitMix64x4Core>); impl SplitMix64x4 { #[inline] pub fn from_seed_u64(seed: u64) -> Self { SplitMix64x4(BlockRng::<SplitMix64x4Core>::new(SplitMix64x4Core::from_seed_u64(seed))) } } impl RngCore for SplitMix64x4 { #[inline(always)] fn next_u32(&mut self) -> u32 { self.0.next_u32() } #[inline(always)] fn next_u64(&mut self) -> u64 { self.0.next_u64() } #[inline] fn fill_bytes(&mut self, dest: &mut [u8]) { self.0.fill_bytes(dest); } #[inline] fn try_fill_bytes(&mut self, dest: &mut [u8]) -> 
Result<(), Error> { self.0.try_fill_bytes(dest) } } impl SeedableRng for SplitMix64x4 { type Seed = <SplitMix64x4Core as SeedableRng>::Seed; fn from_seed(seed: Self::Seed) -> Self { SplitMix64x4(BlockRng::<SplitMix64x4Core>::from_seed(seed)) } fn from_rng<R: RngCore>(rng: R) -> Result<Self, Error> { BlockRng::<SplitMix64x4Core>::from_rng(rng).map(|rng| SplitMix64x4(rng)) } } #[test] fn test_vs_non_simd() { use ::rand_core::SeedableRng; use super::SplitMix64; let mut seed = [0; 32]; LittleEndian::write_u64(&mut seed[0..8], 0); LittleEndian::write_u64(&mut seed[8..16], 1); LittleEndian::write_u64(&mut seed[16..24], 2); LittleEndian::write_u64(&mut seed[24..32], 3); let mut rng_simd = SplitMix64x4Core::from_seed( SplitMix64x4Seed::new(seed)); fn splitmix_from_slice(slice: &[u8]) -> SplitMix64 { let mut seed = [0; 8]; for (x, y) in slice.iter().zip(seed.iter_mut()) { *y = *x; } SplitMix64::from_seed(seed) } let mut rngs = [ splitmix_from_slice(&seed[0..8]), splitmix_from_slice(&seed[8..16]), splitmix_from_slice(&seed[16..24]), splitmix_from_slice(&seed[24..32]), ]; let r_simd = rng_simd.next_u64x4(); let rs = [ rngs[0].next_u64(), rngs[1].next_u64(), rngs[2].next_u64(), rngs[3].next_u64(), ]; assert_eq!(r_simd.extract(0), rs[0]); assert_eq!(r_simd.extract(1), rs[1]); assert_eq!(r_simd.extract(2), rs[2]); assert_eq!(r_simd.extract(3), rs[3]); }
pub fn next_u64x4(&mut self) -> u64x4 { const INC : u64x4 = u64x4::new(0xabdcdadb7e86b08bu64, 0x575bdce3dd69b537u64, 0x765ff07dee64eac9u64, 0x9e3779b97f4a7c15u64); const A_MUL : u64x4 = u64x4::new(0xbf58476d1ce4e5b9u64, 0xbf58476d1ce4e5b9u64, 0xbf58476d1ce4e5b9u64, 0xbf58476d1ce4e5b9u64); const B_MUL : u64x4 = u64x4::new(0x94d049bb133111ebu64, 0x94d049bb133111ebu64, 0x94d049bb133111ebu64, 0x94d049bb133111ebu64); self.x += INC; let mut z = self.x;
function_block-random_span
[ { "content": "#[inline]\n\nfn rotate_left(x: u64x4, n: u32) -> u64x4 {\n\n // Protect against undefined behaviour for over-long bit shifts\n\n const BITS: u32 = 64;\n\n let n = n % BITS;\n\n (x << n) | (x >> ((BITS - n) % BITS))\n\n}\n\n\n\nimpl XoroShiro128x4Core {\n\n /// Return the next random `u64x4`.\n\n #[inline]\n\n pub fn next_u64x4(&mut self) -> u64x4 {\n\n let r = self.s0 + self.s1;\n\n self.s1 ^= self.s0;\n\n self.s0 = rotate_left(self.s0, 55) ^ self.s1 ^ (self.s1 << 14);\n\n self.s1 = rotate_left(self.s1, 36);\n\n r\n\n }\n\n\n\n /// Create a new `XoroShiro128x4Core`. This will use `SplitMix64` to fill the seed.\n\n #[inline]\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 0, "score": 50404.718017754996 }, { "content": "#[inline]\n\nfn rot(x: u32, k: u32) -> u32 {\n\n (x << k) | (x >> (32 - k))\n\n}\n\n\n\nimpl Rng for SmallPrng128 {\n\n #[inline]\n\n fn next_u32(&mut self) -> u32 {\n\n let e = self.a.wrapping_sub(rot(self.b, 27));\n\n self.a = self.b ^ rot(self.c, 17);\n\n self.b = self.c.wrapping_add(self.d);\n\n self.c = self.d.wrapping_add(e);\n\n self.d = e.wrapping_add(self.a);\n\n self.d\n\n }\n\n\n\n #[inline]\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n\n LittleEndian::write_u64(&mut chunk, self.next_u64());\n", "file_path": "benches/smallprng.rs", "rank": 1, "score": 8668.060383925984 }, { "content": "extern crate rand;\n\nextern crate byteorder;\n\n\n\nuse self::rand::{Rng, SeedableRng, Rand};\n\nuse self::byteorder::{LittleEndian, ByteOrder};\n\n\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n\npub struct Sfc64 {\n\n a: u64,\n\n b: u64,\n\n c: u64,\n\n counter: u64,\n\n}\n\n\n\nimpl Rng for Sfc64 {\n\n #[inline]\n\n fn next_u32(&mut self) -> u32 {\n\n self.next_u64() as u32\n\n }\n", "file_path": "benches/sfc64.rs", "rank": 2, "score": 12.562992205273108 }, { "content": "extern crate aesni;\n\nextern crate byteorder;\n\nextern crate 
rand;\n\n\n\nuse self::rand::{RngCore, SeedableRng};\n\nuse self::aesni::Aes128;\n\nuse self::byteorder::{LittleEndian, ByteOrder};\n\n\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n\npub struct AesRng {\n\n aes: Aes128,\n\n key: [u8; 16],\n\n}\n\n\n\nimpl RngCore for AesRng {\n\n #[inline]\n\n fn next_u32(&mut self) -> u32 {\n\n self.next_u64() as u32\n\n }\n", "file_path": "benches/aes.rs", "rank": 3, "score": 12.112027958899857 }, { "content": "extern crate rand;\n\nextern crate byteorder;\n\n\n\nuse self::rand::{Rng, SeedableRng, Rand};\n\nuse self::byteorder::{LittleEndian, ByteOrder};\n\n\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n\npub struct SmallPrng128 {\n\n a: u32,\n\n b: u32,\n\n c: u32,\n\n d: u32,\n\n}\n\n\n\n#[inline]\n", "file_path": "benches/smallprng.rs", "rank": 4, "score": 11.907907997249671 }, { "content": "use rand::{Rng, SeedableRng, Rand};\n\nuse byteorder::{LittleEndian, ByteOrder};\n\n\n\nuse xoroshiro::rng::SplitMix64;\n\n\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Clone)]\n\n/// 64-bit universal RNG by [Marsaglia and Tsang][1].\n\n///\n\n/// Generates floats directly; generating integers is inefficient.\n\n///\n\n/// [1]: https://doi.org/10.1016/j.spl.2003.11.001\n\npub struct Urng64 {\n\n u: [f64; 98],\n\n c: f64,\n\n i: usize,\n\n j: usize,\n\n}\n\n\n\nimpl Rng for Urng64 {\n", "file_path": "benches/urng.rs", "rank": 7, "score": 10.10759926379377 }, { "content": " }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct XoroShiro128x4(BlockRng<XoroShiro128x4Core>);\n\n\n\nimpl XoroShiro128x4 {\n\n /// Create a new `XoroShiro128x4`. 
This will use `SplitMix64` to fill the seed.\n\n #[inline]\n\n pub fn from_seed_u64(seed: u64) -> Self {\n\n XoroShiro128x4(BlockRng::<XoroShiro128x4Core>::new(XoroShiro128x4Core::from_seed_u64(seed)))\n\n }\n\n}\n\n\n\nimpl RngCore for XoroShiro128x4 {\n\n #[inline(always)]\n\n fn next_u32(&mut self) -> u32 {\n\n self.0.next_u32()\n\n }\n\n\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 8, "score": 10.04260949804299 }, { "content": "use rand_core::{Error, RngCore, SeedableRng};\n\nuse rand_core::block::{BlockRngCore, BlockRng};\n\nuse faster::Transmute;\n\nuse faster::vecs::{u64x4};\n\nuse byteorder::{LittleEndian, ByteOrder};\n\n\n\nuse super::SplitMix64;\n\n\n\n/// A linnorm64 random number generator using SIMD to generate 4 `u64` at a time.\n\n///\n\n/// The Linnorm algorithm is not suitable for cryptographic purposes, but\n\n/// is very fast and has better statistical properties than `XoroShiro128`. If\n\n/// you do not know for sure that it fits your requirements, use a more secure\n\n/// one such as `IsaacRng` or `OsRng`.\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n\npub struct Linnorm64x4Core {\n\n x: u64x4,\n\n}\n\n\n", "file_path": "src/rng/linnorm64simd.rs", "rank": 9, "score": 9.758302831851314 }, { "content": "impl Linnorm64x4Core {\n\n /// Return the next random `u64x4`.\n\n #[inline]\n\n pub fn next_u64x4(&mut self) -> u64x4 {\n\n const LCG_MUL : u64x4 = u64x4::new(0x41C64E6Du64, 0x41C64E6Du64, 0x41C64E6Du64, 0x41C64E6Du64);\n\n const LCG_INC : u64x4 = u64x4::new(0xBE9u64, 0xACEDu64, 0xDE4Du64, 0x1u64);\n\n const Z_MUL : u64x4 = u64x4::new(0xAEF17502108EF2D9u64, 0xAEF17502108EF2D9u64, 0xAEF17502108EF2D9u64, 0xAEF17502108EF2D9u64);\n\n self.x = self.x * LCG_MUL + LCG_INC;\n\n let z = (self.x ^ (self.x >> 32)) * Z_MUL;\n\n z ^ (z >> 30)\n\n }\n\n\n\n /// Create a new `Linnorm64x4Core`. 
This will use `SplitMix64` to fill the seed.\n\n #[inline]\n\n pub fn from_seed_u64(seed: u64) -> Linnorm64x4Core {\n\n let mut rng = SplitMix64::from_seed_u64(seed);\n\n Linnorm64x4Core::from_seed(Linnorm64x4Seed::from_rng(&mut rng))\n\n }\n\n}\n\n\n", "file_path": "src/rng/linnorm64simd.rs", "rank": 11, "score": 9.422140438585195 }, { "content": "pub struct Linnorm64x4(BlockRng<Linnorm64x4Core>);\n\n\n\nimpl Linnorm64x4 {\n\n /// Create a new `Linnorm64x4`. This will use `SplitMix64` to fill the seed.\n\n #[inline]\n\n pub fn from_seed_u64(seed: u64) -> Self {\n\n Linnorm64x4(BlockRng::<Linnorm64x4Core>::new(Linnorm64x4Core::from_seed_u64(seed)))\n\n }\n\n}\n\n\n\nimpl RngCore for Linnorm64x4 {\n\n #[inline(always)]\n\n fn next_u32(&mut self) -> u32 {\n\n self.0.next_u32()\n\n }\n\n\n\n #[inline(always)]\n\n fn next_u64(&mut self) -> u64 {\n\n self.0.next_u64()\n\n }\n", "file_path": "src/rng/linnorm64simd.rs", "rank": 12, "score": 9.034144856881758 }, { "content": "use rand_core;\n\nuse rand_core::{RngCore, SeedableRng};\n\nuse byteorder::{LittleEndian, ByteOrder};\n\n\n\n/// A Linnorm random number generator.\n\n///\n\n/// The Linnorm algorithm is not suitable for cryptographic purposes, but is\n\n/// very fast, has high quality, and has a 64 bit state. 
It does not fail the\n\n/// statistical tests `XoroShiro128` fails systematically.\n\n/// If you do not know for sure that it fits your requirements, use a more\n\n/// secure one such as `IsaacRng` or `OsRng`.\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n\npub struct Linnorm64 {\n\n x: u64,\n\n}\n\n\n\nimpl Linnorm64 {\n\n /// Creates a new `Linnorm64` instance which is not seeded.\n\n ///\n", "file_path": "src/rng/linnorm64.rs", "rank": 13, "score": 8.861750642765161 }, { "content": "pub struct Linnorm64x4Seed([u8; 32]);\n\n\n\n/// Seed for a `Linnorm64x4` or `Linnorm64x4Core`.\n\nimpl Linnorm64x4Seed {\n\n #[inline]\n\n /// Create a seed for a `Linnorm64x4` or `Linnorm64x4Core`.\n\n pub fn new(seed: [u8; 32]) -> Linnorm64x4Seed {\n\n Linnorm64x4Seed(seed)\n\n }\n\n\n\n /// Use an RNG to generate a valid linnorm seed.\n\n pub fn from_rng<R: RngCore>(rng: &mut R) -> Linnorm64x4Seed {\n\n let mut seed = [0; 32];\n\n rng.fill_bytes(&mut seed);\n\n Linnorm64x4Seed(seed)\n\n }\n\n}\n\n\n\nimpl ::std::convert::AsMut<[u8]> for Linnorm64x4Seed {\n\n fn as_mut(&mut self) -> &mut [u8] {\n", "file_path": "src/rng/linnorm64simd.rs", "rank": 14, "score": 7.833971809209294 }, { "content": "pub struct XoroShiro128x4Core {\n\n s0: u64x4,\n\n s1: u64x4,\n\n}\n\n\n\n/// Shifts the bits to the left by a specified amount, `n`,\n\n/// wrapping the truncated bits to the end of the resulting integer.\n\n///\n\n/// Please note this isn't the same operation as `<<`!\n\n#[inline]\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 15, "score": 7.811052957955066 }, { "content": "use rand_core;\n\nuse rand_core::{RngCore, SeedableRng};\n\nuse byteorder::{LittleEndian, ByteOrder};\n\n\n\nuse super::SplitMix64;\n\n\n\n/// A xorshift1024*φ random number generator.\n\n///\n\n/// The xorshift1024*φ algorithm is not suitable for cryptographic purposes, but\n\n/// is very fast and has a huge period. 
If you do not know for sure that it fits\n\n/// your requirements, use a more secure one such as `IsaacRng` or `OsRng`.\n\n///\n\n/// The algorithm used here is translated from [the `xoroshiro1024star.c`\n\n/// reference source code](http://xorshift.di.unimi.it/xoroshiro1024star.c) by\n\n/// Sebastiano Vigna.\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n\npub struct XorShift1024 {\n\n s: [u64; 16],\n\n p: usize,\n", "file_path": "src/rng/xorshift1024.rs", "rank": 16, "score": 7.67878463428714 }, { "content": " LittleEndian::read_u64(&seed[8..16]),\n\n LittleEndian::read_u64(&seed[16..24]),\n\n LittleEndian::read_u64(&seed[24..32]),\n\n ),\n\n }\n\n }\n\n}\n\n\n\nimpl BlockRngCore for Linnorm64x4Core {\n\n type Item = u32;\n\n type Results = [u32; 8];\n\n\n\n #[inline]\n\n fn generate(&mut self, results: &mut Self::Results) {\n\n let r = self.next_u64x4().be_u32s();\n\n r.store(results, 0);\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n", "file_path": "src/rng/linnorm64simd.rs", "rank": 17, "score": 7.597672738469736 }, { "content": "use rand_core;\n\nuse rand_core::{RngCore, SeedableRng};\n\nuse byteorder::{LittleEndian, ByteOrder};\n\n\n\n/// A splitmix random number generator.\n\n///\n\n/// The splitmix algorithm is not suitable for cryptographic purposes, but is\n\n/// very fast and has a 64 bit state. 
Usually `XoroShiro128` should be prefered.\n\n/// If you do not know for sure that it fits your requirements, use a more\n\n/// secure one such as `IsaacRng` or `OsRng`.\n\n///\n\n/// The algorithm used here is translated from [the `splitmix64.c`\n\n/// reference source code](http://xorshift.di.unimi.it/splitmix64.c) by\n\n/// Sebastiano Vigna.\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n\npub struct SplitMix64 {\n\n x: u64,\n\n}\n\n\n", "file_path": "src/rng/splitmix64.rs", "rank": 18, "score": 7.5652634281618685 }, { "content": "use rand_core;\n\nuse rand_core::{RngCore, SeedableRng};\n\nuse byteorder::{LittleEndian, ByteOrder};\n\n\n\nuse super::SplitMix64;\n\n\n\n/// A xoroshiro128+ random number generator.\n\n///\n\n/// The xoroshiro128+ algorithm is not suitable for cryptographic purposes, but\n\n/// is very fast and has better statistical properties than `XorShiftRng`. If\n\n/// you do not know for sure that it fits your requirements, use a more secure\n\n/// one such as `IsaacRng` or `OsRng`.\n\n///\n\n/// The algorithm used here is translated from [the `xoroshiro128plus.c`\n\n/// reference source code](http://xorshift.di.unimi.it/xoroshiro128plus.c) by\n\n/// David Blackman and Sebastiano Vigna.\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n\npub struct XoroShiro128 {\n\n s0: u64,\n", "file_path": "src/rng/xoroshiro128.rs", "rank": 19, "score": 7.405211195430986 }, { "content": "use rand_core::{Error, RngCore, SeedableRng};\n\nuse rand_core::block::{BlockRngCore, BlockRng};\n\nuse faster::Transmute;\n\nuse faster::vecs::u64x4;\n\nuse byteorder::{LittleEndian, ByteOrder};\n\n\n\nuse super::SplitMix64;\n\n\n\n/// A xoroshiro128+ random number generator using SIMD to generate 4 `u64` at a time.\n\n///\n\n/// The xoroshiro128+ algorithm is not suitable for cryptographic purposes, but\n\n/// is very fast and has better statistical properties than `XorShiftRng`. 
If\n\n/// you do not know for sure that it fits your requirements, use a more secure\n\n/// one such as `IsaacRng` or `OsRng`.\n\n///\n\n/// The algorithm used here is translated from [the `xoroshiro128plus.c`\n\n/// reference source code](http://xorshift.di.unimi.it/xoroshiro128plus.c) by\n\n/// David Blackman and Sebastiano Vigna. It was adapted to use SIMD.\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 20, "score": 7.226444413723618 }, { "content": " }\n\n}\n\n\n\n/// Seed for `XorShift1024`.\n\n#[derive(Clone)]\n\npub struct XorShift1024Seed(pub [u8; 16 * 8]);\n\n\n\nimpl ::std::convert::From<[u8; 16 * 8]> for XorShift1024Seed {\n\n fn from(seed: [u8; 16 * 8]) -> XorShift1024Seed {\n\n XorShift1024Seed(seed)\n\n }\n\n}\n\n\n\nimpl ::std::convert::AsMut<[u8]> for XorShift1024Seed {\n\n fn as_mut(&mut self) -> &mut [u8] {\n\n &mut self.0\n\n }\n\n}\n\n\n\nimpl ::std::default::Default for XorShift1024Seed {\n", "file_path": "src/rng/xorshift1024.rs", "rank": 22, "score": 7.145103006523544 }, { "content": " /// This can be used to generate 2^512 non-overlapping subsequences for\n\n /// parallel computations.\n\n ///\n\n /// ```\n\n /// # extern crate rand;\n\n /// # extern crate xoroshiro;\n\n /// # fn main() {\n\n /// use rand::SeedableRng;\n\n /// use xoroshiro::rng::XorShift1024;\n\n ///\n\n /// let rng1 = XorShift1024::from_seed_u64(0);\n\n /// let mut rng2 = rng1.clone();\n\n /// rng2.jump();\n\n /// let mut rng3 = rng2.clone();\n\n /// rng3.jump();\n\n /// # }\n\n /// ```\n\n pub fn jump(&mut self) {\n\n const JUMP: [u64; 16] = [0x84242f96eca9c41d,\n\n 0xa3c65b8776f96855, 0x5b34a39f070b5837, 0x4489affce4f31a1e,\n", "file_path": "src/rng/xorshift1024.rs", "rank": 23, "score": 7.129836263416249 }, { "content": "#![allow(dead_code)]\n\n#![allow(unreadable_literal)]\n\n\n\nextern crate byteorder;\n\nextern crate rand;\n\nextern crate xoroshiro;\n\n\n\nuse self::rand::{Rng, SeedableRng, 
Rand};\n\nuse self::byteorder::{LittleEndian, ByteOrder};\n\n\n\nuse self::xoroshiro::rng::SplitMix64;\n\n\n\n/// A xoroshiro128* random number generator.\n\n///\n\n/// The xoroshiro128* algorithm is not suitable for cryptographic purposes, but\n\n/// is very fast and has better statistical properties than `XorShiftRng`. If\n\n/// you do not know for sure that it fits your requirements, use a more secure\n\n/// one such as `IsaacRng` or `OsRng`.\n\n#[allow(missing_copy_implementations)]\n\n#[derive(Debug, Clone)]\n", "file_path": "benches/xoroshiro128star.rs", "rank": 24, "score": 6.755376764526236 }, { "content": "mod splitmix64;\n\nmod linnorm64;\n\nmod xoroshiro128;\n\n#[cfg(feature = \"unstable\")]\n\nmod xoroshiro128simd;\n\n#[cfg(feature = \"unstable\")]\n\nmod linnorm64simd;\n\n#[cfg(feature = \"unstable\")]\n\nmod splitmix64simd;\n\nmod xorshift1024;\n\n\n\npub use self::splitmix64::SplitMix64;\n\npub use self::linnorm64::Linnorm64;\n\npub use self::xoroshiro128::XoroShiro128;\n\n#[cfg(feature = \"unstable\")]\n\npub use self::xoroshiro128simd::{XoroShiro128x4, XoroShiro128x4Seed};\n\n#[cfg(feature = \"unstable\")]\n\npub use self::linnorm64simd::{Linnorm64x4, Linnorm64x4Seed};\n\n#[cfg(feature = \"unstable\")]\n\npub use self::splitmix64simd::{SplitMix64x4, SplitMix64x4Seed};\n\npub use self::xorshift1024::{XorShift1024, XorShift1024Seed};\n", "file_path": "src/rng/mod.rs", "rank": 25, "score": 6.689040815233045 }, { "content": "}\n\n\n\nimpl RngCore for XoroShiro128 {\n\n #[inline]\n\n fn next_u32(&mut self) -> u32 {\n\n // The two lowest bits have some linear dependencies, so we use the\n\n // upper bits instead.\n\n (self.next_u64() >> 32) as u32\n\n }\n\n\n\n #[inline]\n\n fn next_u64(&mut self) -> u64 {\n\n let r = self.s0.wrapping_add(self.s1);\n\n self.s1 ^= self.s0;\n\n self.s0 = self.s0.rotate_left(55) ^ self.s1 ^ (self.s1 << 14);\n\n self.s1 = self.s1.rotate_left(36);\n\n r\n\n }\n\n\n\n #[inline]\n", "file_path": "src/rng/xoroshiro128.rs", 
"rank": 26, "score": 6.490555075514956 }, { "content": " &mut self.0\n\n }\n\n}\n\n\n\nimpl ::std::default::Default for Linnorm64x4Seed {\n\n fn default() -> Linnorm64x4Seed {\n\n Linnorm64x4Seed([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31])\n\n }\n\n}\n\n\n\nimpl SeedableRng for Linnorm64x4Core {\n\n type Seed = Linnorm64x4Seed;\n\n\n\n /// Create a new `Linnorm64x4Core`.\n\n #[inline]\n\n fn from_seed(seed: Linnorm64x4Seed) -> Linnorm64x4Core {\n\n let seed = seed.0;\n\n Linnorm64x4Core {\n\n x: u64x4::new(\n\n LittleEndian::read_u64(&seed[0..8]),\n", "file_path": "src/rng/linnorm64simd.rs", "rank": 27, "score": 6.255352102284652 }, { "content": "//! This crate provides the [xoroshiro128+, xorshift1024*φ and\n\n//! splitmix64](http://xoroshiro.di.unimi.it) random number generators.\n\n//!\n\n//! It is recommended to use `XoroShiro128` unless you need a period larger\n\n//! than 2^128 - 1, where `XorShift1024` with a period of 2^1024 - 1 is more\n\n//! appropriate. `SplitMix64` is only used to initialize the other generators,\n\n//! it should not be used directly, unless you are sure that a period of\n\n//! 2^64 - 1 is enough.\n\n\n\nextern crate rand_core;\n\nextern crate byteorder;\n\n#[cfg(feature = \"unstable\")]\n\nextern crate faster;\n\n\n\n/// Pseudo-random number generators.\n\npub mod rng;\n", "file_path": "src/lib.rs", "rank": 29, "score": 6.118042174533737 }, { "content": " /// The initial values of this RNG are constants, so all generators created\n\n /// by this function will yield the same stream of random numbers. 
It is\n\n /// highly recommended that this is created through `SeedableRng` instead of\n\n /// this function.\n\n pub fn new_unseeded() -> Linnorm64 {\n\n // The state can be seeded with any value.\n\n Linnorm64 {\n\n x: 0,\n\n }\n\n }\n\n\n\n pub fn from_seed_u64(seed: u64) -> Linnorm64 {\n\n let mut x = [0; 8];\n\n LittleEndian::write_u64(&mut x, seed);\n\n Linnorm64::from_seed(x)\n\n }\n\n}\n\n\n\nimpl RngCore for Linnorm64 {\n\n #[inline]\n", "file_path": "src/rng/linnorm64.rs", "rank": 30, "score": 5.867409424550441 }, { "content": "#![allow(unknown_lints)]\n\n#![allow(unreadable_literal)]\n\n\n\nextern crate byteorder;\n\nextern crate rand;\n\nextern crate xoroshiro;\n\n\n\nuse byteorder::{ByteOrder, LittleEndian};\n\nuse rand::{Rng, SeedableRng};\n\nuse xoroshiro::rng::{\n\n SplitMix64,\n\n XoroShiro128,\n\n XorShift1024, XorShift1024Seed\n\n};\n\n\n\n#[test]\n", "file_path": "tests/rng_basic.rs", "rank": 31, "score": 5.866747748072447 }, { "content": " pub fn from_seed_u64(seed: u64) -> XoroShiro128 {\n\n let mut rng = SplitMix64::from_seed_u64(seed);\n\n XoroShiro128::from_rng(&mut rng).unwrap()\n\n }\n\n\n\n /// Jump forward, equivalently to 2^64 calls to `next_u64()`.\n\n ///\n\n /// This can be used to generate 2^64 non-overlapping subsequences for\n\n /// parallel computations.\n\n ///\n\n /// ```\n\n /// # extern crate rand;\n\n /// # extern crate xoroshiro;\n\n /// # fn main() {\n\n /// use rand::SeedableRng;\n\n /// use xoroshiro::rng::XoroShiro128;\n\n ///\n\n /// let rng1 = XoroShiro128::from_seed_u64(0);\n\n /// let mut rng2 = rng1.clone();\n\n /// rng2.jump();\n", "file_path": "src/rng/xoroshiro128.rs", "rank": 32, "score": 5.8348677583223925 }, { "content": " LittleEndian::read_u64(&seed[48..56]),\n\n ),\n\n s1: u64x4::new(\n\n LittleEndian::read_u64(&seed[8..16]),\n\n LittleEndian::read_u64(&seed[24..32]),\n\n LittleEndian::read_u64(&seed[40..48]),\n\n LittleEndian::read_u64(&seed[56..64]),\n\n ),\n\n }\n\n }\n\n}\n\n\n\nimpl BlockRngCore for 
XoroShiro128x4Core {\n\n type Item = u32;\n\n type Results = [u32; 8];\n\n\n\n #[inline]\n\n fn generate(&mut self, results: &mut Self::Results) {\n\n let r = self.next_u64x4().be_u32s();\n\n r.store(results, 0);\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 33, "score": 5.820086604973868 }, { "content": " pub fn from_seed_u64(seed: u64) -> XoroShiro128x4Core {\n\n let mut rng = SplitMix64::from_seed_u64(seed);\n\n XoroShiro128x4Core::from_seed(XoroShiro128x4Seed::from_rng(&mut rng))\n\n }\n\n}\n\n\n\npub struct XoroShiro128x4Seed([u8; 64]);\n\n\n\n/// Seed for a `XoroShiro128x4` or `XoroShiro128x4Core`.\n\nimpl XoroShiro128x4Seed {\n\n #[inline]\n\n /// Create a seed for a `XoroShiro128x4` or `XoroShiro128x4Core`.\n\n ///\n\n /// # Panics\n\n /// This effectively has to seed 4 `XoroShiro128` and will panic if any of\n\n /// those would be initialized with an all zero seed.\n\n pub fn new(seed: [u8; 64]) -> XoroShiro128x4Seed {\n\n for i in 0..4 {\n\n assert_ne!(&seed[16*i..16*(i + 1)], &[0; 16]);\n\n }\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 34, "score": 5.419225258556396 }, { "content": "}\n\n\n\nimpl ::std::default::Default for XoroShiro128x4Seed {\n\n fn default() -> XoroShiro128x4Seed {\n\n XoroShiro128x4Seed([0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63])\n\n }\n\n}\n\n\n\nimpl SeedableRng for XoroShiro128x4Core {\n\n type Seed = XoroShiro128x4Seed;\n\n\n\n /// Create a new `XoroShiro128x4Core`.\n\n #[inline]\n\n fn from_seed(seed: XoroShiro128x4Seed) -> XoroShiro128x4Core {\n\n let seed = seed.0;\n\n XoroShiro128x4Core {\n\n s0: u64x4::new(\n\n LittleEndian::read_u64(&seed[0..8]),\n\n LittleEndian::read_u64(&seed[16..24]),\n\n LittleEndian::read_u64(&seed[32..40]),\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 35, "score": 
5.2926443906844245 }, { "content": " }\n\n}\n\n\n\nimpl RngCore for XorShift1024 {\n\n #[inline]\n\n fn next_u32(&mut self) -> u32 {\n\n (self.next_u64() >> 32) as u32\n\n }\n\n\n\n #[inline]\n\n fn next_u64(&mut self) -> u64 {\n\n let s0 = self.s[self.p];\n\n self.p = self.p.wrapping_add(1) & 15;\n\n let mut s1 = self.s[self.p];\n\n s1 ^= s1 << 31;\n\n self.s[self.p] = s1 ^ s0 ^ (s1 >> 11) ^ (s0 >> 30);\n\n self.s[self.p].wrapping_mul(0x9e3779b97f4a7c13)\n\n }\n\n\n\n #[inline]\n", "file_path": "src/rng/xorshift1024.rs", "rank": 36, "score": 5.111343328328671 }, { "content": " XoroShiro128x4Seed(seed)\n\n }\n\n\n\n /// Use an RNG to generate a valid (non-zero) xoroshiro seed.\n\n pub fn from_rng<R: RngCore>(rng: &mut R) -> XoroShiro128x4Seed {\n\n let mut seed = [0; 64];\n\n for i in 0..4 {\n\n let mut s = &mut seed[i..i*16];\n\n while s == [0; 16] {\n\n rng.fill_bytes(&mut s);\n\n }\n\n }\n\n XoroShiro128x4Seed(seed)\n\n }\n\n}\n\n\n\nimpl ::std::convert::AsMut<[u8]> for XoroShiro128x4Seed {\n\n fn as_mut(&mut self) -> &mut [u8] {\n\n &mut self.0\n\n }\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 37, "score": 5.090388307458566 }, { "content": " }\n\n}\n\n\n\nimpl Rng for XoroShiro128 {\n\n #[inline]\n\n fn next_u32(&mut self) -> u32 {\n\n (self.next_u64() >> 32) as u32\n\n }\n\n\n\n #[inline]\n\n fn next_u64(&mut self) -> u64 {\n\n let r = self.s0.wrapping_mul(self.s1);\n\n self.s1 ^= self.s0;\n\n self.s0 = self.s0.rotate_left(55) ^ self.s1 ^ (self.s1 << 14);\n\n self.s1 = self.s1.rotate_left(36);\n\n r\n\n }\n\n\n\n #[inline]\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n", "file_path": "benches/xoroshiro128star.rs", "rank": 38, "score": 4.825372267464225 }, { "content": "\n\nimpl RngCore for SplitMix64 {\n\n #[inline]\n\n fn next_u32(&mut self) -> u32 {\n\n self.next_u64() as u32\n\n }\n\n\n\n #[inline]\n\n fn next_u64(&mut self) -> u64 {\n\n self.x = self.x.wrapping_add(0x9e3779b97f4a7c15);\n\n let mut z = self.x;\n\n z = (z ^ (z >> 
30)).wrapping_mul(0xbf58476d1ce4e5b9);\n\n z = (z ^ (z >> 27)).wrapping_mul(0x94d049bb133111eb);\n\n return z ^ (z >> 31);\n\n }\n\n\n\n #[inline]\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n", "file_path": "src/rng/splitmix64.rs", "rank": 39, "score": 4.749996680388534 }, { "content": "#![allow(unknown_lints)]\n\n\n\n#[macro_use]\n\nextern crate bencher;\n\nextern crate rand;\n\nextern crate xoroshiro;\n\nextern crate byteorder;\n\n\n\nconst RAND_BENCH_N: u64 = 100_000;\n\nconst RAND_BENCH_BYTES: usize = 1 << 20; // > 1_000_000\n\nuse std::mem::size_of;\n\nuse bencher::{black_box, Bencher};\n\nuse rand::{Rng, RngCore, SeedableRng};\n\nuse rand::{XorShiftRng, IsaacRng, Isaac64Rng, OsRng, ChaChaRng};\n\nuse xoroshiro::rng::{XoroShiro128, SplitMix64, XorShift1024, Linnorm64}; //XoroShiro128x4\n\n#[cfg(feature = \"unstable\")]\n\nuse xoroshiro::rng::{Linnorm64x4, XoroShiro128x4, SplitMix64x4};\n\n\n\n//#[cfg(feature = \"unstable\")]\n\n//mod aes;\n", "file_path": "benches/bench.rs", "rank": 40, "score": 4.614636547947246 }, { "content": " /// let mut rng3 = rng2.clone();\n\n /// rng3.jump();\n\n /// # }\n\n /// ```\n\n pub fn jump(&mut self) {\n\n const JUMP: [u64; 2] = [0xbeac0467eba5facb, 0xd86b048b86aa9922];\n\n let mut s0 = 0;\n\n let mut s1 = 0;\n\n for j in &JUMP {\n\n for b in 0..64 {\n\n if (j & 1 << b) != 0 {\n\n s0 ^= self.s0;\n\n s1 ^= self.s1;\n\n }\n\n self.next_u64();\n\n }\n\n }\n\n self.s0 = s0;\n\n self.s1 = s1;\n\n }\n", "file_path": "src/rng/xoroshiro128.rs", "rank": 41, "score": 4.432471023681657 }, { "content": "impl SplitMix64 {\n\n /// Creates a new `SplitMix64` instance which is not seeded.\n\n ///\n\n /// The initial values of this RNG are constants, so all generators created\n\n /// by this function will yield the same stream of random numbers. 
It is\n\n /// highly recommended that this is created through `SeedableRng` instead of\n\n /// this function.\n\n pub fn new_unseeded() -> SplitMix64 {\n\n // The state can be seeded with any value.\n\n SplitMix64 {\n\n x: 0,\n\n }\n\n }\n\n\n\n pub fn from_seed_u64(seed: u64) -> SplitMix64 {\n\n let mut x = [0; 8];\n\n LittleEndian::write_u64(&mut x, seed);\n\n SplitMix64::from_seed(x)\n\n }\n\n}\n", "file_path": "src/rng/splitmix64.rs", "rank": 43, "score": 3.897110705638873 }, { "content": "pub struct XoroShiro128 {\n\n s0: u64,\n\n s1: u64,\n\n}\n\n\n\nimpl XoroShiro128 {\n\n /// Creates a new `XoroShiro128` instance which is not seeded.\n\n ///\n\n /// The initial values of this RNG are constants, so all generators created\n\n /// by this function will yield the same stream of random numbers. It is\n\n /// highly recommended that this is created through `SeedableRng` instead of\n\n /// this function.\n\n pub fn new_unseeded() -> XoroShiro128 {\n\n // These constants were taken from the `XorShiftRng` implementation.\n\n // The only requirement imposed by the algorithm is that these values\n\n // cannot be zero everywhere.\n\n XoroShiro128 {\n\n s0: 0x193a6754a8a7d469,\n\n s1: 0x97830e05113ba7bb,\n\n }\n", "file_path": "benches/xoroshiro128star.rs", "rank": 44, "score": 3.836221916503832 }, { "content": " for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as u8;\n\n i += 1;\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {\n\n self.fill_bytes(dest);\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl SeedableRng for Linnorm64 {\n\n type Seed = [u8; 8];\n\n\n\n /// Create a new `Linnorm64`.\n\n fn from_seed(seed: [u8; 8]) -> Linnorm64 {\n\n Linnorm64 {\n\n x: LittleEndian::read_u64(&seed),\n\n }\n\n }\n\n}\n", "file_path": "src/rng/linnorm64.rs", "rank": 45, "score": 3.7588427941131726 }, { "content": " #[inline(always)]\n\n fn next_u64(&mut self) -> u64 {\n\n self.0.next_u64()\n\n }\n\n\n\n #[inline]\n\n fn 
fill_bytes(&mut self, dest: &mut [u8]) {\n\n self.0.fill_bytes(dest);\n\n }\n\n\n\n #[inline]\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n\n self.0.try_fill_bytes(dest)\n\n }\n\n}\n\n\n\nimpl SeedableRng for XoroShiro128x4 {\n\n type Seed = <XoroShiro128x4Core as SeedableRng>::Seed;\n\n\n\n fn from_seed(seed: Self::Seed) -> Self {\n\n XoroShiro128x4(BlockRng::<XoroShiro128x4Core>::from_seed(seed))\n\n }\n\n\n\n fn from_rng<R: RngCore>(rng: R) -> Result<Self, Error> {\n\n BlockRng::<XoroShiro128x4Core>::from_rng(rng).map(|rng| XoroShiro128x4(rng))\n\n }\n\n}\n\n\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 46, "score": 3.7472465941138657 }, { "content": " i += 1;\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), ::rand::Error> {\n\n self.fill_bytes(dest);\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl SeedableRng for AesRng {\n\n type Seed = [u8; 16];\n\n\n\n /// Create a new `AesRng`.\n\n fn from_seed(seed: [u8; 16]) -> AesRng {\n\n AesRng {\n\n aes: Aes128::new(&seed),\n\n key: seed,\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "benches/aes.rs", "rank": 47, "score": 3.7164844480746604 }, { "content": "}\n\n\n\nimpl XorShift1024 {\n\n /// Creates a new `XorShift1024` instance which is not seeded.\n\n ///\n\n /// The initial values of this RNG are constants, so all generators created\n\n /// by this function will yield the same stream of random numbers. 
It is\n\n /// highly recommended that this is created through `SeedableRng` instead of\n\n /// this function.\n\n pub fn new_unseeded() -> XorShift1024 {\n\n XorShift1024::from_seed_u64(0)\n\n }\n\n\n\n pub fn from_seed_u64(seed: u64) -> XorShift1024 {\n\n let mut rng = SplitMix64::from_seed_u64(seed);\n\n XorShift1024::from_rng(&mut rng).unwrap()\n\n }\n\n\n\n /// Jump forward, equivalently to 2^512 calls to `next_u64()`.\n\n ///\n", "file_path": "src/rng/xorshift1024.rs", "rank": 48, "score": 3.6921827910277116 }, { "content": "\n\n #[inline]\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n self.0.fill_bytes(dest);\n\n }\n\n\n\n #[inline]\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), Error> {\n\n self.0.try_fill_bytes(dest)\n\n }\n\n}\n\n\n\nimpl SeedableRng for Linnorm64x4 {\n\n type Seed = <Linnorm64x4Core as SeedableRng>::Seed;\n\n\n\n fn from_seed(seed: Self::Seed) -> Self {\n\n Linnorm64x4(BlockRng::<Linnorm64x4Core>::from_seed(seed))\n\n }\n\n\n\n fn from_rng<R: RngCore>(rng: R) -> Result<Self, Error> {\n\n BlockRng::<Linnorm64x4Core>::from_rng(rng).map(|rng| Linnorm64x4(rng))\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/rng/linnorm64simd.rs", "rank": 49, "score": 3.627373677600276 }, { "content": " s1: u64,\n\n}\n\n\n\nimpl XoroShiro128 {\n\n /// Creates a new `XoroShiro128` instance which is not seeded.\n\n ///\n\n /// The initial values of this RNG are constants, so all generators created\n\n /// by this function will yield the same stream of random numbers. 
It is\n\n /// highly recommended that this is created through `SeedableRng` instead of\n\n /// this function.\n\n pub fn new_unseeded() -> XoroShiro128 {\n\n // These constants were taken from the `XorShiftRng` implementation.\n\n // The only requirement imposed by the algorithm is that these values\n\n // cannot be zero everywhere.\n\n XoroShiro128 {\n\n s0: 0x193a6754a8a7d469,\n\n s1: 0x97830e05113ba7bb,\n\n }\n\n }\n\n\n", "file_path": "src/rng/xoroshiro128.rs", "rank": 51, "score": 3.2376531627038583 }, { "content": "//mod xoroshiro128star;\n\n//mod smallprng;\n\n//mod sfc64;\n\n//mod urng;\n\n\n\n//#[cfg(feature = \"unstable\")]\n\n//use aes::AesRng;\n\n//use xoroshiro128star::XoroShiro128 as XoroShiro128Star;\n\n//use smallprng::SmallPrng128;\n\n//use sfc64::Sfc64;\n\n//use urng::Urng64;\n\n\n\nmacro_rules! make_bench_u64 {\n\n ($name:ident, $rng:ident) => {\n\n fn $name(b: &mut Bencher) {\n\n let mut rng = $rng::from_rng(OsRng::new().unwrap()).unwrap();\n\n b.iter(|| {\n\n for _ in 0..RAND_BENCH_N {\n\n black_box(rng.gen::<u64>());\n\n }\n", "file_path": "benches/bench.rs", "rank": 52, "score": 3.222101057169265 }, { "content": "#![allow(unknown_lints)]\n\n#![allow(unreadable_literal)]\n\n\n\nextern crate rand;\n\nextern crate xoroshiro;\n\n\n\nuse rand::{Rng, RngCore};\n\nuse xoroshiro::rng::{XoroShiro128, XorShift1024};\n\n\n\n#[test]\n", "file_path": "tests/rng_seeding.rs", "rank": 53, "score": 3.0086167162517787 }, { "content": "/// Use a RNG to generate a valid (non-zero) xoroshiro seed.\n\nfn generate_seed_128<R: Rng>(rng: &mut R) -> [u64; 2] {\n\n let mut s: [u64; 2] = rng.gen();\n\n while s == [0, 0] {\n\n s = rng.gen();\n\n }\n\n s\n\n}\n\n\n\nimpl SeedableRng<u64> for XoroShiro128 {\n\n /// Reseed an `XoroShiro128`. This will use `SplitMix64` to fill the seed.\n\n fn reseed(&mut self, seed: u64) {\n\n let mut rng = SplitMix64::from_seed(seed);\n\n self.reseed(generate_seed_128(&mut rng));\n\n }\n\n\n\n /// Create a new `XoroShiro128`. 
This will use `SplitMix64` to fill the seed.\n\n fn from_seed(seed: u64) -> XoroShiro128 {\n\n let mut rng = SplitMix64::from_seed(seed);\n\n XoroShiro128::from_seed(generate_seed_128(&mut rng))\n\n }\n\n}\n\n\n\nimpl Rand for XoroShiro128 {\n\n fn rand<R: Rng>(rng: &mut R) -> XoroShiro128 {\n\n XoroShiro128::from_seed(generate_seed_128(rng))\n\n }\n\n}\n", "file_path": "benches/xoroshiro128star.rs", "rank": 54, "score": 2.9440492641334237 }, { "content": " }\n\n x -= self.c;\n\n if x < 0. {\n\n x + 1.\n\n } else {\n\n x\n\n }\n\n }\n\n\n\n #[inline]\n\n fn next_u32(&mut self) -> u32 {\n\n (self.next_f64() * 4294967296.) as u32\n\n }\n\n\n\n #[inline]\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n\n LittleEndian::write_u64(&mut chunk, self.next_u64());\n\n } else {\n", "file_path": "benches/urng.rs", "rank": 55, "score": 2.8253870858046426 }, { "content": " #[inline]\n\n fn next_f64(&mut self) -> f64 {\n\n const R: f64 = 9007199254740881.0/9007199254740992.;\n\n const D: f64 = 362436069876.0/9007199254740992.0;\n\n let mut x = self.u[self.i] - self.u[self.j];\n\n if x < 0. {\n\n x += 1.;\n\n }\n\n self.u[self.i] = x;\n\n self.i -= 1;\n\n if self.i == 0 {\n\n self.i = 97;\n\n }\n\n self.j -= 1;\n\n if self.j == 0 {\n\n self.j = 97;\n\n }\n\n self.c -= D;\n\n if self.c < 0. 
{\n\n self.c += R;\n", "file_path": "benches/urng.rs", "rank": 56, "score": 2.7000354052372533 }, { "content": "\n\n #[inline]\n\n fn next_u64(&mut self) -> u64 {\n\n const RSHIFT: u64 = 11;\n\n const LSHIFT: u64 = 3;\n\n const BARREL_SHIFT: u64 = 24;\n\n\n\n let tmp = self.a + self.b + self.counter;\n\n self.counter += 1;\n\n self.a = self.b ^ (self.b >> RSHIFT);\n\n self.b = self.c.wrapping_add(self.c << LSHIFT);\n\n self.c = ((self.c << BARREL_SHIFT) | (self.c >> (64 - BARREL_SHIFT))) + tmp;\n\n tmp\n\n }\n\n\n\n #[inline]\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n\n LittleEndian::write_u64(&mut chunk, self.next_u64());\n", "file_path": "benches/sfc64.rs", "rank": 57, "score": 2.469597600925539 }, { "content": " fn next_u32(&mut self) -> u32 {\n\n self.next_u64() as u32\n\n }\n\n\n\n #[inline]\n\n fn next_u64(&mut self) -> u64 {\n\n self.x = self.x.wrapping_mul(0x41C64E6D).wrapping_add(1);\n\n let z = (self.x ^ (self.x >> 32)).wrapping_mul(0xAEF17502108EF2D9);\n\n return z ^ (z >> 30);\n\n }\n\n\n\n #[inline]\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n\n LittleEndian::write_u64(&mut chunk, self.next_u64());\n\n } else {\n\n debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n", "file_path": "src/rng/linnorm64.rs", "rank": 58, "score": 2.3813976866067694 }, { "content": "}\n\n\n\nimpl SeedableRng<u32> for SmallPrng128 {\n\n /// Reseed a `SmallPrng128`.\n\n fn reseed(&mut self, seed: u32) {\n\n self.reseed([seed, seed, seed]);\n\n }\n\n\n\n /// Create a new `SmallPrng128`.\n\n fn from_seed(seed: u32) -> SmallPrng128 {\n\n SmallPrng128::from_seed([seed, seed, seed])\n\n }\n\n}\n\n\n\nimpl SeedableRng<u64> for SmallPrng128 {\n\n /// Reseed a `SmallPrng128`.\n\n fn reseed(&mut self, seed: u64) {\n\n self.reseed([seed as u32, (seed >> 32) as u32, seed as u32]);\n\n }\n\n\n", "file_path": 
"benches/smallprng.rs", "rank": 59, "score": 2.3299965733437773 }, { "content": "\n\n #[inline]\n\n fn next_u64(&mut self) -> u64 {\n\n let AesRng { aes, mut key } = *self;\n\n aes.encrypt(&mut key);\n\n self.aes = Aes128::new(&key);\n\n LittleEndian::read_u64(&key)\n\n }\n\n\n\n #[inline]\n\n fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n\n LittleEndian::write_u64(&mut chunk, self.next_u64());\n\n } else {\n\n debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n\n for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as u8;\n", "file_path": "benches/aes.rs", "rank": 60, "score": 2.315247750867693 }, { "content": " debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n\n for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as u8;\n\n i += 1;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n /*\n\n pub fn seed(&mut self, seed1: i32, seed2: i32) {\n\n let mut x = seed1;\n\n let mut y = seed2;\n\n for i in 1..98 {\n\n let mut s = 0.;\n\n let mut t = 0.5;\n\n for _ in 1..54 {\n", "file_path": "benches/urng.rs", "rank": 61, "score": 2.2991037384349844 }, { "content": "impl SeedableRng for SplitMix64 {\n\n type Seed = [u8; 8];\n\n\n\n /// Create a new `SplitMix64`.\n\n fn from_seed(seed: [u8; 8]) -> SplitMix64 {\n\n SplitMix64 {\n\n x: LittleEndian::read_u64(&seed),\n\n }\n\n }\n\n}\n", "file_path": "src/rng/splitmix64.rs", "rank": 62, "score": 2.242256116606983 }, { "content": " b: seed,\n\n c: seed,\n\n counter: 1,\n\n };\n\n for _ in 0..12 {\n\n rng.next_u64();\n\n }\n\n rng\n\n }\n\n}\n\n\n\n\n\nimpl Rand for Sfc64 {\n\n fn rand<R: Rng>(rng: &mut R) -> Sfc64 {\n\n let seed: [u64; 3] = rng.gen();\n\n Sfc64::from_seed(seed)\n\n }\n\n}\n", "file_path": "benches/sfc64.rs", "rank": 63, "score": 2.2118481621954493 }, { "content": " x = 6969i32.wrapping_mul(x) % 65543;\n\n y = 8888i32.wrapping_mul(y) % 65579;\n\n if ((x ^ y) & 32) > 0 {\n\n s += t;\n\n }\n\n t *= 0.5;\n\n 
}\n\n self.u[i] = s;\n\n }\n\n }\n\n */\n\n\n\nimpl SeedableRng<[f64; 98]> for Urng64 {\n\n /// Reseed an `Urng64`.\n\n fn reseed(&mut self, seed: [f64; 98]) {\n\n let rng = Urng64::from_seed(seed);\n\n *self = rng;\n\n }\n\n\n\n /// Create a new `Urng64`.\n", "file_path": "benches/urng.rs", "rank": 64, "score": 2.1822539168606645 }, { "content": " } else {\n\n debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n\n for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as u8;\n\n i += 1;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl SeedableRng<[u64; 3]> for Sfc64 {\n\n /// Reseed a `Sfc64`.\n\n fn reseed(&mut self, seed: [u64; 3]) {\n\n *self = Sfc64 {\n\n a: seed[0],\n\n b: seed[1],\n\n c: seed[2],\n", "file_path": "benches/sfc64.rs", "rank": 65, "score": 2.1534411498757073 }, { "content": "}\n\n\n\nimpl SeedableRng<u64> for Sfc64 {\n\n /// Reseed a `Sfc64`.\n\n fn reseed(&mut self, seed: u64) {\n\n *self = Sfc64 {\n\n a: seed,\n\n b: seed,\n\n c: seed,\n\n counter: 1,\n\n };\n\n for _ in 0..18 {\n\n self.next_u64();\n\n }\n\n }\n\n\n\n /// Create a new `Sfc64`.\n\n fn from_seed(seed: u64) -> Sfc64 {\n\n let mut rng = Sfc64 {\n\n a: seed,\n", "file_path": "benches/sfc64.rs", "rank": 66, "score": 2.07139397188486 }, { "content": " } else {\n\n debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n\n for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as u8;\n\n i += 1;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl SeedableRng<[u32; 3]> for SmallPrng128 {\n\n /// Reseed a `SmallPrng128`.\n\n fn reseed(&mut self, seed: [u32; 3]) {\n\n *self = SmallPrng128 {\n\n a: 0xf1ea5eed,\n\n b: seed[0],\n\n c: seed[1],\n", "file_path": "benches/smallprng.rs", "rank": 67, "score": 2.07139397188486 }, { "content": " LittleEndian::write_u64(&mut chunk, self.next_u64());\n\n } else {\n\n debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n\n for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as u8;\n\n i += 1;\n\n }\n\n 
}\n\n }\n\n }\n\n\n\n #[inline]\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {\n\n self.fill_bytes(dest);\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/rng/splitmix64.rs", "rank": 68, "score": 1.990680309157269 }, { "content": " /// Create a new `SmallPrng128`.\n\n fn from_seed(seed: u64) -> SmallPrng128 {\n\n SmallPrng128::from_seed([seed as u32, (seed >> 32) as u32, seed as u32])\n\n }\n\n}\n\n\n\n\n\nimpl Rand for SmallPrng128 {\n\n fn rand<R: Rng>(rng: &mut R) -> SmallPrng128 {\n\n let seed: [u32; 3] = rng.gen();\n\n SmallPrng128::from_seed(seed)\n\n }\n\n}\n", "file_path": "benches/smallprng.rs", "rank": 69, "score": 1.8589169886439456 }, { "content": " \"XoroShiro128.reseed called with an all zero seed.\");\n\n\n\n self.s0 = seed[0];\n\n self.s1 = seed[1];\n\n }\n\n\n\n /// Create a new `XoroShiro128`. This will panic if `seed` is entirely 0.\n\n fn from_seed(seed: [u64; 2]) -> XoroShiro128 {\n\n assert!(seed != [0, 0],\n\n \"XoroShiro128::from_seed called with an all zero seed.\");\n\n\n\n XoroShiro128 {\n\n s0: seed[0],\n\n s1: seed[1],\n\n }\n\n }\n\n}\n\n\n\n/// Use a RNG to generate a valid (non-zero) xoroshiro seed.\n", "file_path": "benches/xoroshiro128star.rs", "rank": 70, "score": 1.794262240830283 }, { "content": " }\n\n}\n\n\n\nimpl SeedableRng for XoroShiro128 {\n\n type Seed = [u8; 16];\n\n\n\n /// Create a new `XoroShiro128`. 
This will panic if `seed` is entirely 0.\n\n fn from_seed(seed: [u8; 16]) -> XoroShiro128 {\n\n assert!(seed != [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],\n\n \"XoroShiro128::from_seed called with an all zero seed.\");\n\n\n\n XoroShiro128 {\n\n s0: LittleEndian::read_u64(&seed[..8]),\n\n s1: LittleEndian::read_u64(&seed[8..]),\n\n }\n\n }\n\n}\n", "file_path": "src/rng/xoroshiro128.rs", "rank": 71, "score": 1.777864683925821 }, { "content": "\n\n let mut rngs = [\n\n linnorm_from_slice(&seed[0..8]),\n\n linnorm_from_slice(&seed[8..16]),\n\n linnorm_from_slice(&seed[16..24]),\n\n linnorm_from_slice(&seed[24..32]),\n\n ];\n\n\n\n let r_simd = rng_simd.next_u64x4();\n\n let rs = [\n\n rngs[0].next_u64(),\n\n rngs[1].next_u64(),\n\n rngs[2].next_u64(),\n\n rngs[3].next_u64(),\n\n ];\n\n assert_eq!(r_simd.extract(0), rs[0]);\n\n assert_eq!(r_simd.extract(1), rs[1]);\n\n assert_eq!(r_simd.extract(2), rs[2]);\n\n assert_eq!(r_simd.extract(3), rs[3]);\n\n}\n", "file_path": "src/rng/linnorm64simd.rs", "rank": 73, "score": 1.739433129796618 }, { "content": " fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n\n LittleEndian::write_u64(&mut chunk, self.next_u64());\n\n } else {\n\n debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n\n for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as u8;\n\n i += 1;\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {\n\n self.fill_bytes(dest);\n\n Ok(())\n", "file_path": "src/rng/xoroshiro128.rs", "rank": 74, "score": 1.7231686631177077 }, { "content": " fn fill_bytes(&mut self, dest: &mut [u8]) {\n\n for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n\n LittleEndian::write_u64(&mut chunk, self.next_u64());\n\n } else {\n\n debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n\n for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as 
u8;\n\n i += 1;\n\n }\n\n }\n\n }\n\n }\n\n\n\n #[inline]\n\n fn try_fill_bytes(&mut self, dest: &mut [u8]) -> Result<(), rand_core::Error> {\n\n self.fill_bytes(dest);\n\n Ok(())\n", "file_path": "src/rng/xorshift1024.rs", "rank": 75, "score": 1.7231686631177077 }, { "content": " for mut chunk in dest.chunks_mut(8) {\n\n if chunk.len() == 8 {\n\n LittleEndian::write_u64(&mut chunk, self.next_u64());\n\n } else {\n\n debug_assert!(chunk.len() < 8);\n\n let r = self.next_u64();\n\n let mut i = 0;\n\n for v in chunk.iter_mut() {\n\n *v = (r >> 8*i) as u8;\n\n i += 1;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl SeedableRng<[u64; 2]> for XoroShiro128 {\n\n /// Reseed an `XoroShiro128`. This will panic if `seed` is entirely 0.\n\n fn reseed(&mut self, seed: [u64; 2]) {\n\n assert!(seed != [0, 0],\n", "file_path": "benches/xoroshiro128star.rs", "rank": 76, "score": 1.7215669954490513 }, { "content": " *y = *x;\n\n }\n\n XoroShiro128::from_seed(seed)\n\n }\n\n\n\n let mut rngs = [\n\n xoroshiro_from_slice(&seed[0..16]),\n\n xoroshiro_from_slice(&seed[16..32]),\n\n xoroshiro_from_slice(&seed[32..48]),\n\n xoroshiro_from_slice(&seed[48..64]),\n\n ];\n\n\n\n let r_simd = rng_simd.next_u64x4();\n\n let rs = [\n\n rngs[0].next_u64(),\n\n rngs[1].next_u64(),\n\n rngs[2].next_u64(),\n\n rngs[3].next_u64(),\n\n ];\n\n assert_eq!(r_simd.extract(0), rs[0]);\n\n assert_eq!(r_simd.extract(1), rs[1]);\n\n assert_eq!(r_simd.extract(2), rs[2]);\n\n assert_eq!(r_simd.extract(3), rs[3]);\n\n}\n", "file_path": "src/rng/xoroshiro128simd.rs", "rank": 77, "score": 1.673632205478654 }, { "content": " fn default() -> XorShift1024Seed {\n\n XorShift1024Seed([0; 16 * 8])\n\n }\n\n}\n\n\n\nimpl SeedableRng for XorShift1024 {\n\n type Seed = XorShift1024Seed;\n\n\n\n /// Create a new `XorShift1024`. 
This will panic if `seed` is entirely 0.\n\n fn from_seed(seed: XorShift1024Seed) -> XorShift1024 {\n\n let seed = seed.0;\n\n assert!(!seed.iter().all(|&x| x == 0),\n\n \"XorShift1024::from_seed called with an all zero seed.\");\n\n\n\n XorShift1024 {\n\n s: [\n\n LittleEndian::read_u64(&seed[0..8]),\n\n LittleEndian::read_u64(&seed[8..16]),\n\n LittleEndian::read_u64(&seed[16..24]),\n\n LittleEndian::read_u64(&seed[24..32]),\n", "file_path": "src/rng/xorshift1024.rs", "rank": 78, "score": 1.4339810715150927 } ]
Rust
src/keys/types.rs
ianco/aries-askar
7346f30c8c95bf2ce343bc50e0f38f4e3921c711
use std::borrow::Cow; use std::convert::Infallible; use std::fmt::{self, Debug, Display, Formatter}; use std::mem::ManuallyDrop; use std::ops::Deref; use std::ptr; use std::str::FromStr; use indy_utils::keys::{EncodedVerKey, KeyType as IndyKeyAlg, PrivateKey, VerKey}; use serde::{Deserialize, Serialize}; use zeroize::Zeroize; use crate::error::Error; use crate::types::{sorted_tags, EntryTag, SecretBytes}; #[derive(Clone, Debug, PartialEq, Eq, Zeroize)] pub enum KeyAlg { ED25519, Other(String), } serde_as_str_impl!(KeyAlg); impl KeyAlg { pub fn as_str(&self) -> &str { match self { Self::ED25519 => "ed25519", Self::Other(other) => other.as_str(), } } } impl AsRef<str> for KeyAlg { fn as_ref(&self) -> &str { self.as_str() } } impl FromStr for KeyAlg { type Err = Infallible; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "ed25519" => Self::ED25519, other => Self::Other(other.to_owned()), }) } } impl Display for KeyAlg { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } #[derive(Clone, Debug, PartialEq, Eq, Zeroize)] pub enum KeyCategory { PublicKey, KeyPair, Other(String), } impl KeyCategory { pub fn as_str(&self) -> &str { match self { Self::PublicKey => "public", Self::KeyPair => "keypair", Self::Other(other) => other.as_str(), } } pub fn into_string(self) -> String { match self { Self::Other(other) => other, _ => self.as_str().to_owned(), } } } serde_as_str_impl!(KeyCategory); impl AsRef<str> for KeyCategory { fn as_ref(&self) -> &str { self.as_str() } } impl FromStr for KeyCategory { type Err = Infallible; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "public" => Self::PublicKey, "keypair" => Self::KeyPair, other => Self::Other(other.to_owned()), }) } } impl Display for KeyCategory { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] pub struct KeyParams { pub alg: KeyAlg, #[serde(default, rename = 
"meta", skip_serializing_if = "Option::is_none")] pub metadata: Option<String>, #[serde(default, rename = "ref", skip_serializing_if = "Option::is_none")] pub reference: Option<String>, #[serde( default, rename = "pub", skip_serializing_if = "Option::is_none", with = "crate::serde_utils::as_base58" )] pub pub_key: Option<Vec<u8>>, #[serde( default, rename = "prv", skip_serializing_if = "Option::is_none", with = "crate::serde_utils::as_base58" )] pub prv_key: Option<SecretBytes>, } impl KeyParams { pub(crate) fn to_vec(&self) -> Result<Vec<u8>, Error> { serde_json::to_vec(self) .map_err(|e| err_msg!(Unexpected, "Error serializing key params: {}", e)) } pub(crate) fn from_slice(params: &[u8]) -> Result<KeyParams, Error> { let result = serde_json::from_slice(params) .map_err(|e| err_msg!(Unexpected, "Error deserializing key params: {}", e)); result } } impl Drop for KeyParams { fn drop(&mut self) { self.zeroize() } } impl Zeroize for KeyParams { fn zeroize(&mut self) { self.prv_key.zeroize(); } } #[derive(Clone, Debug, Eq)] pub struct KeyEntry { pub category: KeyCategory, pub ident: String, pub params: KeyParams, pub tags: Option<Vec<EntryTag>>, } impl KeyEntry { pub(crate) fn into_parts(self) -> (KeyCategory, String, KeyParams, Option<Vec<EntryTag>>) { let slf = ManuallyDrop::new(self); unsafe { ( ptr::read(&slf.category), ptr::read(&slf.ident), ptr::read(&slf.params), ptr::read(&slf.tags), ) } } pub fn is_local(&self) -> bool { self.params.reference.is_none() } pub fn encoded_verkey(&self) -> Result<EncodedVerKey, Error> { Ok(self .verkey()? .as_base58() .map_err(err_map!(Unexpected, "Error encoding verkey"))?) 
} pub fn verkey(&self) -> Result<VerKey, Error> { match (&self.params.alg, &self.params.pub_key) { (KeyAlg::ED25519, Some(pub_key)) => Ok(VerKey::new(pub_key, Some(IndyKeyAlg::ED25519))), (_, None) => Err(err_msg!(Input, "Undefined public key")), _ => Err(err_msg!(Unsupported, "Unsupported key algorithm")), } } pub fn private_key(&self) -> Result<PrivateKey, Error> { match (&self.params.alg, &self.params.prv_key) { (KeyAlg::ED25519, Some(prv_key)) => { Ok(PrivateKey::new(prv_key, Some(IndyKeyAlg::ED25519))) } (_, None) => Err(err_msg!(Input, "Undefined private key")), _ => Err(err_msg!(Unsupported, "Unsupported key algorithm")), } } pub(crate) fn sorted_tags(&self) -> Option<Vec<&EntryTag>> { self.tags.as_ref().and_then(sorted_tags) } } impl PartialEq for KeyEntry { fn eq(&self, rhs: &Self) -> bool { self.category == rhs.category && self.ident == rhs.ident && self.params == rhs.params && self.sorted_tags() == rhs.sorted_tags() } } #[derive(Clone)] pub struct PassKey<'a>(Option<Cow<'a, str>>); impl PassKey<'_> { pub fn as_ref(&self) -> PassKey<'_> { PassKey(Some(Cow::Borrowed(&**self))) } pub(crate) fn is_none(&self) -> bool { self.0.is_none() } pub(crate) fn into_owned(self) -> PassKey<'static> { let mut slf = ManuallyDrop::new(self); let val = slf.0.take(); PassKey(match val { None => None, Some(Cow::Borrowed(s)) => Some(Cow::Owned(s.to_string())), Some(Cow::Owned(s)) => Some(Cow::Owned(s)), }) } } impl Debug for PassKey<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { if cfg!(test) { f.debug_tuple("PassKey").field(&*self).finish() } else { f.debug_tuple("PassKey").field(&"<secret>").finish() } } } impl Default for PassKey<'_> { fn default() -> Self { Self(None) } } impl Deref for PassKey<'_> { type Target = str; fn deref(&self) -> &str { match self.0.as_ref() { None => "", Some(s) => s.as_ref(), } } } impl Drop for PassKey<'_> { fn drop(&mut self) { self.zeroize(); } } impl<'a> From<&'a str> for PassKey<'a> { fn from(inner: &'a str) -> Self { 
Self(Some(Cow::Borrowed(inner))) } } impl From<String> for PassKey<'_> { fn from(inner: String) -> Self { Self(Some(Cow::Owned(inner))) } } impl<'a> From<Option<&'a str>> for PassKey<'a> { fn from(inner: Option<&'a str>) -> Self { Self(inner.map(Cow::Borrowed)) } } impl<'a, 'b> PartialEq<PassKey<'b>> for PassKey<'a> { fn eq(&self, other: &PassKey<'b>) -> bool { &**self == &**other } } impl Eq for PassKey<'_> {} impl Zeroize for PassKey<'_> { fn zeroize(&mut self) { match self.0.take() { Some(Cow::Owned(mut s)) => { s.zeroize(); } _ => (), } } } #[cfg(test)] mod tests { use super::*; #[test] fn key_params_roundtrip() { let params = KeyParams { alg: KeyAlg::ED25519, metadata: Some("meta".to_string()), reference: None, pub_key: Some(vec![0, 0, 0, 0]), prv_key: Some(vec![1, 1, 1, 1].into()), }; let enc_params = params.to_vec().unwrap(); let p2 = KeyParams::from_slice(&enc_params).unwrap(); assert_eq!(p2, params); } }
use std::borrow::Cow; use std::convert::Infallible; use std::fmt::{self, Debug, Display, Formatter}; use std::mem::ManuallyDrop; use std::ops::Deref; use std::ptr; use std::str::FromStr; use indy_utils::keys::{EncodedVerKey, KeyType as IndyKeyAlg, PrivateKey, VerKey}; use serde::{Deserialize, Serialize}; use zeroize::Zeroize; use crate::error::Error; use crate::types::{sorted_tags, EntryTag, SecretBytes}; #[derive(Clone, Debug, PartialEq, Eq, Zeroize)] pub enum KeyAlg { ED25519, Other(String), } serde_as_str_impl!(KeyAlg); impl KeyAlg { pub fn as_str(&self) -> &str { match self { Self::ED25519 => "ed25519", Self::Other(other) => other.as_str(), } } } impl AsRef<str> for KeyAlg { fn as_ref(&self) -> &str { self.as_str() } } impl FromStr for KeyAlg { type Err = Infallible; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "ed25519" => Self::ED25519, other => Self::Other(other.to_owned()), }) } } impl Display for KeyAlg { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } #[derive(Clone, Debug, PartialEq, Eq, Zeroize)] pub enum KeyCategory { PublicKey, KeyPair, Other(String), } impl KeyCategory { pub fn as_str(&self) -> &str { match self { Self::PublicKey => "public", Self::KeyPair => "keypair", Self::Other(other) => other.as_str(), } } pub fn into_string(self) -> String { match self { Self::Other(other) => other, _ => self.as_str().to_owned(), } } } serde_as_str_impl!(KeyCategory); impl AsRef<str> for KeyCategory { fn as_ref(&self) -> &str { self.as_str() } } impl FromStr for KeyCategory { type Err = Infallible; fn from_str(s: &str) -> Result<Self, Self::Err> { Ok(match s { "public" => Self::PublicKey, "keypair" => Self::KeyPair, other => Self::Other(other.to_owned()), }) } } impl Display for KeyCategory { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { f.write_str(self.as_str()) } } #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] pub struct KeyParams { pub alg: KeyAlg, #[serde(default, rename = 
"meta", skip_serializing_if = "Option::is_none")] pub metadata: Option<String>, #[serde(default, rename = "ref", skip_serializing_if = "Option::is_none")] pub reference: Option<String>, #[serde( default, rename = "pub", skip_serializing_if = "Option::is_none", with = "crate::serde_utils::as_base58" )] pub pub_key: Option<Vec<u8>>, #[serde( default, rename = "prv", skip_serializing_if = "Option::is_none", with = "crate::serde_utils::as_base58" )] pub prv_key: Option<SecretBytes>, } impl KeyParams { pub(crate) fn to_vec(&self) -> Result<Vec<u8>, Error> { serde_json::to_vec(self) .map_err(|e| err_msg!(Unexpected, "Error serializing key params: {}", e)) } pub(crate) fn from_slice(params: &[u8]) -> Result<KeyParams, Error> { let result = serde_json::from_slice(params) .map_err(|e| err_msg!(Unexpected, "Error deserializing key params: {}", e)); result } } impl Drop for KeyParams { fn drop(&mut self) { self.zeroize() } } impl Zeroize for KeyParams { fn zeroize(&mut self) { self.prv_key.zeroize(); } } #[derive(Clone, Debug, Eq)] pub struct KeyEntry { pub category: KeyCategory, pub ident: String, pub params: KeyParams, pub tags: Option<Vec<EntryTag>>, } impl KeyEntry { pub(crate) fn into_parts(self) -> (KeyCategory, String, KeyParams, Option<Vec<EntryTag>>) { let slf = ManuallyDrop::new(self); unsafe { ( ptr::read(&slf.category), ptr::read(&slf.ident), ptr::read(&slf.params), ptr::read(&slf.tags), ) } } pub fn is_local(&self) -> bool { self.params.reference.is_none() } pub fn encoded_verkey(&self) -> Result<EncodedVerKey, Error> { Ok(self .verkey()? .as_base58() .map_err(err_map!(Unexpected, "Error encoding verkey"))?) 
} pub fn verkey(&self) -> Result<VerKey, Error> { match (&self.params.alg, &self.params.pub_key) { (KeyAlg::ED25519, Some(pub_key)) => Ok(VerKey::new(pub_key, Some(IndyKeyAlg::ED25519))), (_, None) => Err(err_msg!(Input, "Undefined public key")), _ => Err(err_msg!(Unsupported, "Unsupported key algorithm")), } } pub fn private_key(&self) -> Result<PrivateKey, Error> { match (&self.params.alg, &self.params.prv_key) { (KeyAlg::ED25519, Some(prv_key)) => { Ok(PrivateKey::new(prv_key, Some(IndyKeyAlg::ED25519))) } (_, None) => Err(err_msg!(Input, "Undefined private key")), _ => Err(err_msg!(Unsupported, "Unsupported key algorithm")), } } pub(crate) fn sorted_tags(&self) -> Option<Vec<&EntryTag>> { self.tags
nt == rhs.ident && self.params == rhs.params && self.sorted_tags() == rhs.sorted_tags() } } #[derive(Clone)] pub struct PassKey<'a>(Option<Cow<'a, str>>); impl PassKey<'_> { pub fn as_ref(&self) -> PassKey<'_> { PassKey(Some(Cow::Borrowed(&**self))) } pub(crate) fn is_none(&self) -> bool { self.0.is_none() } pub(crate) fn into_owned(self) -> PassKey<'static> { let mut slf = ManuallyDrop::new(self); let val = slf.0.take(); PassKey(match val { None => None, Some(Cow::Borrowed(s)) => Some(Cow::Owned(s.to_string())), Some(Cow::Owned(s)) => Some(Cow::Owned(s)), }) } } impl Debug for PassKey<'_> { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { if cfg!(test) { f.debug_tuple("PassKey").field(&*self).finish() } else { f.debug_tuple("PassKey").field(&"<secret>").finish() } } } impl Default for PassKey<'_> { fn default() -> Self { Self(None) } } impl Deref for PassKey<'_> { type Target = str; fn deref(&self) -> &str { match self.0.as_ref() { None => "", Some(s) => s.as_ref(), } } } impl Drop for PassKey<'_> { fn drop(&mut self) { self.zeroize(); } } impl<'a> From<&'a str> for PassKey<'a> { fn from(inner: &'a str) -> Self { Self(Some(Cow::Borrowed(inner))) } } impl From<String> for PassKey<'_> { fn from(inner: String) -> Self { Self(Some(Cow::Owned(inner))) } } impl<'a> From<Option<&'a str>> for PassKey<'a> { fn from(inner: Option<&'a str>) -> Self { Self(inner.map(Cow::Borrowed)) } } impl<'a, 'b> PartialEq<PassKey<'b>> for PassKey<'a> { fn eq(&self, other: &PassKey<'b>) -> bool { &**self == &**other } } impl Eq for PassKey<'_> {} impl Zeroize for PassKey<'_> { fn zeroize(&mut self) { match self.0.take() { Some(Cow::Owned(mut s)) => { s.zeroize(); } _ => (), } } } #[cfg(test)] mod tests { use super::*; #[test] fn key_params_roundtrip() { let params = KeyParams { alg: KeyAlg::ED25519, metadata: Some("meta".to_string()), reference: None, pub_key: Some(vec![0, 0, 0, 0]), prv_key: Some(vec![1, 1, 1, 1].into()), }; let enc_params = params.to_vec().unwrap(); let p2 = 
KeyParams::from_slice(&enc_params).unwrap(); assert_eq!(p2, params); } }
.as_ref().and_then(sorted_tags) } } impl PartialEq for KeyEntry { fn eq(&self, rhs: &Self) -> bool { self.category == rhs.category && self.ide
random
[ { "content": "/// Derive the (public) verification key for a keypair\n\npub fn derive_verkey(alg: KeyAlg, seed: &[u8]) -> Result<String> {\n\n match alg {\n\n KeyAlg::ED25519 => (),\n\n _ => return Err(err_msg!(Unsupported, \"Unsupported key algorithm\")),\n\n }\n\n\n\n let sk =\n\n PrivateKey::from_seed(seed).map_err(err_map!(Unexpected, \"Error generating keypair\"))?;\n\n let pk = sk\n\n .public_key()\n\n .map_err(err_map!(Unexpected, \"Error generating public key\"))?\n\n .as_base58()\n\n .map_err(err_map!(Unexpected, \"Error encoding public key\"))?\n\n .long_form();\n\n Ok(pk)\n\n}\n\n\n", "file_path": "src/keys/mod.rs", "rank": 0, "score": 347823.25777236396 }, { "content": "#[inline]\n\nfn percent_encode_into(result: &mut String, s: &str) {\n\n push_iter_str(result, utf8_percent_encode(s, NON_ALPHANUMERIC))\n\n}\n\n\n", "file_path": "src/options.rs", "rank": 1, "score": 282449.75771605904 }, { "content": "/// Verify that a message signature is consistent with the signer's key\n\npub fn verify_signature(signer_vk: &str, data: &[u8], signature: &[u8]) -> Result<bool> {\n\n let vk = EncodedVerKey::from_str(&signer_vk).map_err(err_map!(\"Invalid verkey\"))?;\n\n Ok(vk\n\n .decode()\n\n .map_err(err_map!(\"Unsupported verkey\"))?\n\n .verify_signature(&data, &signature)\n\n .unwrap_or(false))\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct KeyCache {\n\n profile_info: Mutex<HashMap<String, (ProfileId, Arc<StoreKey>)>>,\n\n pub(crate) wrap_key: Arc<WrapKey>,\n\n}\n\n\n\nimpl KeyCache {\n\n pub fn new(wrap_key: impl Into<Arc<WrapKey>>) -> Self {\n\n Self {\n\n profile_info: Mutex::new(HashMap::new()),\n\n wrap_key: wrap_key.into(),\n", "file_path": "src/keys/mod.rs", "rank": 2, "score": 279672.2465211808 }, { "content": "fn encode_tag_exist<V, E>(names: &[TagName], enc: &mut E, negate: bool) -> Result<Option<V>>\n\nwhere\n\n E: TagQueryEncoder<Clause = V>,\n\n{\n\n match names.len() {\n\n 0 => Ok(None),\n\n 1 => {\n\n let is_plaintext = match names[0] {\n\n 
TagName::Plaintext(_) => true,\n\n _ => false,\n\n };\n\n let enc_name = enc.encode_name(&names[0])?;\n\n enc.encode_exist_clause(enc_name, is_plaintext, negate)\n\n }\n\n n => {\n\n let mut cs = Vec::with_capacity(n);\n\n for idx in 0..n {\n\n if let Some(clause) = encode_tag_exist(&names[idx..=idx], enc, negate)? {\n\n cs.push(clause);\n\n }\n\n }\n\n enc.encode_conj_clause(ConjunctionOp::And, cs)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/wql/tags.rs", "rank": 3, "score": 254806.60660710547 }, { "content": "fn encode_tag_query<V, E>(query: &TagQuery, enc: &mut E, negate: bool) -> Result<Option<V>>\n\nwhere\n\n E: TagQueryEncoder<Clause = V>,\n\n{\n\n match query {\n\n TagQuery::Eq(tag_name, target_value) => {\n\n encode_tag_op(CompareOp::Eq, tag_name, target_value, enc, negate)\n\n }\n\n TagQuery::Neq(tag_name, target_value) => {\n\n encode_tag_op(CompareOp::Neq, tag_name, target_value, enc, negate)\n\n }\n\n TagQuery::Gt(tag_name, target_value) => {\n\n encode_tag_op(CompareOp::Gt, tag_name, target_value, enc, negate)\n\n }\n\n TagQuery::Gte(tag_name, target_value) => {\n\n encode_tag_op(CompareOp::Gte, tag_name, target_value, enc, negate)\n\n }\n\n TagQuery::Lt(tag_name, target_value) => {\n\n encode_tag_op(CompareOp::Lt, tag_name, target_value, enc, negate)\n\n }\n", "file_path": "src/wql/tags.rs", "rank": 4, "score": 254806.60660710547 }, { "content": "pub fn decode_wallet_key(enc_key: &[u8], password: &str) -> Result<StoreKey> {\n\n let key =\n\n serde_json::from_slice::<EncStorageKey>(enc_key).map_err(err_map!(\"Invalid wallet key\"))?;\n\n\n\n let keys = decrypt_key(key, password)?;\n\n let data = rmp_serde::from_slice::<[serde_bytes::ByteBuf; 7]>(keys.as_slice()).unwrap();\n\n let wallet_key = StoreKey {\n\n category_key: EncKey::from_slice(&data[0]),\n\n name_key: EncKey::from_slice(&data[1]),\n\n value_key: EncKey::from_slice(&data[2]),\n\n item_hmac_key: HmacKey::from_slice(&data[3]),\n\n tag_name_key: EncKey::from_slice(&data[4]),\n\n tag_value_key: 
EncKey::from_slice(&data[5]),\n\n tags_hmac_key: HmacKey::from_slice(&data[6]),\n\n };\n\n\n\n Ok(wallet_key)\n\n}\n\n\n", "file_path": "src/indy_compat/mod.rs", "rank": 5, "score": 243057.9995794356 }, { "content": "pub fn parse_raw_key(raw_key: &str) -> Result<WrapKey> {\n\n let key = base58::decode(raw_key)\n\n .map_err(|_| err_msg!(Input, \"Error parsing raw key as base58 value\"))?;\n\n if key.len() != WRAP_KEY_SIZE {\n\n Err(err_msg!(Input, \"Incorrect length for encoded raw key\"))\n\n } else {\n\n Ok(WrapKey::from(WrapKeyData::from_slice(key)))\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct WrapKey(pub Option<WrapKeyData>);\n\n\n\nimpl WrapKey {\n\n pub const fn empty() -> Self {\n\n Self(None)\n\n }\n\n\n\n pub fn random() -> Result<Self> {\n\n Ok(Self(Some(WrapKeyData::random())))\n", "file_path": "src/keys/wrap.rs", "rank": 6, "score": 227250.5527701525 }, { "content": "pub fn encode_store_key(store_key: &StoreKey, wrap_key: &WrapKey) -> Result<Vec<u8>> {\n\n let enc_store_key = store_key.to_string()?;\n\n let result = wrap_key.wrap_data(enc_store_key.into())?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 7, "score": 222726.54548208494 }, { "content": "fn decrypt_key(key: EncStorageKey, password: &str) -> Result<Vec<u8>> {\n\n // check for a raw key in base58 format\n\n if let Ok(raw_key) = base58::decode(password) {\n\n if raw_key.len() == 32 {\n\n let master_key = EncKey::from_slice(&raw_key);\n\n return Ok(decrypt(&master_key, key.keys.as_slice())?);\n\n }\n\n }\n\n\n\n let salt = &key.master_key_salt[..16];\n\n\n\n // derive key with libsodium 'moderate' settings\n\n let master_key = Level::Moderate.derive_key(salt, password)?;\n\n if let Ok(keys) = decrypt(&master_key, key.keys.as_slice()) {\n\n Ok(keys)\n\n } else {\n\n // derive key with libsodium 'interactive' settings\n\n let master_key = Level::Interactive.derive_key(salt, password)?;\n\n Ok(decrypt(&master_key, key.keys.as_slice())?)\n\n }\n\n}\n\n\n", 
"file_path": "src/indy_compat/mod.rs", "rank": 8, "score": 222122.30540229805 }, { "content": "#[inline]\n\nfn decode_utf8(value: Vec<u8>) -> Result<String> {\n\n String::from_utf8(value).map_err(err_map!(Encryption))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::types::Entry;\n\n\n\n #[test]\n\n fn store_key_round_trip() {\n\n let key = StoreKey::new().unwrap();\n\n let test_record = Entry::new(\n\n \"category\",\n\n \"name\",\n\n \"value\",\n\n Some(vec![\n\n EntryTag::Plaintext(\"plain\".to_string(), \"tag\".to_string()),\n\n EntryTag::Encrypted(\"enctag\".to_string(), \"envtagval\".to_string()),\n\n ]),\n", "file_path": "src/keys/store.rs", "rank": 9, "score": 220718.56625909387 }, { "content": "fn parse_salt(detail: &str) -> Result<Vec<u8>> {\n\n let opts = Options::parse_uri(detail)?;\n\n if let Some(salt) = opts.query.get(\"salt\") {\n\n if let Ok(salt) = base58::decode(salt) {\n\n if salt.len() >= SALT_SIZE {\n\n Ok(salt)\n\n } else {\n\n Err(err_msg!(Input, \"Invalid salt length\"))\n\n }\n\n } else {\n\n Err(err_msg!(Input, \"Invalid salt\"))\n\n }\n\n } else {\n\n Err(err_msg!(Input, \"Missing salt\"))\n\n }\n\n}\n", "file_path": "src/keys/kdf/mod.rs", "rank": 10, "score": 215821.73026592485 }, { "content": "pub trait SymEncryptKey: Clone + Debug + Eq + Sized + Serialize + for<'a> Deserialize<'a> {\n\n const SIZE: usize;\n\n\n\n fn as_bytes(&self) -> &[u8];\n\n\n\n fn from_slice(bytes: &[u8]) -> Self;\n\n\n\n fn from_seed(seed: &[u8]) -> Result<Self>;\n\n\n\n fn random_key() -> Self;\n\n}\n\n\n", "file_path": "src/keys/encrypt.rs", "rank": 11, "score": 212920.6801982259 }, { "content": "/// Create a new raw wrap key for a store\n\npub fn generate_raw_wrap_key(seed: Option<&[u8]>) -> Result<PassKey<'static>> {\n\n let key = if let Some(seed) = seed {\n\n WrapKey::from(WrapKeyData::from_seed(seed)?)\n\n } else {\n\n WrapKey::from(WrapKeyData::random_key())\n\n };\n\n Ok(key.to_opt_string().unwrap().into())\n\n}\n\n\n", 
"file_path": "src/keys/wrap.rs", "rank": 12, "score": 206029.41724794576 }, { "content": "pub fn get_current_error_json() -> String {\n\n if let Some(err) = Option::take(&mut *LAST_ERROR.write().unwrap()) {\n\n let message = err.to_string();\n\n let code = ErrorCode::from(err.kind()) as usize;\n\n // let extra = err.extra();\n\n json!({\"code\": code, \"message\": message}).to_string()\n\n } else {\n\n r#\"{\"code\":0,\"message\":null}\"#.to_owned()\n\n }\n\n}\n\n\n", "file_path": "src/ffi/error.rs", "rank": 13, "score": 204817.70674892567 }, { "content": "fn encode_tag_in<V, E>(\n\n name: &TagName,\n\n values: &Vec<String>,\n\n enc: &mut E,\n\n negate: bool,\n\n) -> Result<Option<V>>\n\nwhere\n\n E: TagQueryEncoder<Clause = V>,\n\n{\n\n let is_plaintext = match &name {\n\n TagName::Plaintext(_) => true,\n\n _ => false,\n\n };\n\n let enc_name = enc.encode_name(name)?;\n\n let enc_values = values\n\n .into_iter()\n\n .map(|val| enc.encode_value(val, is_plaintext))\n\n .collect::<Result<Vec<_>>>()?;\n\n\n\n enc.encode_in_clause(enc_name, enc_values, is_plaintext, negate)\n\n}\n\n\n", "file_path": "src/wql/tags.rs", "rank": 14, "score": 190753.06741932168 }, { "content": "pub fn validate_tag_query(_query: &TagQuery) -> Result<()> {\n\n // FIXME only equality comparison supported for encrypted keys\n\n Ok(())\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]\n\npub enum TagName {\n\n Encrypted(String),\n\n Plaintext(String),\n\n}\n\n\n\nimpl ToString for TagName {\n\n fn to_string(&self) -> String {\n\n match self {\n\n Self::Encrypted(v) => v.to_string(),\n\n Self::Plaintext(v) => format!(\"~{}\", v),\n\n }\n\n }\n\n}\n\n\n\nimpl Into<String> for &TagName {\n\n fn into(self) -> String {\n\n self.to_string()\n\n }\n\n}\n\n\n", "file_path": "src/wql/tags.rs", "rank": 15, "score": 190254.07566772745 }, { "content": "#[inline]\n\nfn decode_utf8(value: Vec<u8>) -> Result<String> {\n\n String::from_utf8(value).map_err(err_map!(Encryption))\n\n}\n", 
"file_path": "src/indy_compat/mod.rs", "rank": 16, "score": 187354.9630577605 }, { "content": "struct MaybeStr<'a>(&'a [u8]);\n\n\n\nimpl Debug for MaybeStr<'_> {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n if let Ok(sval) = std::str::from_utf8(self.0) {\n\n write!(f, \"{:?}\", sval)\n\n } else {\n\n write!(f, \"_\\\"{}\\\"\", hex::encode(self.0))\n\n }\n\n }\n\n}\n\n\n\n/// A protected byte buffer\n\n#[derive(Clone, Hash, PartialEq, Eq, PartialOrd, Ord, Zeroize)]\n\npub struct SecretBytes(Vec<u8>);\n\n\n\nimpl SecretBytes {\n\n pub(crate) fn as_buffer(&mut self) -> SecretBytesMut<'_> {\n\n SecretBytesMut(&mut self.0)\n\n }\n", "file_path": "src/types.rs", "rank": 17, "score": 186929.92634516503 }, { "content": "pub fn generate_salt() -> Vec<u8> {\n\n random_vec(SALT_SIZE)\n\n}\n", "file_path": "src/keys/kdf/argon2.rs", "rank": 18, "score": 186483.75410085527 }, { "content": "fn encode_tag_op<V, E>(\n\n op: CompareOp,\n\n name: &TagName,\n\n value: &String,\n\n enc: &mut E,\n\n negate: bool,\n\n) -> Result<Option<V>>\n\nwhere\n\n E: TagQueryEncoder<Clause = V>,\n\n{\n\n let is_plaintext = match &name {\n\n TagName::Plaintext(_) => true,\n\n _ => false,\n\n };\n\n let enc_name = enc.encode_name(name)?;\n\n let enc_value = enc.encode_value(value, is_plaintext)?;\n\n let op = if negate { op.negate() } else { op };\n\n\n\n enc.encode_op_clause(op, enc_name, enc_value, is_plaintext)\n\n}\n\n\n", "file_path": "src/wql/tags.rs", "rank": 19, "score": 186113.6770582273 }, { "content": "fn encode_tag_conj<V, E>(\n\n op: ConjunctionOp,\n\n subqueries: &Vec<TagQuery>,\n\n enc: &mut E,\n\n negate: bool,\n\n) -> Result<Option<V>>\n\nwhere\n\n E: TagQueryEncoder<Clause = V>,\n\n{\n\n let op = if negate { op.negate() } else { op };\n\n let clauses = subqueries\n\n .into_iter()\n\n .flat_map(|q| encode_tag_query(q, enc, negate).transpose())\n\n .collect::<Result<Vec<_>>>()?;\n\n\n\n enc.encode_conj_clause(op, clauses)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", 
"file_path": "src/wql/tags.rs", "rank": 20, "score": 186113.6770582273 }, { "content": "pub fn tag_query(query: Query) -> Result<TagQuery> {\n\n let result = query\n\n .map_names(&mut |k| {\n\n if k.starts_with(\"~\") {\n\n Ok(TagName::Plaintext(k[1..].to_string()))\n\n } else {\n\n Ok(TagName::Encrypted(k))\n\n }\n\n })\n\n .unwrap();\n\n validate_tag_query(&result)?;\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/wql/tags.rs", "rank": 21, "score": 185653.12510849582 }, { "content": "#[inline]\n\nfn push_iter_str<'a, I: Iterator<Item = &'a str>>(s: &mut String, iter: I) {\n\n for item in iter {\n\n s.push_str(item);\n\n }\n\n}\n\n\n", "file_path": "src/options.rs", "rank": 22, "score": 175681.4992146335 }, { "content": "pub fn set_last_error(error: Option<Error>) -> ErrorCode {\n\n trace!(\"askar_set_last_error\");\n\n let code = match error.as_ref() {\n\n Some(err) => err.kind.into(),\n\n None => ErrorCode::Success,\n\n };\n\n *LAST_ERROR.write().unwrap() = error;\n\n code\n\n}\n", "file_path": "src/ffi/error.rs", "rank": 23, "score": 169699.04645732566 }, { "content": "// convert a slice of tags into a Vec, when ensuring there is\n\n// adequate space in the allocations to reuse them during encryption\n\npub fn prepare_tags(tags: &[EntryTag]) -> Vec<EntryTag> {\n\n let mut result = Vec::with_capacity(tags.len());\n\n for tag in tags {\n\n result.push(match tag {\n\n EntryTag::Plaintext(name, value) => EntryTag::Plaintext(\n\n unsafe {\n\n String::from_utf8_unchecked(StoreKey::prepare_input(name.as_bytes()).into_vec())\n\n },\n\n value.clone(),\n\n ),\n\n EntryTag::Encrypted(name, value) => EntryTag::Encrypted(\n\n unsafe {\n\n String::from_utf8_unchecked(StoreKey::prepare_input(name.as_bytes()).into_vec())\n\n },\n\n unsafe {\n\n String::from_utf8_unchecked(\n\n StoreKey::prepare_input(value.as_bytes()).into_vec(),\n\n )\n\n },\n\n ),\n\n });\n\n }\n\n result\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 24, "score": 167033.46032710734 }, { "content": "fn 
collect_tags(key: &StoreKey, tags: Vec<DbRow>) -> Result<BTreeMap<i64, Vec<EntryTag>>> {\n\n let mut result = BTreeMap::new();\n\n for row in tags {\n\n let entry = result.entry(row.try_get(1)?).or_insert_with(Vec::new);\n\n let name = decode_utf8(key.decrypt_tag_name(get_slice(&row, 2)?)?)?;\n\n if row.try_get(0)? {\n\n // encrypted value\n\n let value = decode_utf8(key.decrypt_tag_value(get_slice(&row, 3)?)?)?;\n\n entry.push(EntryTag::Encrypted(name, value))\n\n } else {\n\n let value = decode_utf8(get_slice(&row, 3)?.to_vec())?;\n\n entry.push(EntryTag::Plaintext(name, value));\n\n };\n\n }\n\n Ok(result)\n\n}\n\n\n", "file_path": "src/indy_compat/mod.rs", "rank": 25, "score": 166301.95639445935 }, { "content": "#[inline]\n\npub fn random_profile_name() -> String {\n\n uuid::Uuid::new_v4().to_string()\n\n}\n", "file_path": "src/db_utils.rs", "rank": 26, "score": 166102.85737201618 }, { "content": "pub fn encode_tag_filter<Q: QueryPrepare>(\n\n tag_filter: Option<TagFilter>,\n\n key: &StoreKey,\n\n offset: usize,\n\n) -> Result<Option<(String, Vec<Vec<u8>>)>> {\n\n if let Some(tag_filter) = tag_filter {\n\n let tag_query = tag_query(tag_filter.query)?;\n\n let mut enc = TagSqlEncoder::new(\n\n |name| Ok(key.encrypt_tag_name(StoreKey::prepare_input(name.as_bytes()))?),\n\n |value| Ok(key.encrypt_tag_value(StoreKey::prepare_input(value.as_bytes()))?),\n\n );\n\n if let Some(filter) = enc.encode_query(&tag_query)? 
{\n\n let filter = replace_arg_placeholders::<Q>(&filter, (offset as i64) + 1);\n\n Ok(Some((filter, enc.arguments)))\n\n } else {\n\n Ok(None)\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 27, "score": 161444.31094758652 }, { "content": "pub fn init_keys<'a>(\n\n method: WrapKeyMethod,\n\n pass_key: PassKey<'a>,\n\n) -> Result<(StoreKey, Vec<u8>, WrapKey, String)> {\n\n let (wrap_key, wrap_key_ref) = method.resolve(pass_key)?;\n\n let store_key = StoreKey::new()?;\n\n let enc_store_key = encode_store_key(&store_key, &wrap_key)?;\n\n Ok((store_key, enc_store_key, wrap_key, wrap_key_ref.into_uri()))\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 28, "score": 149088.8627985642 }, { "content": "pub trait SymEncrypt: Debug {\n\n type Key: SymEncryptKey;\n\n type HashKey: SymEncryptHashKey;\n\n type Nonce;\n\n\n\n /// Convert a referenced secret value to a secure buffer with sufficient\n\n /// memory for in-place encryption, reusing the same buffer if possible\n\n fn prepare_input(input: &[u8]) -> SecretBytes;\n\n\n\n /// Create a predictable nonce for an input, to allow searching\n\n fn hashed_nonce(input: &SecretBytes, key: &Self::HashKey) -> Result<Self::Nonce>;\n\n\n\n /// Encrypt a secret value and optional random nonce, producing a Vec containing the\n\n /// nonce, ciphertext and tag\n\n fn encrypt(\n\n input: SecretBytes,\n\n enc_key: &Self::Key,\n\n nonce: Option<Self::Nonce>,\n\n ) -> Result<Vec<u8>>;\n\n\n", "file_path": "src/keys/encrypt.rs", "rank": 29, "score": 146497.55796241577 }, { "content": "/// Encrypt a value with a predictable nonce, making it searchable\n\nfn encrypt_searchable<E: SymEncrypt>(\n\n input: SecretBytes,\n\n enc_key: &E::Key,\n\n hmac_key: &E::HashKey,\n\n) -> Result<Vec<u8>> {\n\n let nonce = E::hashed_nonce(&input, hmac_key)?;\n\n E::encrypt(input, enc_key, Some(nonce))\n\n}\n\n\n\nimpl<E> EntryEncryptor for StoreKeyImpl<E>\n\nwhere\n\n E: SymEncrypt,\n\n{\n\n fn prepare_input(input: 
&[u8]) -> SecretBytes {\n\n E::prepare_input(input)\n\n }\n\n\n\n fn encrypt_entry_category(&self, category: SecretBytes) -> Result<Vec<u8>> {\n\n encrypt_searchable::<E>(category, &self.category_key, &self.item_hmac_key)\n\n }\n", "file_path": "src/keys/store.rs", "rank": 30, "score": 142523.56356104108 }, { "content": "#[inline]\n\nfn percent_decode(s: &str) -> Cow<'_, str> {\n\n percent_decode_str(s).decode_utf8_lossy()\n\n}\n\n\n", "file_path": "src/options.rs", "rank": 31, "score": 139840.78047968663 }, { "content": "pub fn expiry_timestamp(expire_ms: i64) -> Result<Expiry> {\n\n chrono::Utc::now()\n\n .checked_add_signed(chrono::Duration::milliseconds(expire_ms))\n\n .ok_or_else(|| err_msg!(Unexpected, \"Invalid expiry timestamp\"))\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 32, "score": 139690.27624267992 }, { "content": "enum EntryTagValues {\n\n Single(String),\n\n Multiple(Vec<String>),\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for EntryTagValues {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n struct TagValuesVisitor;\n\n\n\n impl<'d> Visitor<'d> for TagValuesVisitor {\n\n type Value = EntryTagValues;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"a string or list of strings\")\n\n }\n\n\n\n fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>\n", "file_path": "src/types.rs", "rank": 33, "score": 138069.45771234238 }, { "content": "pub trait TagQueryEncoder {\n\n type Arg;\n\n type Clause;\n\n\n\n fn encode_query(&mut self, query: &TagQuery) -> Result<Option<Self::Clause>>\n\n where\n\n Self: Sized,\n\n {\n\n encode_tag_query(query, self, false)\n\n }\n\n\n\n fn encode_name(&mut self, name: &TagName) -> Result<Self::Arg>;\n\n\n\n fn encode_value(&mut self, value: &String, is_plaintext: bool) -> Result<Self::Arg>;\n\n\n\n fn encode_op_clause(\n\n &mut self,\n\n op: CompareOp,\n\n enc_name: Self::Arg,\n\n enc_value: 
Self::Arg,\n", "file_path": "src/wql/tags.rs", "rank": 34, "score": 136560.44158191344 }, { "content": " pub trait Serialize {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer;\n\n }\n\n\n\n impl Serialize for Vec<u8> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(&base58::encode(self))\n\n }\n\n }\n\n\n\n impl Serialize for SecretBytes {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n", "file_path": "src/serde_utils.rs", "rank": 35, "score": 134036.73543149026 }, { "content": "fn decode_row(key: &StoreKey, row: DbRow) -> Result<(i64, Entry)> {\n\n let value_key_enc = get_slice(&row, 4)?;\n\n let value_key = EncKey::from_slice(decrypt(&key.value_key, value_key_enc)?);\n\n let value = decrypt(&value_key, get_slice(&row, 3)?)?;\n\n\n\n let entry = Entry::new(\n\n decode_utf8(key.decrypt_category(get_slice(&row, 1)?)?)?,\n\n decode_utf8(key.decrypt_name(get_slice(&row, 2)?)?)?,\n\n value,\n\n None,\n\n );\n\n Ok((row.try_get(0)?, entry))\n\n}\n\n\n", "file_path": "src/indy_compat/mod.rs", "rank": 36, "score": 133491.9192464953 }, { "content": "#[inline]\n\npub fn spawn_ok(fut: impl Future<Output = ()> + Send + 'static) {\n\n async_global_executor::spawn(fut).detach();\n\n}\n", "file_path": "src/future.rs", "rank": 37, "score": 132786.72643482653 }, { "content": "#[inline]\n\nfn get_slice<'a>(row: &'a DbRow, index: usize) -> Result<&'a [u8]> {\n\n row.try_get(index)\n\n .map_err(err_map!(Unexpected, \"Error fetching column\"))\n\n}\n\n\n", "file_path": "src/indy_compat/mod.rs", "rank": 38, "score": 122002.11329462338 }, { "content": "class KeyAlg(Enum):\n", "file_path": "wrappers/python/aries_askar/types.py", "rank": 39, "score": 121316.92149827648 }, { "content": " pub trait Deserialize<'de>: Sized {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n 
D: Deserializer<'de>;\n\n }\n\n\n\n impl<'de> Deserialize<'de> for Vec<u8> {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n struct FromBase58Visitor;\n\n\n\n impl<'de> Visitor<'de> for FromBase58Visitor {\n\n type Value = Vec<u8>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"a valid base58 string\")\n\n }\n\n\n", "file_path": "src/serde_utils.rs", "rank": 40, "score": 121063.80154203612 }, { "content": "pub fn decrypt_scan_entry(\n\n category: String,\n\n enc_entry: EncScanEntry,\n\n key: &StoreKey,\n\n) -> Result<Entry> {\n\n let name = key.decrypt_entry_name(enc_entry.name)?;\n\n let value = key.decrypt_entry_value(enc_entry.value)?;\n\n let tags = if let Some(enc_tags) = enc_entry.tags {\n\n Some(key.decrypt_entry_tags(\n\n decode_tags(enc_tags).map_err(|_| err_msg!(Unexpected, \"Error decoding tags\"))?,\n\n )?)\n\n } else {\n\n None\n\n };\n\n Ok(Entry::new(category.to_string(), name, value, tags))\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 41, "score": 117921.74944788293 }, { "content": "pub fn decrypt_scan_batch(\n\n category: String,\n\n enc_rows: Vec<EncScanEntry>,\n\n key: &StoreKey,\n\n) -> Result<Vec<Entry>> {\n\n let mut batch = Vec::with_capacity(enc_rows.len());\n\n for enc_entry in enc_rows {\n\n batch.push(decrypt_scan_entry(category.clone(), enc_entry, key)?);\n\n }\n\n Ok(batch)\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 42, "score": 117921.74944788293 }, { "content": "fn export_key_entry(key_entry: KeyEntry) -> KvResult<Entry> {\n\n let (category, name, params, tags) = key_entry.into_parts();\n\n let value = serde_json::to_string(&params)\n\n .map_err(err_map!(\"Error converting key entry to JSON\"))?\n\n .into_bytes();\n\n Ok(Entry::new(category.to_string(), name, value, tags))\n\n}\n", "file_path": "src/ffi/store.rs", "rank": 43, "score": 117067.7264258222 }, { "content": "fn derive_key(\n\n 
password: &str,\n\n salt: &[u8],\n\n mem_cost: u32,\n\n time_cost: u32,\n\n) -> Result<EncKey<ChaChaEncrypt>> {\n\n if salt.len() < SALT_SIZE {\n\n return Err(err_msg!(Encryption, \"Invalid salt for argon2i hash\"));\n\n }\n\n let config = argon2::Config {\n\n variant: argon2::Variant::Argon2i,\n\n version: argon2::Version::Version13,\n\n mem_cost,\n\n time_cost,\n\n lanes: 1,\n\n thread_mode: argon2::ThreadMode::Sequential,\n\n secret: &[],\n\n ad: &[],\n\n hash_length: HASH_SIZE as u32,\n\n };\n\n let mut hashed = argon2::hash_raw(password.as_bytes(), &salt[..SALT_SIZE], &config)\n\n .map_err(|e| err_msg!(Encryption, \"Error deriving key: {}\", e))?;\n\n let key = EncKey::<ChaChaEncrypt>::from_slice(&hashed);\n\n hashed.zeroize();\n\n Ok(key)\n\n}\n\n\n", "file_path": "src/keys/kdf/argon2.rs", "rank": 44, "score": 107940.1536818482 }, { "content": "pub trait SymEncryptHashKey:\n\n Clone + Debug + Eq + Sized + Serialize + for<'a> Deserialize<'a>\n\n{\n\n const SIZE: usize;\n\n\n\n fn random_hash_key() -> Self;\n\n}\n\n\n", "file_path": "src/keys/encrypt.rs", "rank": 45, "score": 100237.31106742131 }, { "content": "pub fn extend_query<'q, Q: QueryPrepare>(\n\n query: &str,\n\n args: &mut QueryParams<'q, Q::DB>,\n\n tag_filter: Option<(String, Vec<Vec<u8>>)>,\n\n offset: Option<i64>,\n\n limit: Option<i64>,\n\n) -> Result<String>\n\nwhere\n\n i64: for<'e> Encode<'e, Q::DB> + Type<Q::DB>,\n\n Vec<u8>: for<'e> Encode<'e, Q::DB> + Type<Q::DB>,\n\n{\n\n let mut query = query.to_string();\n\n if let Some((filter_clause, filter_args)) = tag_filter {\n\n args.extend(filter_args);\n\n query.push_str(\" AND \"); // assumes WHERE already occurs\n\n query.push_str(&filter_clause);\n\n };\n\n if offset.is_some() || limit.is_some() {\n\n query = Q::limit_query(query, args, offset, limit);\n\n };\n\n Ok(query)\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 46, "score": 99875.66538367778 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct EncStorageKey {\n\n keys: 
Vec<u8>,\n\n master_key_salt: Vec<u8>,\n\n}\n\n\n", "file_path": "src/indy_compat/mod.rs", "rank": 47, "score": 96449.36191605392 }, { "content": "pub fn replace_arg_placeholders<Q: QueryPrepare + ?Sized>(\n\n filter: &str,\n\n start_index: i64,\n\n) -> String {\n\n let mut index = start_index;\n\n let mut buffer: String = String::with_capacity(filter.len());\n\n let mut remain = filter;\n\n while let Some(start_offs) = remain.find('$') {\n\n let mut iter = remain[(start_offs + 1)..].chars();\n\n if let Some((end_offs, sub_index)) = iter.next().and_then(|c| match c {\n\n '$' => Some((start_offs + 2, index)),\n\n '0'..='9' => {\n\n let mut end_offs = start_offs + 2;\n\n while let Some(c) = iter.next() {\n\n if ('0'..='9').contains(&c) {\n\n end_offs += 1;\n\n } else {\n\n break;\n\n }\n\n }\n", "file_path": "src/db_utils.rs", "rank": 48, "score": 95876.36411655076 }, { "content": "#[derive(Debug, Serialize)]\n\nstruct PrintEntry {\n\n category: String,\n\n name: String,\n\n value: String,\n\n tags: HashMap<String, String>,\n\n}\n\n\n\nimpl PrintEntry {\n\n pub fn new(entry: Entry) -> Self {\n\n let value = String::from_utf8(entry.value.to_vec()).expect(\"Error parsing value as utf-8\");\n\n let mut tags = HashMap::new();\n\n if let Some(entry_tags) = entry.tags {\n\n for tag in entry_tags {\n\n match tag {\n\n EntryTag::Encrypted(name, value) => {\n\n tags.insert(name, value);\n\n }\n\n EntryTag::Plaintext(name, value) => {\n\n tags.insert(format!(\"~{}\", name), value);\n\n }\n", "file_path": "src/indy_compat/mod.rs", "rank": 67, "score": 64971.69927370988 }, { "content": "#[test]\n\nfn faber_print_records() {\n\n let db = \"tests/faber.agent372766/sqlite.db\";\n\n let key = \"Faber.Agent372766\";\n\n block_on(print_records(db, key)).unwrap();\n\n}\n", "file_path": "tests/faber.rs", "rank": 68, "score": 64885.335065614636 }, { "content": "fn perform_scan<'q>(\n\n mut active: DbSessionRef<'q, Postgres>,\n\n profile_id: ProfileId,\n\n key: Arc<StoreKey>,\n\n kind: 
EntryKind,\n\n category: String,\n\n tag_filter: Option<TagFilter>,\n\n offset: Option<i64>,\n\n limit: Option<i64>,\n\n for_update: bool,\n\n) -> impl Stream<Item = Result<Vec<EncScanEntry>>> + 'q {\n\n try_stream! {\n\n let mut params = QueryParams::new();\n\n params.push(profile_id);\n\n params.push(kind as i16);\n\n let (enc_category, tag_filter) = unblock({\n\n let key = key.clone();\n\n let category = StoreKey::prepare_input(category.as_bytes());\n\n let params_len = params.len() + 1; // plus category\n\n move || {\n", "file_path": "src/postgres/mod.rs", "rank": 69, "score": 61462.35920003869 }, { "content": "fn perform_scan<'q>(\n\n mut active: DbSessionRef<'q, Sqlite>,\n\n profile_id: ProfileId,\n\n key: Arc<StoreKey>,\n\n kind: EntryKind,\n\n category: String,\n\n tag_filter: Option<TagFilter>,\n\n offset: Option<i64>,\n\n limit: Option<i64>,\n\n) -> impl Stream<Item = Result<Vec<EncScanEntry>>> + 'q {\n\n try_stream! {\n\n let mut params = QueryParams::new();\n\n params.push(profile_id);\n\n params.push(kind as i16);\n\n let (enc_category, tag_filter) = unblock({\n\n let key = key.clone();\n\n let category = StoreKey::prepare_input(category.as_bytes());\n\n let params_len = params.len() + 1; // plus category\n\n move || {\n\n Result::Ok((\n", "file_path": "src/sqlite/mod.rs", "rank": 70, "score": 61462.35920003869 }, { "content": "pub trait IntoOptions<'a> {\n\n fn into_options(self) -> Result<Options<'a>>;\n\n}\n\n\n\nimpl<'a> IntoOptions<'a> for Options<'a> {\n\n fn into_options(self) -> Result<Options<'a>> {\n\n Ok(self)\n\n }\n\n}\n\n\n\nimpl<'a> IntoOptions<'a> for &'a str {\n\n fn into_options(self) -> Result<Options<'a>> {\n\n Options::parse_uri(self)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::iter::FromIterator;\n", "file_path": "src/options.rs", "rank": 71, "score": 60564.225250615214 }, { "content": "pub trait QueryPrepare {\n\n type DB: Database;\n\n\n\n fn placeholder(index: i64) -> String {\n\n 
format!(\"?{}\", index)\n\n }\n\n\n\n fn limit_query<'q>(\n\n mut query: String,\n\n args: &mut QueryParams<'q, Self::DB>,\n\n offset: Option<i64>,\n\n limit: Option<i64>,\n\n ) -> String\n\n where\n\n i64: for<'e> Encode<'e, Self::DB> + Type<Self::DB>,\n\n {\n\n if offset.is_some() || limit.is_some() {\n\n let last_idx = (args.len() + 1) as i64;\n\n args.push(offset.unwrap_or(0));\n\n args.push(limit.unwrap_or(-1));\n\n let limit = replace_arg_placeholders::<Self>(\" LIMIT $$, $$\", last_idx);\n\n query.push_str(&limit);\n\n }\n\n query\n\n }\n\n}\n\n\n", "file_path": "src/db_utils.rs", "rank": 72, "score": 60404.91518410842 }, { "content": "/// Create, open, or remove a generic backend implementation\n\npub trait ManageBackend<'a> {\n\n /// The type of store being managed\n\n type Store;\n\n\n\n /// Open an existing store\n\n fn open_backend(\n\n self,\n\n method: Option<WrapKeyMethod>,\n\n pass_key: PassKey<'a>,\n\n profile: Option<&'a str>,\n\n ) -> BoxFuture<'a, Result<Self::Store>>;\n\n\n\n /// Provision a new store\n\n fn provision_backend(\n\n self,\n\n method: WrapKeyMethod,\n\n pass_key: PassKey<'a>,\n\n profile: Option<&'a str>,\n\n recreate: bool,\n\n ) -> BoxFuture<'a, Result<Self::Store>>;\n\n\n\n /// Remove an existing store\n\n fn remove_backend(self) -> BoxFuture<'a, Result<bool>>;\n\n}\n\n\n", "file_path": "src/store.rs", "rank": 73, "score": 59062.626289132546 }, { "content": "def encode_str(arg: Optional[Union[str, bytes]]) -> c_char_p:\n\n \"\"\"\n\n Encode an optional input argument as a string.\n\n\n\n Returns: None if the argument is None, otherwise the value encoded utf-8.\n\n \"\"\"\n\n if arg is None:\n\n return c_char_p()\n\n if isinstance(arg, str):\n\n return c_char_p(arg.encode(\"utf-8\"))\n", "file_path": "wrappers/python/aries_askar/bindings.py", "rank": 74, "score": 58164.83386689839 }, { "content": "/// Query from a generic backend implementation\n\npub trait QueryBackend: Send {\n\n /// Count the number of matching records in the 
store\n\n fn count<'q>(\n\n &'q mut self,\n\n kind: EntryKind,\n\n category: &'q str,\n\n tag_filter: Option<TagFilter>,\n\n ) -> BoxFuture<'q, Result<i64>>;\n\n\n\n /// Fetch a single record from the store by category and name\n\n fn fetch<'q>(\n\n &'q mut self,\n\n kind: EntryKind,\n\n category: &'q str,\n\n name: &'q str,\n\n for_update: bool,\n\n ) -> BoxFuture<'q, Result<Option<Entry>>>;\n\n\n\n /// Fetch all matching records from the store\n\n fn fetch_all<'q>(\n", "file_path": "src/store.rs", "rank": 75, "score": 57676.91998930318 }, { "content": "class KeyEntry:\n\n def __init__(\n\n self,\n\n category: str,\n\n ident: str,\n\n params: dict,\n\n tags: Mapping[str, str] = None,\n\n ) -> \"Entry\":\n\n self.category = category\n\n self.ident = ident\n\n self.params = params\n\n self.tags = dict(tags) if tags else {}\n\n\n\n def __repr__(self) -> str:\n\n return (\n\n f\"{self.__class__.__name__}(category={repr(self.category)}, \"\n\n f\"ident={repr(self.ident)}, params=.., tags={self.tags})\"\n", "file_path": "wrappers/python/aries_askar/types.py", "rank": 76, "score": 57282.052929316 }, { "content": "pub trait ExtDatabase: Database {\n\n fn start_transaction(\n\n conn: &mut PoolConnection<Self>,\n\n _nested: bool,\n\n ) -> BoxFuture<'_, std::result::Result<(), SqlxError>> {\n\n <Self as Database>::TransactionManager::begin(conn)\n\n }\n\n}\n\n\n\npub enum DbSessionRef<'q, DB: ExtDatabase> {\n\n Owned(DbSession<DB>),\n\n Borrowed(&'q mut DbSession<DB>),\n\n}\n\n\n\nimpl<'q, DB: ExtDatabase> Deref for DbSessionRef<'q, DB> {\n\n type Target = DbSession<DB>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n match self {\n\n Self::Owned(e) => e,\n", "file_path": "src/db_utils.rs", "rank": 77, "score": 56394.18805780343 }, { "content": "/// Represents a generic backend implementation\n\npub trait Backend: Send + Sync {\n\n /// The type of session managed by this backend\n\n type Session: QueryBackend;\n\n\n\n /// Create a new profile\n\n fn create_profile(&self, 
name: Option<String>) -> BoxFuture<'_, Result<String>>;\n\n\n\n /// Get the name of the active profile\n\n fn get_profile_name(&self) -> &str;\n\n\n\n /// Remove an existing profile\n\n fn remove_profile(&self, name: String) -> BoxFuture<'_, Result<bool>>;\n\n\n\n /// Create a [`Scan`] against the store\n\n fn scan(\n\n &self,\n\n profile: Option<String>,\n\n kind: EntryKind,\n\n category: String,\n\n tag_filter: Option<TagFilter>,\n", "file_path": "src/store.rs", "rank": 78, "score": 55469.16032548412 }, { "content": " /// Accessor for the error kind\n\n pub fn kind(&self) -> ErrorKind {\n\n self.kind\n\n }\n\n\n\n pub(crate) fn with_cause<T: Into<Box<dyn StdError + Send + Sync>>>(mut self, err: T) -> Self {\n\n self.cause = Some(err.into());\n\n self\n\n }\n\n}\n\n\n\nimpl Display for Error {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n if let Some(msg) = self.message.as_ref() {\n\n f.write_str(msg)?;\n\n } else {\n\n f.write_str(self.kind.as_str())?;\n\n }\n\n if let Some(cause) = self.cause.as_ref() {\n\n write!(f, \"\\nCaused by: {}\", cause)?;\n", "file_path": "src/error.rs", "rank": 79, "score": 40993.71202407114 }, { "content": " Self::Encryption => \"Encryption error\",\n\n Self::Input => \"Input error\",\n\n Self::NotFound => \"Not found\",\n\n Self::Unexpected => \"Unexpected error\",\n\n Self::Unsupported => \"Unsupported\",\n\n }\n\n }\n\n}\n\n\n\nimpl Display for ErrorKind {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n f.write_str(self.as_str())\n\n }\n\n}\n\n\n\n/// The standard crate error type\n\n#[derive(Debug)]\n\npub struct Error {\n\n pub(crate) kind: ErrorKind,\n\n pub(crate) cause: Option<Box<dyn StdError + Send + Sync + 'static>>,\n", "file_path": "src/error.rs", "rank": 80, "score": 40993.51599082521 }, { "content": " /// The input parameters to the method were incorrect\n\n Input,\n\n\n\n /// The requested record was not found\n\n NotFound,\n\n\n\n /// An unexpected error occurred\n\n Unexpected,\n\n\n\n 
/// An unsupported operation was requested\n\n Unsupported,\n\n}\n\n\n\nimpl ErrorKind {\n\n /// Convert the error kind to a string reference\n\n pub fn as_str(&self) -> &'static str {\n\n match self {\n\n Self::Backend => \"Backend error\",\n\n Self::Busy => \"Busy\",\n\n Self::Duplicate => \"Duplicate\",\n", "file_path": "src/error.rs", "rank": 81, "score": 40988.29247511384 }, { "content": " }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl StdError for Error {\n\n fn source(&self) -> Option<&(dyn StdError + 'static)> {\n\n self.cause\n\n .as_ref()\n\n .map(|err| unsafe { std::mem::transmute(&**err) })\n\n }\n\n}\n\n\n\nimpl PartialEq for Error {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.kind == other.kind && self.message == other.message\n\n }\n\n}\n\n\n\nimpl From<ErrorKind> for Error {\n", "file_path": "src/error.rs", "rank": 82, "score": 40986.9851548271 }, { "content": "use std::error::Error as StdError;\n\nuse std::fmt::{self, Display, Formatter};\n\n\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n/// The possible kinds of error produced by the crate\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum ErrorKind {\n\n /// An unexpected error from the store backend\n\n Backend,\n\n\n\n /// The store backend was too busy to handle the request\n\n Busy,\n\n\n\n /// An insert operation failed due to a unique key conflict\n\n Duplicate,\n\n\n\n /// An encryption or decryption operation failed\n\n Encryption,\n\n\n", "file_path": "src/error.rs", "rank": 83, "score": 40986.14265693421 }, { "content": " pub(crate) message: Option<String>,\n\n}\n\n\n\nimpl Error {\n\n pub(crate) fn from_msg<T: Into<String>>(kind: ErrorKind, msg: T) -> Self {\n\n Self {\n\n kind,\n\n cause: None,\n\n message: Some(msg.into()),\n\n }\n\n }\n\n\n\n pub(crate) fn from_opt_msg<T: Into<String>>(kind: ErrorKind, msg: Option<T>) -> Self {\n\n Self {\n\n kind,\n\n cause: None,\n\n message: msg.map(Into::into),\n\n }\n\n }\n\n\n", "file_path": "src/error.rs", "rank": 84, 
"score": 40980.8917560252 }, { "content": " fn from(kind: ErrorKind) -> Self {\n\n Self {\n\n kind,\n\n cause: None,\n\n message: None,\n\n }\n\n }\n\n}\n\n\n\n// FIXME would be preferable to remove this auto-conversion and handle\n\n// all sqlx errors manually, to ensure there is some context around the error\n\n#[cfg(any(feature = \"indy_compat\", feature = \"postgres\", feature = \"sqlite\"))]\n\nimpl From<sqlx::Error> for Error {\n\n fn from(err: sqlx::Error) -> Self {\n\n Error::from(ErrorKind::Backend).with_cause(err)\n\n }\n\n}\n\n\n\nimpl From<indy_utils::EncryptionError> for Error {\n\n fn from(err: indy_utils::EncryptionError) -> Self {\n", "file_path": "src/error.rs", "rank": 85, "score": 40977.67328596466 }, { "content": " ($kind:ident) => {\n\n $crate::error::Error::from($crate::error::ErrorKind::$kind)\n\n };\n\n ($kind:ident, $($args:tt)+) => {\n\n $crate::error::Error::from_msg($crate::error::ErrorKind::$kind, format!($($args)+))\n\n };\n\n ($($args:tt)+) => {\n\n $crate::error::Error::from_msg($crate::error::ErrorKind::Input, format!($($args)+))\n\n };\n\n}\n\n\n\nmacro_rules! err_map {\n\n ($($params:tt)*) => {\n\n |err| err_msg!($($params)*).with_cause(err)\n\n };\n\n}\n", "file_path": "src/error.rs", "rank": 86, "score": 40974.25949695399 }, { "content": " Error::from_opt_msg(ErrorKind::Encryption, err.context)\n\n }\n\n}\n\n\n\nimpl From<indy_utils::UnexpectedError> for Error {\n\n fn from(err: indy_utils::UnexpectedError) -> Self {\n\n Error::from_opt_msg(ErrorKind::Unexpected, err.context)\n\n }\n\n}\n\n\n\nimpl From<indy_utils::ValidationError> for Error {\n\n fn from(err: indy_utils::ValidationError) -> Self {\n\n Error::from_opt_msg(ErrorKind::Input, err.context)\n\n }\n\n}\n\n\n\nmacro_rules! 
err_msg {\n\n () => {\n\n $crate::error::Error::from($crate::error::ErrorKind::Input)\n\n };\n", "file_path": "src/error.rs", "rank": 87, "score": 40973.972172577094 }, { "content": " #[inline]\n\n pub fn exist(names: Vec<String>) -> Self {\n\n Self {\n\n query: wql::Query::Exist(names),\n\n }\n\n }\n\n\n\n /// Convert the tag filter to JSON format\n\n pub fn to_string(&self) -> Result<String, Error> {\n\n serde_json::to_string(&self.query).map_err(err_map!(\"Error encoding tag filter\"))\n\n }\n\n}\n\n\n\nimpl FromStr for TagFilter {\n\n type Err = Error;\n\n\n\n fn from_str(query: &str) -> Result<Self, Error> {\n\n let query = serde_json::from_str(query).map_err(err_map!(\"Error parsing tag query\"))?;\n\n Ok(Self { query })\n\n }\n", "file_path": "src/types.rs", "rank": 88, "score": 40603.23034487895 }, { "content": "use std::fmt::{self, Debug, Formatter};\n\nuse std::mem;\n\nuse std::ops::Deref;\n\nuse std::str::FromStr;\n\n\n\nuse aead::Buffer;\n\nuse serde::{\n\n de::{Error as SerdeError, MapAccess, SeqAccess, Visitor},\n\n ser::SerializeMap,\n\n Deserialize, Deserializer, Serialize, Serializer,\n\n};\n\nuse zeroize::Zeroize;\n\n\n\nuse super::error::Error;\n\nuse super::wql;\n\n\n\npub type ProfileId = i64;\n\n\n\npub type Expiry = chrono::DateTime<chrono::Utc>;\n\n\n", "file_path": "src/types.rs", "rank": 89, "score": 40598.793442333015 }, { "content": "\n\n fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n formatter.write_str(\"an object containing zero or more entry tags\")\n\n }\n\n\n\n fn visit_map<M>(self, mut access: M) -> Result<Self::Value, M::Error>\n\n where\n\n M: MapAccess<'d>,\n\n {\n\n let mut v = Vec::with_capacity(access.size_hint().unwrap_or_default());\n\n\n\n while let Some((key, values)) = access.next_entry::<&str, EntryTagValues>()? 
{\n\n let (tag, enc) = match key.chars().next() {\n\n Some('~') => (key[1..].to_owned(), false),\n\n None => return Err(M::Error::custom(\"invalid tag name: empty string\")),\n\n _ => (key.to_owned(), true),\n\n };\n\n match (values, enc) {\n\n (EntryTagValues::Single(value), true) => {\n\n v.push(EntryTag::Encrypted(tag, value))\n", "file_path": "src/types.rs", "rank": 90, "score": 40598.69483525039 }, { "content": " fn eq(&self, other: &Self) -> bool {\n\n self.1 == other.1 && self.0 == other.0\n\n }\n\n }\n\n\n\n impl<'a> Eq for TagName<'a> {}\n\n\n\n impl Serialize for TagName<'_> {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n if self.1 {\n\n serializer.serialize_str(&self.0)\n\n } else {\n\n serializer.collect_str(&format_args!(\"~{}\", self.0))\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/types.rs", "rank": 91, "score": 40596.88004000181 }, { "content": " }\n\n Ok(EntryTagValues::Multiple(v))\n\n }\n\n }\n\n\n\n deserializer.deserialize_any(TagValuesVisitor)\n\n }\n\n}\n\n\n\nimpl Serialize for EntryTagSet {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n use std::collections::BTreeMap;\n\n\n\n #[derive(PartialOrd, Ord)]\n\n struct TagName<'a>(&'a str, bool);\n\n\n\n impl<'a> PartialEq for TagName<'a> {\n", "file_path": "src/types.rs", "rank": 92, "score": 40595.85274954484 }, { "content": " }\n\n }\n\n\n\n pub(crate) fn sorted_tags(&self) -> Option<Vec<&EntryTag>> {\n\n self.tags.as_ref().and_then(sorted_tags)\n\n }\n\n}\n\n\n\nimpl PartialEq for Entry {\n\n fn eq(&self, rhs: &Self) -> bool {\n\n self.category == rhs.category\n\n && self.name == rhs.name\n\n && self.value == rhs.value\n\n && self.sorted_tags() == rhs.sorted_tags()\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum EntryKind {\n\n Key = 1,\n", "file_path": "src/types.rs", "rank": 93, "score": 40595.568628947076 }, { "content": "impl Drop for 
SecretBytes {\n\n fn drop(&mut self) {\n\n self.zeroize();\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for SecretBytes {\n\n fn from(inner: &[u8]) -> Self {\n\n Self(inner.to_vec())\n\n }\n\n}\n\n\n\nimpl From<&str> for SecretBytes {\n\n fn from(inner: &str) -> Self {\n\n Self(inner.as_bytes().to_vec())\n\n }\n\n}\n\n\n\nimpl From<String> for SecretBytes {\n\n fn from(inner: String) -> Self {\n", "file_path": "src/types.rs", "rank": 94, "score": 40594.30209719228 }, { "content": " where\n\n E: SerdeError,\n\n {\n\n Ok(EntryTagValues::Single(value.to_owned()))\n\n }\n\n\n\n fn visit_string<E>(self, value: String) -> Result<Self::Value, E>\n\n where\n\n E: SerdeError,\n\n {\n\n Ok(EntryTagValues::Single(value))\n\n }\n\n\n\n fn visit_seq<S>(self, mut access: S) -> Result<Self::Value, S::Error>\n\n where\n\n S: SeqAccess<'d>,\n\n {\n\n let mut v = Vec::with_capacity(access.size_hint().unwrap_or_default());\n\n while let Some(value) = access.next_element()? {\n\n v.push(value)\n", "file_path": "src/types.rs", "rank": 95, "score": 40594.24310797088 }, { "content": " Plaintext(String, String),\n\n}\n\n\n\nimpl EntryTag {\n\n /// Accessor for the tag name\n\n pub fn name(&self) -> &str {\n\n match self {\n\n Self::Encrypted(name, _) | Self::Plaintext(name, _) => name,\n\n }\n\n }\n\n\n\n /// Accessor for the tag value\n\n pub fn value(&self) -> &str {\n\n match self {\n\n Self::Encrypted(_, val) | Self::Plaintext(_, val) => val,\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for EntryTag {\n", "file_path": "src/types.rs", "rank": 96, "score": 40594.158611349325 }, { "content": " let mut tags = BTreeMap::new();\n\n for tag in self.0.iter() {\n\n let (name, value) = match tag {\n\n EntryTag::Encrypted(name, val) => (TagName(name.as_str(), true), val.as_str()),\n\n EntryTag::Plaintext(name, val) => (TagName(name.as_str(), false), val.as_str()),\n\n };\n\n tags.entry(name).or_insert_with(|| vec![]).push(value);\n\n }\n\n\n\n let mut map = serializer.serialize_map(Some(tags.len()))?;\n\n for 
(tag_name, values) in tags.into_iter() {\n\n if values.len() > 1 {\n\n map.serialize_entry(&tag_name, &values)?;\n\n } else {\n\n map.serialize_entry(&tag_name, &values[0])?;\n\n }\n\n }\n\n map.end()\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub(crate) struct EncEntryTag {\n\n pub name: Vec<u8>,\n\n pub value: Vec<u8>,\n\n pub plaintext: bool,\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 97, "score": 40593.690794974216 }, { "content": "\n\n /// Try to convert the buffer value to a string reference\n\n pub fn as_opt_str(&self) -> Option<&str> {\n\n std::str::from_utf8(self.0.as_slice()).ok()\n\n }\n\n\n\n pub(crate) fn into_vec(mut self) -> Vec<u8> {\n\n let mut v = vec![]; // note: no heap allocation for empty vec\n\n mem::swap(&mut v, &mut self.0);\n\n mem::forget(self);\n\n v\n\n }\n\n}\n\n\n\nimpl Debug for SecretBytes {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n if cfg!(test) {\n\n f.debug_tuple(\"Secret\")\n\n .field(&MaybeStr(self.0.as_slice()))\n\n .finish()\n", "file_path": "src/types.rs", "rank": 98, "score": 40593.53070984714 }, { "content": " fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Self::Encrypted(name, value) => f\n\n .debug_tuple(\"Encrypted\")\n\n .field(&name)\n\n .field(&value)\n\n .finish(),\n\n Self::Plaintext(name, value) => f\n\n .debug_tuple(\"Plaintext\")\n\n .field(&name)\n\n .field(&value)\n\n .finish(),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]\n\npub(crate) struct EntryTagSet(Vec<EntryTag>);\n\n\n\nimpl EntryTagSet {\n", "file_path": "src/types.rs", "rank": 99, "score": 40593.33438101713 } ]
Rust
src/ciphers/bacon.rs
Swarley-hax/cienli
e02a20cc071812f0159d9821a8f07968645973a0
use regex::{Captures, Regex}; pub struct Bacon { letters: (char, char), } impl Bacon { pub fn new(letters: (char, char)) -> Result<Bacon, &'static str> { if letters.0 == letters.1 { return Err("Error: Letters must be different from each other!!"); } Ok(Bacon { letters: letters }) } pub fn encipher(&self, message: &str) -> String { message .to_ascii_uppercase() .chars() .map(|character| match character { 'A'..='Z' => format!("{:05b}", character as usize - 65) .replace("0", &self.letters.0.to_string()) .replace("1", &self.letters.1.to_string()), _ => character.to_string(), }) .collect::<String>() } pub fn decipher(&self, message: &str) -> String { let binary_message = message .replace(&self.letters.0.to_string(), "0") .replace(&self.letters.1.to_string(), "1"); let re = Regex::new(r"[01]{5}").unwrap(); let result = re.replace_all(&binary_message, |cap: &Captures| { ((u8::from_str_radix(&cap[0], 2).unwrap() + 65) as char).to_string() }); result.to_string() } } #[cfg(test)] mod tests { use super::Bacon; #[test] fn encipher_test() { let bacon = Bacon::new(('a', 'b')).unwrap(); assert_eq!( "aabbbaabaaababbababbabbba aababbaaababaaaaabaaabbabaaabb", bacon.encipher("Hello Friend") ); } #[test] fn decipher_test() { let bacon = Bacon::new(('a', 'b')).unwrap(); assert_eq!( "HELLO FRIEND", bacon.decipher("aabbbaabaaababbababbabbba aababbaaababaaaaabaaabbabaaabb") ); } #[test] fn encipher_with_different_letters() { let bacon = Bacon::new(('+', '=')).unwrap(); assert_eq!( "++===++=+++=+==+=+==+===+ ++=+==+++=+=+++++=+++==+=+++==", bacon.encipher("Hello Friend") ); } #[test] fn decipher_with_different_letters() { let bacon = Bacon::new(('+', '=')).unwrap(); assert_eq!( "HELLO FRIEND", bacon.decipher("++===++=+++=+==+=+==+===+ ++=+==+++=+=+++++=+++==+=+++==") ); } #[test] fn same_letters() { assert!(Bacon::new(('a', 'a')).is_err()); } }
use regex::{Captures, Regex}; pub struct Bacon { letters: (char, char), } impl Bacon { pub fn new(letters: (char, char)) -> Result<Bacon, &'static str> { if letters.0 == letters.
_test() { let bacon = Bacon::new(('a', 'b')).unwrap(); assert_eq!( "aabbbaabaaababbababbabbba aababbaaababaaaaabaaabbabaaabb", bacon.encipher("Hello Friend") ); } #[test] fn decipher_test() { let bacon = Bacon::new(('a', 'b')).unwrap(); assert_eq!( "HELLO FRIEND", bacon.decipher("aabbbaabaaababbababbabbba aababbaaababaaaaabaaabbabaaabb") ); } #[test] fn encipher_with_different_letters() { let bacon = Bacon::new(('+', '=')).unwrap(); assert_eq!( "++===++=+++=+==+=+==+===+ ++=+==+++=+=+++++=+++==+=+++==", bacon.encipher("Hello Friend") ); } #[test] fn decipher_with_different_letters() { let bacon = Bacon::new(('+', '=')).unwrap(); assert_eq!( "HELLO FRIEND", bacon.decipher("++===++=+++=+==+=+==+===+ ++=+==+++=+=+++++=+++==+=+++==") ); } #[test] fn same_letters() { assert!(Bacon::new(('a', 'a')).is_err()); } }
1 { return Err("Error: Letters must be different from each other!!"); } Ok(Bacon { letters: letters }) } pub fn encipher(&self, message: &str) -> String { message .to_ascii_uppercase() .chars() .map(|character| match character { 'A'..='Z' => format!("{:05b}", character as usize - 65) .replace("0", &self.letters.0.to_string()) .replace("1", &self.letters.1.to_string()), _ => character.to_string(), }) .collect::<String>() } pub fn decipher(&self, message: &str) -> String { let binary_message = message .replace(&self.letters.0.to_string(), "0") .replace(&self.letters.1.to_string(), "1"); let re = Regex::new(r"[01]{5}").unwrap(); let result = re.replace_all(&binary_message, |cap: &Captures| { ((u8::from_str_radix(&cap[0], 2).unwrap() + 65) as char).to_string() }); result.to_string() } } #[cfg(test)] mod tests { use super::Bacon; #[test] fn encipher
random
[ { "content": "pub fn key_gen(key: &str, message_len: usize) -> Result<String, &'static str> {\n\n let mut result: String = String::from(key);\n\n\n\n if key.len() <= 0 || message_len <= 0 {\n\n return Err(\"Error: Key and Message length must be 1 or greater than 1!!\");\n\n } else {\n\n if key.len() == message_len {\n\n return Ok(key.to_string());\n\n } else {\n\n if key.len() > message_len {\n\n return Ok(key[..message_len].to_string());\n\n } else {\n\n if key.len() < message_len {\n\n for left in 0..(message_len - key.len()) {\n\n result.push(key.as_bytes()[left % key.len()] as char);\n\n }\n\n }\n\n }\n\n }\n\n Ok(result)\n", "file_path": "src/common/mod.rs", "rank": 0, "score": 89018.38180337136 }, { "content": "pub fn hill_cipher(text: &str, key: Vec<Vec<i32>>) -> String {\n\n if text.len() % key.len() != 0 {\n\n panic!(\"Text length must be a multiple of y-dimension of key.\");\n\n }\n\n\n\n // Create matrix of text\n\n let mut text_matrix = Vec::new();\n\n\n\n // Create rows\n\n for _ in 0..key.len() {\n\n text_matrix.push(Vec::<i32>::new());\n\n }\n\n\n\n for (i, c) in text.chars().enumerate() {\n\n text_matrix[i % key.len()].push(char_to_code(c).into());\n\n }\n\n\n\n let mut ciphertext = matrix::multiply(key, text_matrix);\n\n\n\n matrix::modulus(&mut ciphertext, 26);\n", "file_path": "src/ciphers/hill.rs", "rank": 1, "score": 61445.530994106 }, { "content": "pub fn hill_cipher_decrypt(ciphertext: &str, key: Vec<Vec<i32>>) -> String {\n\n if ciphertext.len() % key.len() != 0 {\n\n panic!(\"Text length must be a multiple of y-dimension of key.\");\n\n }\n\n\n\n // Create matrix of text\n\n let mut text_matrix = Vec::new();\n\n\n\n // Create rows\n\n for _ in 0..key.len() {\n\n text_matrix.push(Vec::<i32>::new());\n\n }\n\n\n\n for (i, c) in ciphertext.chars().enumerate() {\n\n text_matrix[i % key.len()].push(char_to_code(c).into());\n\n }\n\n\n\n let key = matrix::modular_matrix_multiplicative_inverse(&key, 26);\n\n println!(\"key={:?}\", key);\n\n\n", 
"file_path": "src/ciphers/hill.rs", "rank": 2, "score": 59868.42913561317 }, { "content": "fn code_to_char(c: i32) -> char {\n\n return ((c + 0x41) as u8) as char;\n\n}\n\n\n", "file_path": "src/ciphers/hill.rs", "rank": 3, "score": 59473.766918798385 }, { "content": "fn char_to_code(a: char) -> i32 {\n\n // A -> 0, Z -> 25\n\n return ((a as u32) - 0x41) as i32;\n\n}\n\n\n", "file_path": "src/ciphers/hill.rs", "rank": 4, "score": 59473.766918798385 }, { "content": "/// Caesar Cipher\n\n///\n\n/// The struct is generated through the new() function.\n\n///\n\npub struct Caesar {\n\n rotation: u8,\n\n}\n\n\n\nimpl Caesar {\n\n /// Initialize a caesar cipher with a rotation.\n\n ///\n\n /// # Examples:\n\n /// ```\n\n /// use cienli::ciphers::caesar::Caesar;\n\n /// let caesar = Caesar::new(5).unwrap();\n\n /// ```\n\n ///\n\n /// # Errors:\n\n /// The rotation must be in range 1 and 26.\n\n pub fn new(rotation: u8) -> Result<Caesar, &'static str> {\n", "file_path": "src/ciphers/caesar.rs", "rank": 13, "score": 8.615915196105522 }, { "content": "use crate::common::key_gen;\n\n\n\n/// Vigenere Cipher\n\n///\n\n/// The struct is generated through the new() function.\n\n///\n\npub struct Vigenere<'a> {\n\n key: &'a str,\n\n}\n\n\n\nimpl Vigenere<'_> {\n\n /// Initialize a vignere cipher with a key.\n\n ///\n\n /// # Examples:\n\n /// ```\n\n /// use cienli::ciphers::vigenere::Vigenere;\n\n /// let vigenere = Vigenere::new(\"ABCDE\");\n\n /// ```\n\n pub fn new(key: &str) -> Vigenere {\n\n Vigenere { key: key }\n", "file_path": "src/ciphers/vigenere.rs", "rank": 14, "score": 7.95395888559425 }, { "content": "use crate::common::key_gen;\n\n\n\n/// Xor Cipher\n\n///\n\n/// The struct is generated through the new() function.\n\n///\n\npub struct Xor<'a> {\n\n key: &'a str,\n\n}\n\n\n\nimpl Xor<'_> {\n\n /// Initialize a xor cipher with the key.\n\n ///\n\n /// # Examples:\n\n /// ```\n\n /// use cienli::ciphers::xor::Xor;\n\n /// let xor = Xor::new(\"VMMN8\");\n\n /// ```\n\n 
pub fn new(key: &str) -> Xor {\n\n Xor { key: key }\n", "file_path": "src/ciphers/xor.rs", "rank": 15, "score": 7.953958885594249 }, { "content": " /// use cienli::ciphers::atbash::Atbash;\n\n /// let atbash = Atbash::new(\"Svool Uirvmw :)\");\n\n /// ```\n\n ///\n\n pub fn new(message: &str) -> Atbash {\n\n Atbash { message: message }\n\n }\n\n\n\n /// Enciphers a message with the atbash cipher.\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// use cienli::ciphers::atbash::Atbash;\n\n /// let atbash = Atbash::new(\"Hello Friend :)\");\n\n ///\n\n /// assert_eq!(\"Svool Uirvmw :)\", atbash.encipher());\n\n /// ```\n\n pub fn encipher(&self) -> String {\n\n self.message\n\n .chars()\n", "file_path": "src/ciphers/atbash.rs", "rank": 16, "score": 6.795725789404587 }, { "content": "/// Atbash Cipher\n\n///\n\n/// The struct is generated through the new() function\n\n///\n\npub struct Atbash<'a> {\n\n message: &'a str,\n\n}\n\n\n\nimpl Atbash<'_> {\n\n /// Initialize a atbash cipher with a message or a cipher.\n\n ///\n\n /// # Examples:\n\n /// - Initialization with a message:\n\n /// ```\n\n /// use cienli::ciphers::atbash::Atbash;\n\n /// let atbash = Atbash::new(\"Hello Friend :)\");\n\n /// ```\n\n ///\n\n /// - Initialization with a cipher:\n\n /// ```\n", "file_path": "src/ciphers/atbash.rs", "rank": 17, "score": 6.716579884755334 }, { "content": "\n\n for indx in 0..message.len() {\n\n result.push(match message[indx] as char {\n\n 'A'..='Z' => (((key[indx] + message[indx]) % 26) + 65) as char,\n\n 'a'..='z' => ((((key[indx] - 32) + message[indx]) % 26) + 97) as char,\n\n _ => message[indx] as char,\n\n });\n\n }\n\n result\n\n }\n\n\n\n /// Deciphers a cipher with the vigenere cipher.\n\n ///\n\n /// # Examples:\n\n /// ```\n\n /// use cienli::ciphers::vigenere::Vigenere;\n\n /// let vigenere = Vigenere::new(\"ABcdE\");\n\n ///\n\n /// assert_eq!(\"Qwert :)\", vigenere.decipher(\"Qxgux :)\"));\n\n pub fn decipher(&self, message: &str) -> String {\n", "file_path": 
"src/ciphers/vigenere.rs", "rank": 18, "score": 6.525048177662141 }, { "content": " /// ```\n\n /// use cienli::ciphers::scytale::Scytale;\n\n /// let scytale = Scytale::new(0);\n\n ///\n\n /// assert!(scytale.is_err());\n\n /// ```\n\n pub fn new(key: usize) -> Result<Scytale, &'static str> {\n\n match key {\n\n 0 => Err(\"Key cannot be zero\"),\n\n _ => Ok(Scytale { key: key }),\n\n }\n\n }\n\n\n\n /// Enciphers a message with the scytale cipher.\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// use cienli::ciphers::scytale::Scytale;\n\n /// let scytale = Scytale::new(3).unwrap();\n\n ///\n", "file_path": "src/ciphers/scytale.rs", "rank": 19, "score": 6.462497408688223 }, { "content": " Ok(_v) => Ok(Affine {\n\n alpha: key.0,\n\n beta: key.1,\n\n }),\n\n Err(v) => Err(v),\n\n }\n\n }\n\n\n\n /// Enciphers a message with the affine cipher.\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// use cienli::ciphers::affine::Affine;\n\n /// let affine = Affine::new((5, 2)).unwrap();\n\n ///\n\n /// assert_eq!(\"Lwffu :)\", affine.encipher(\"Hello :)\"));\n\n /// ```\n\n pub fn encipher(&self, message: &str) -> String {\n\n message\n\n .chars()\n", "file_path": "src/ciphers/affine.rs", "rank": 20, "score": 6.172808678575703 }, { "content": " /// ```\n\n ///\n\n /// - Initialization with a non-coprime key:\n\n /// ```\n\n /// use cienli::ciphers::affine::Affine;\n\n /// let affine = Affine::new((10, 2));\n\n /// assert!(affine.is_err());\n\n /// ```\n\n /// this example will\n\n ///\n\n /// - Initialization with a big key:\n\n /// ```\n\n /// use cienli::ciphers::affine::Affine;\n\n /// let affine = Affine::new((27, 2));\n\n /// assert!(affine.is_err());\n\n /// ```\n\n pub fn new(key: (u16, u16)) -> Result<Affine, &'static str> {\n\n let is_key_valid = Affine::key_checker(key);\n\n\n\n match is_key_valid {\n", "file_path": "src/ciphers/affine.rs", "rank": 21, "score": 6.027170484040228 }, { "content": "pub enum RotType {\n\n Rot5,\n\n Rot13,\n\n Rot18,\n\n Rot47,\n\n}\n\n\n\n/// 
Rot Cipher\n\n///\n\n/// The struct is generated through the new() function.\n\n///\n\npub struct Rot<'a> {\n\n message: &'a str,\n\n rot_type: RotType,\n\n}\n\n\n\nimpl Rot<'_> {\n\n /// Initialize a rot cipher with a message and rot type.\n\n ///\n\n /// # Examples:\n", "file_path": "src/ciphers/rot.rs", "rank": 22, "score": 6.0191255545040425 }, { "content": " _ => character,\n\n })\n\n .collect()\n\n }\n\n\n\n fn rot47(message: &str) -> String {\n\n message\n\n .chars()\n\n .map(|character| match character {\n\n '!'..='O' => ((character as u8) + 47) as char,\n\n 'P'..='~' => ((character as u8) - 47) as char,\n\n _ => character,\n\n })\n\n .collect()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{Rot, RotType};\n", "file_path": "src/ciphers/rot.rs", "rank": 23, "score": 5.898316022293381 }, { "content": " }\n\n\n\n /// Deciphers a cipher with the caesar cipher.\n\n ///\n\n /// # Examples:\n\n /// - Decipher with five times rotation:\n\n /// ```\n\n /// use cienli::ciphers::caesar::Caesar;\n\n /// let caesar = Caesar::new(5).unwrap();\n\n ///\n\n /// assert_eq!(\"Hello, This Is A Test\", caesar.decipher(\"Mjqqt, Ymnx Nx F Yjxy\"));\n\n /// ```\n\n pub fn decipher(&self, message: &str) -> String {\n\n Caesar::shift(message, 26 - self.rotation)\n\n }\n\n\n\n fn shift(message: &str, rotation: u8) -> String {\n\n message\n\n .chars()\n\n .map(|character| match character {\n", "file_path": "src/ciphers/caesar.rs", "rank": 24, "score": 5.898211932006456 }, { "content": " /// ```\n\n pub fn decipher(&self, message: &str) -> String {\n\n let mut alpha_inv = 0;\n\n while (self.alpha * alpha_inv) % 26 != 1 {\n\n alpha_inv += 1;\n\n }\n\n\n\n message\n\n .chars()\n\n .map(|character| match character {\n\n 'a'..='z' => {\n\n (alpha_inv * ((character as u16 - 97) - self.beta) % 26 + 97) as u8 as char\n\n }\n\n 'A'..='Z' => {\n\n (alpha_inv * ((character as u16 - 65) - self.beta) % 26 + 65) as u8 as char\n\n }\n\n _ => character,\n\n })\n\n .collect()\n\n }\n", 
"file_path": "src/ciphers/affine.rs", "rank": 25, "score": 5.7468738568479765 }, { "content": " /// ```\n\n /// use cienli::ciphers::xor::Xor;\n\n /// let xor = Xor::new(\"VMMN8\");\n\n /// assert_eq!(\"jp14N\", xor.decipher(\"<=|zv\"));\n\n /// ```\n\n pub fn decipher(&self, cipher: &str) -> String {\n\n let key = key_gen(&self.key, cipher.len()).unwrap();\n\n\n\n Xor::xor_engine(cipher, &key)\n\n }\n\n\n\n fn xor_engine(message: &str, key: &str) -> String {\n\n let mut result: String = String::new();\n\n\n\n let message = message.as_bytes();\n\n let key = key.as_bytes();\n\n\n\n for indx in 0..message.len() {\n\n result.push((message[indx] ^ key[indx]) as char)\n\n }\n", "file_path": "src/ciphers/xor.rs", "rank": 26, "score": 5.7097751517051805 }, { "content": "const TABLE: [[char; 5]; 5] = [\n\n ['A', 'B', 'C', 'D', 'E'],\n\n ['F', 'G', 'H', 'J', 'K'],\n\n ['L', 'M', 'N', 'O', 'P'],\n\n ['Q', 'R', 'S', 'T', 'U'],\n\n ['V', 'W', 'X', 'Y', 'Z'],\n\n]; // Removed letter: I\n\n\n\n/// Polybius square cipher\n\n///\n\n/// The struct is generated through the new() function\n\n///\n\npub struct PolybiusSquare<'a> {\n\n message: &'a str,\n\n}\n\n\n\nimpl PolybiusSquare<'_> {\n\n /// Initialize a polybius square cipher with a cipher/plain text\n\n ///\n\n /// # Examples:\n", "file_path": "src/ciphers/polybius_square.rs", "rank": 27, "score": 5.58338514461312 }, { "content": " /// ```\n\n /// use cienli::ciphers::scytale::Scytale;\n\n /// let scytale = Scytale::new(3).unwrap();\n\n ///\n\n /// assert_eq!(\"Hello :)\", scytale.decipher(\"Hl:eo)l \"));\n\n /// ```\n\n pub fn decipher(&self, cipher: &str) -> String {\n\n if self.key >= cipher.chars().count() || self.key == 1 {\n\n return cipher.to_string();\n\n }\n\n\n\n let mut table = Scytale::generate_table(self.key, cipher, true);\n\n\n\n let mut message = String::new();\n\n while table\n\n .iter()\n\n .filter(|character| !character.is_empty())\n\n .count()\n\n > 0\n\n {\n", "file_path": "src/ciphers/scytale.rs", "rank": 
28, "score": 5.447952254594671 }, { "content": "use num_integer::Integer;\n\n\n\n/// Affine Cipher\n\n///\n\n/// The struct is generated through the new() function\n\n///\n\npub struct Affine {\n\n alpha: u16,\n\n beta: u16,\n\n}\n\n\n\nimpl Affine {\n\n /// Initialize a affine cipher with a key\n\n ///\n\n /// # Examples:\n\n /// - Initialization with a valid key:\n\n /// ```\n\n /// use cienli::ciphers::affine::Affine;\n\n /// let affine = Affine::new((5, 2));\n\n /// assert!(affine.is_ok());\n", "file_path": "src/ciphers/affine.rs", "rank": 29, "score": 5.288633477277477 }, { "content": " self.encipher()\n\n }\n\n\n\n fn rot5(message: &str) -> String {\n\n message\n\n .chars()\n\n .map(|digit| match digit {\n\n '0'..='4' => ((digit as u8) + 5) as char,\n\n '5'..='9' => ((digit as u8) - 5) as char,\n\n _ => digit,\n\n })\n\n .collect()\n\n }\n\n\n\n fn rot13(message: &str) -> String {\n\n message\n\n .chars()\n\n .map(|character| match character {\n\n 'A'..='M' | 'a'..='m' => ((character as u8) + 13) as char,\n\n 'N'..='Z' | 'n'..='z' => ((character as u8) - 13) as char,\n", "file_path": "src/ciphers/rot.rs", "rank": 30, "score": 5.2337701126143354 }, { "content": " /// let polybius = PolybiusSquare::new(\"23153131345234423114\");\n\n /// assert_eq!(\"HELLOWORLD\", polybius.decipher().unwrap());\n\n /// ```\n\n ///\n\n /// # Error:\n\n /// If you try to decipher a non-numeric text you will get an error.\n\n pub fn decipher(&self) -> Result<String, &'static str> {\n\n if self.message.len() % 2 != 0 {\n\n return Err(\"1 column is missing\");\n\n }\n\n if !PolybiusSquare::is_string_numeric(self.message) {\n\n return Err(\"Ciphertext must be numeric\");\n\n }\n\n\n\n let mut result = String::new();\n\n\n\n let cipher_len = self.message.len();\n\n for i in 1..(cipher_len / 2) + 1 {\n\n let row_and_col = &self.message.as_bytes()[(i * 2) - 2..i * 2];\n\n let row: usize = (row_and_col[0] as char)\n", "file_path": "src/ciphers/polybius_square.rs", "rank": 31, "score": 
5.22369874383339 }, { "content": " .map(|character| match character {\n\n 'A'..='Z' => ((90 - character as u8) + 65) as char,\n\n 'a'..='z' => ((122 - character as u8) + 97) as char,\n\n _ => character,\n\n })\n\n .collect()\n\n }\n\n\n\n /// Deciphers a message with the atbash cipher.\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// use cienli::ciphers::atbash::Atbash;\n\n /// let atbash = Atbash::new(\"Svool Uirvmw :)\");\n\n ///\n\n /// assert_eq!(\"Hello Friend :)\", atbash.decipher());\n\n /// ```\n\n pub fn decipher(&self) -> String {\n\n self.encipher()\n\n }\n", "file_path": "src/ciphers/atbash.rs", "rank": 32, "score": 5.129563203852046 }, { "content": "pub mod affine;\n\npub mod atbash;\n\npub mod bacon;\n\npub mod caesar;\n\npub mod polybius_square;\n\npub mod rot;\n\npub mod scytale;\n\npub mod vigenere;\n\npub mod xor;\n", "file_path": "src/ciphers/mod.rs", "rank": 33, "score": 5.127502930154095 }, { "content": "\n\n fn key_checker(key: (u16, u16)) -> Result<(), &'static str> {\n\n if (key.0 >= 1 && key.0 <= 26) && key.1 <= 26 {\n\n if key.0.gcd(&26) == 1 {\n\n Ok(())\n\n } else {\n\n Err(\"The alpha is not co-prime with 26\")\n\n }\n\n } else {\n\n Err(\"The is greater than 26\")\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Affine;\n\n\n\n #[test]\n\n fn invalid_key_length_test() {\n", "file_path": "src/ciphers/affine.rs", "rank": 34, "score": 5.031301328677316 }, { "content": "/// Scytale Cipher\n\n///\n\n/// the struct is generated through the new() function.\n\n///\n\npub struct Scytale {\n\n key: usize,\n\n}\n\n\n\nimpl Scytale {\n\n /// Initialize a scytale cipher with a key.\n\n ///\n\n /// # Examples:\n\n /// - Initialization with valid key:\n\n /// ```\n\n /// use cienli::ciphers::scytale::Scytale;\n\n /// let scytale = Scytale::new(3);\n\n ///\n\n /// assert!(scytale.is_ok());\n\n /// ```\n\n /// - Initialization with a zero key:\n", "file_path": "src/ciphers/scytale.rs", "rank": 35, "score": 4.908585678751566 }, { 
"content": " /// - Initialization with a plaintext:\n\n /// ```\n\n /// use cienli::ciphers::polybius_square::PolybiusSquare;\n\n ///\n\n /// let polybius = PolybiusSquare::new(\"Hello World :)\");\n\n /// ```\n\n ///\n\n /// - Initialization with a ciphertext:\n\n /// ```\n\n /// use cienli::ciphers::polybius_square::PolybiusSquare;\n\n ///\n\n /// let polybius = PolybiusSquare::new(\"23153131345234423114\");\n\n /// ```\n\n ///\n\n pub fn new(message: &str) -> PolybiusSquare {\n\n PolybiusSquare { message: message }\n\n }\n\n\n\n /// Enciphers a message with the polybius square cipher:\n\n ///\n", "file_path": "src/ciphers/polybius_square.rs", "rank": 36, "score": 4.743512718583123 }, { "content": " /// assert_eq!(\"Hl:eo)l \", scytale.encipher(\"Hello :)\"))\n\n /// ```\n\n pub fn encipher(&self, message: &str) -> String {\n\n if self.key >= message.chars().count() {\n\n return message.to_string();\n\n }\n\n\n\n let table = Scytale::generate_table(self.key, message, false);\n\n\n\n table\n\n .iter()\n\n .flatten()\n\n .collect::<String>()\n\n .trim_end_matches(\"\\0\")\n\n .to_string()\n\n }\n\n\n\n /// Deciphers a message with the scytale cipher.\n\n ///\n\n /// # Example:\n", "file_path": "src/ciphers/scytale.rs", "rank": 37, "score": 4.591100747947934 }, { "content": " /// - Initialization with Rot13 type.:\n\n /// ```\n\n /// use cienli::ciphers::rot::{Rot, RotType};\n\n /// let rot = Rot::new(\"• Hello Friend 83110 :) •\", RotType::Rot13);\n\n /// ```\n\n pub fn new(message: &str, rot_type: RotType) -> Rot {\n\n Rot {\n\n message: message,\n\n rot_type: rot_type,\n\n }\n\n }\n\n\n\n /// Enciphers a message with the rot cipher.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// - Encipher with Rot47:\n\n /// ```\n\n /// use cienli::ciphers::rot::{Rot, RotType};\n\n /// let rot47 = Rot::new(\"• Hello Friend 83110 :) •\", RotType::Rot47);\n", "file_path": "src/ciphers/rot.rs", "rank": 38, "score": 4.241058190838353 }, { "content": " }\n\n\n\n /// Enciphers a message 
with the xor cipher.\n\n ///\n\n /// # Examples:\n\n /// ```\n\n /// use cienli::ciphers::xor::Xor;\n\n /// let xor = Xor::new(\"VMMN8\");\n\n ///\n\n /// assert_eq!(\"<=|zv\", xor.encipher(\"jp14N\"));\n\n /// ```\n\n pub fn encipher(&self, message: &str) -> String {\n\n let key = key_gen(&self.key, message.len()).unwrap();\n\n\n\n Xor::xor_engine(message, &key)\n\n }\n\n\n\n /// Deciphers a cipher with the xor cipher.\n\n ///\n\n /// # Examples:\n", "file_path": "src/ciphers/xor.rs", "rank": 39, "score": 4.184879581267052 }, { "content": " }\n\n\n\n /// Enciphers a message with the vigenere cipher.\n\n ///\n\n /// # Examples:\n\n /// ```\n\n /// use cienli::ciphers::vigenere::Vigenere;\n\n /// let vigenere = Vigenere::new(\"ABcdE\");\n\n ///\n\n /// assert_eq!(\"Qxgux :)\", vigenere.encipher(\"Qwert :)\"));\n\n /// ```\n\n pub fn encipher(&self, message: &str) -> String {\n\n let key = key_gen(&self.key.to_uppercase(), message.len())\n\n .unwrap()\n\n .as_bytes()\n\n .to_owned();\n\n\n\n let message = message.as_bytes();\n\n\n\n let mut result: String = String::new();\n", "file_path": "src/ciphers/vigenere.rs", "rank": 40, "score": 4.138727042036589 }, { "content": " /// # Example:\n\n /// ```\n\n /// use cienli::ciphers::polybius_square::PolybiusSquare;\n\n ///\n\n /// let polybius = PolybiusSquare::new(\"Hello World :)\");\n\n /// assert_eq!(\"23153131345234423114\", polybius.encipher());\n\n /// ```\n\n pub fn encipher(&self) -> String {\n\n self.message\n\n .to_ascii_uppercase()\n\n .chars()\n\n .map(|character| match character {\n\n 'A'..='Z' => {\n\n let mut row = ((character as u8 - 65) / 5) + 1;\n\n let mut col = ((character as u8 - 65) % 5) + 1;\n\n\n\n if character == 'K' {\n\n row = row - 1;\n\n col = 5 - col + 1;\n\n } else if character >= 'J' {\n", "file_path": "src/ciphers/polybius_square.rs", "rank": 41, "score": 4.097309606871579 }, { "content": " .to_string()\n\n .parse::<usize>()\n\n .unwrap()\n\n - 1;\n\n let col: usize = (row_and_col[1] as 
char)\n\n .to_string()\n\n .parse::<usize>()\n\n .unwrap()\n\n - 1;\n\n result.push(TABLE[row][col]);\n\n }\n\n\n\n Ok(result)\n\n }\n\n\n\n fn is_string_numeric(text: &str) -> bool {\n\n for character in text.chars() {\n\n if !character.is_numeric() {\n\n return false;\n\n }\n", "file_path": "src/ciphers/polybius_square.rs", "rank": 42, "score": 4.0598288890567105 }, { "content": " if rotation >= 1 && rotation <= 26 {\n\n Ok(Caesar { rotation: rotation })\n\n } else {\n\n Err(\"Error: Rotation must be in range 1 and 26!!\")\n\n }\n\n }\n\n\n\n /// Enciphers a message with the caesar cipher.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// - Encipher with five times rotation:\n\n /// ```\n\n /// use cienli::ciphers::caesar::Caesar;\n\n /// let caesar = Caesar::new(5).unwrap();\n\n ///\n\n /// assert_eq!(\"Mjqqt, Ymnx Nx F Yjxy\", caesar.encipher(\"Hello, This Is A Test\"));\n\n /// ```\n\n pub fn encipher(&self, message: &str) -> String {\n\n Caesar::shift(message, self.rotation)\n", "file_path": "src/ciphers/caesar.rs", "rank": 43, "score": 3.922436013267604 }, { "content": " 'A'..='Z' => (((character as u8 - 65 + rotation) % 26) + 65) as char,\n\n 'a'..='z' => (((character as u8 - 97 + rotation) % 26) + 97) as char,\n\n _ => character,\n\n })\n\n .collect()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Caesar;\n\n\n\n #[test]\n\n fn encipher_test() {\n\n let caesar = Caesar::new(5).unwrap();\n\n\n\n assert_eq!(\n\n \"Mjqqt, Ymnx Nx F Yjxy\",\n\n caesar.encipher(\"Hello, This Is A Test\")\n\n );\n", "file_path": "src/ciphers/caesar.rs", "rank": 44, "score": 3.69305437015148 }, { "content": " .map(|character| match character {\n\n 'a'..='z' => {\n\n (((character as u16 - 97) * self.alpha + self.beta) % 26 + 97) as u8 as char\n\n }\n\n 'A'..='Z' => {\n\n (((character as u16 - 65) * self.alpha + self.beta) % 26 + 65) as u8 as char\n\n }\n\n _ => character,\n\n })\n\n .collect()\n\n }\n\n\n\n /// Deciphers a message with the affine cipher.\n\n ///\n\n /// # 
Example:\n\n /// ```\n\n /// use cienli::ciphers::affine::Affine;\n\n /// let affine = Affine::new((5, 2)).unwrap();\n\n ///\n\n /// assert_eq!(\"Hello :)\", affine.decipher(\"Lwffu :)\"));\n", "file_path": "src/ciphers/affine.rs", "rank": 45, "score": 3.58729366554961 }, { "content": " for column in table.iter_mut() {\n\n message.push(column.remove(0));\n\n }\n\n }\n\n message.trim_end_matches(\"\\0\").to_string()\n\n }\n\n\n\n fn generate_table(height: usize, message: &str, decipher: bool) -> Vec<Vec<char>> {\n\n let width = (message.chars().count() as f32 / height as f32).ceil() as usize;\n\n\n\n let mut table = vec![vec!['\\0'; width]; height];\n\n\n\n for (position, element) in message.chars().enumerate() {\n\n let (column, row) = match decipher {\n\n true => (position / height, position % height),\n\n false => (position % height, position / height),\n\n };\n\n\n\n table[column][row] = element;\n\n }\n", "file_path": "src/ciphers/scytale.rs", "rank": 46, "score": 3.446865423024066 }, { "content": " #[test]\n\n fn test_char_to_code() {\n\n assert_eq!(char_to_code('A'), 0);\n\n assert_eq!(char_to_code('Z'), 25);\n\n }\n\n\n\n #[test]\n\n fn test_code_to_char() {\n\n assert_eq!(code_to_char(0), 'A');\n\n assert_eq!(code_to_char(25), 'Z');\n\n }\n\n}\n", "file_path": "src/ciphers/hill.rs", "rank": 47, "score": 3.21318784697875 }, { "content": " let mut plaintext = matrix::multiply(key, text_matrix);\n\n matrix::modulus(&mut plaintext, 26);\n\n\n\n let mut result = String::new();\n\n\n\n for i in 0..plaintext[0].len() {\n\n for j in 0..plaintext.len() {\n\n result.push(code_to_char(plaintext[j][i]));\n\n }\n\n }\n\n\n\n return result;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_hill_cipher_encryption() {\n", "file_path": "src/ciphers/hill.rs", "rank": 48, "score": 3.04871194889271 }, { "content": " /// Deciphers a cipher with the rot cipher.\n\n ///\n\n /// # Examples:\n\n ///\n\n /// - Decipher with Rot47:\n\n /// ```\n\n 
/// use cienli::ciphers::rot::{Rot, RotType};\n\n /// let rot47 = Rot::new(\"• w6==@ uC:6?5 gb``_ iX •\", RotType::Rot47);\n\n ///\n\n /// assert_eq!(\"• Hello Friend 83110 :) •\", rot47.decipher());\n\n /// ```\n\n ///\n\n /// - Decipher with Rot13:\n\n /// ```\n\n /// use cienli::ciphers::rot::{Rot, RotType};\n\n /// let rot13 = Rot::new(\"• Uryyb Sevraq 83110 :) •\", RotType::Rot13);\n\n ///\n\n /// assert_eq!(\"• Hello Friend 83110 :) •\", rot13.decipher());\n\n /// ```\n\n pub fn decipher(&self) -> String {\n", "file_path": "src/ciphers/rot.rs", "rank": 49, "score": 2.9318867986113113 }, { "content": "pub mod ciphers;\n\npub mod common;\n", "file_path": "src/lib.rs", "rank": 50, "score": 2.882538557005079 }, { "content": " let key = key_gen(&self.key.to_uppercase(), message.len())\n\n .unwrap()\n\n .as_bytes()\n\n .to_owned();\n\n\n\n let message = message.as_bytes();\n\n\n\n let mut result: String = String::new();\n\n\n\n for indx in 0..message.len() {\n\n result.push(match message[indx] as char {\n\n 'A'..='Z' => (((26 + message[indx] - key[indx]) % 26) + 65) as char,\n\n 'a'..='z' => (((26 + message[indx] - (key[indx] + 32)) % 26) + 97) as char,\n\n _ => message[indx] as char,\n\n });\n\n }\n\n result\n\n }\n\n}\n\n\n", "file_path": "src/ciphers/vigenere.rs", "rank": 51, "score": 2.750858465157359 }, { "content": "use crate::matrix;\n\n\n", "file_path": "src/ciphers/hill.rs", "rank": 52, "score": 2.355197443164464 }, { "content": " ///\n\n /// assert_eq!(\"• w6==@ uC:6?5 gb``_ iX •\", rot47.encipher());\n\n /// ```\n\n ///\n\n /// - Encipher with Rot13:\n\n /// ```\n\n /// use cienli::ciphers::rot::{Rot, RotType};\n\n /// let rot13 = Rot::new(\"• Hello Friend 83110 :) •\", RotType::Rot13);\n\n ///\n\n /// assert_eq!(\"• Uryyb Sevraq 83110 :) •\", rot13.encipher());\n\n /// ```\n\n pub fn encipher(&self) -> String {\n\n match self.rot_type {\n\n RotType::Rot5 => return Rot::rot5(self.message),\n\n RotType::Rot13 => return Rot::rot13(self.message),\n\n 
RotType::Rot18 => return Rot::rot13(&(Rot::rot5(self.message))),\n\n RotType::Rot47 => return Rot::rot47(self.message),\n\n }\n\n }\n\n\n", "file_path": "src/ciphers/rot.rs", "rank": 53, "score": 2.230775398209764 }, { "content": "\n\n let mut result = String::new();\n\n\n\n for i in 0..ciphertext[0].len() {\n\n for j in 0..ciphertext.len() {\n\n result.push(code_to_char(ciphertext[j][i]));\n\n }\n\n }\n\n\n\n return result;\n\n}\n\n\n", "file_path": "src/ciphers/hill.rs", "rank": 54, "score": 2.2259232124962396 }, { "content": " if col == 1 {\n\n col = 6;\n\n row = row - 1;\n\n }\n\n col = col - 1;\n\n }\n\n\n\n format!(\"{}{}\", row, col)\n\n }\n\n _ => String::from(\"\"),\n\n })\n\n .collect()\n\n }\n\n\n\n /// Deciphers a ciphertext with the polybius square cipher:\n\n ///\n\n /// # Example:\n\n /// ```\n\n /// use cienli::ciphers::polybius_square::PolybiusSquare;\n\n ///\n", "file_path": "src/ciphers/polybius_square.rs", "rank": 55, "score": 1.6553859777212934 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::Vigenere;\n\n\n\n #[test]\n\n fn encipher_test() {\n\n let v = Vigenere::new(\"ABCDE\");\n\n\n\n assert_eq!(\"QXGUX :)\", v.encipher(\"QWERT :)\"));\n\n }\n\n\n\n #[test]\n\n fn decipher_test() {\n\n let v = Vigenere::new(\"ABCDE\");\n\n\n\n assert_eq!(\"Qwert :)\", v.decipher(\"Qxgux :)\"));\n\n }\n\n}\n", "file_path": "src/ciphers/vigenere.rs", "rank": 56, "score": 1.546011224854233 }, { "content": "\n\n table\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Scytale;\n\n\n\n #[test]\n\n fn invalid_key_test() {\n\n assert!(Scytale::new(0).is_err());\n\n }\n\n\n\n #[test]\n\n fn big_key_test() {\n\n let scytale = Scytale::new(15).unwrap();\n\n assert_eq!(\"Hello :)\", scytale.encipher(\"Hello :)\"))\n\n }\n\n\n", "file_path": "src/ciphers/scytale.rs", "rank": 57, "score": 1.5258480417983857 }, { "content": "\n\n result\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Xor;\n\n\n\n #[test]\n\n fn encipher() {\n\n let xor = 
Xor::new(\"VMMN8\");\n\n\n\n assert_eq!(\"<=|zv\", xor.encipher(\"jp14N\"));\n\n }\n\n\n\n #[test]\n\n fn decipher() {\n\n let xor = Xor::new(\"VMMN8\");\n\n\n\n assert_eq!(\"jp14N\", xor.decipher(\"<=|zv\"));\n\n }\n\n}\n", "file_path": "src/ciphers/xor.rs", "rank": 58, "score": 1.4501939320523027 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Atbash;\n\n\n\n #[test]\n\n fn atbash_encipher() {\n\n let atbash = Atbash::new(\"Hello Friend :)\");\n\n assert_eq!(\"Svool Uirvmw :)\", atbash.encipher())\n\n }\n\n\n\n #[test]\n\n fn atbash_decipher() {\n\n let atbash = Atbash::new(\"Svool Uirvmw :)\");\n\n assert_eq!(\"Hello Friend :)\", atbash.decipher());\n\n }\n\n}\n", "file_path": "src/ciphers/atbash.rs", "rank": 59, "score": 1.4324382581228097 }, { "content": " }\n\n true\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::PolybiusSquare;\n\n\n\n #[test]\n\n fn encipher_test() {\n\n let polybius = PolybiusSquare::new(\"Hello World :)\");\n\n\n\n assert_eq!(\"23153131345234423114\", polybius.encipher());\n\n }\n\n\n\n #[test]\n\n fn decipher_test() {\n\n let polybius = PolybiusSquare::new(\"23153131345234423114\");\n\n\n\n assert_eq!(\"HELLOWORLD\", polybius.decipher().unwrap());\n\n }\n\n}\n", "file_path": "src/ciphers/polybius_square.rs", "rank": 60, "score": 1.4324382581228097 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::key_gen;\n\n\n\n #[test]\n\n fn make_key_bigger() {\n\n assert_eq!(String::from(\"TESTTESTTE\"), key_gen(\"TEST\", 10).unwrap());\n\n }\n\n\n\n #[test]\n\n fn make_key_smaller() {\n\n assert_eq!(String::from(\"TE\"), key_gen(\"TEST\", 2).unwrap());\n\n }\n\n\n\n #[test]\n\n fn key_is_equal() {\n\n assert_eq!(String::from(\"TEST\"), key_gen(\"TEST\", 4).unwrap());\n", "file_path": "src/common/mod.rs", "rank": 61, "score": 1.3344099306300288 }, { "content": "# 
CiEnLi\n\n\n\n![b](https://img.shields.io/crates/l/cienli)\n\n![b](https://img.shields.io/crates/d/cienli)\n\n![b](https://img.shields.io/crates/v/cienli)\n\n![b](https://img.shields.io/docsrs/cienli)\n\n\n\nCiEnLi is a library of historical ciphers implemented in [rust](https://www.rust-lang.org/).\n\n\n\n## The list of ciphers must add:\n\n- [x] Vigenère\n\n- [x] ROT5\n\n- [x] ROT13 \n\n- [x] ROT18\n\n- [x] ROT47\n\n- [x] Bacon\n\n- [x] Caesar\n\n- [x] XOR\n\n- [x] Atbash\n\n- [x] Affine\n\n- [x] Scytale\n\n- [x] Polybius Square\n\n- [x] Hill\n\n- [ ] Playfair\n\n- [ ] Rail-fence\n\n- [ ] Fractionated Morse\n\n- [ ] Trifid\n\n- [ ] Porta\n\n- [ ] ADFGVX\n\n- [ ] Straddle Checkerboard\n\n- [ ] Bifid\n\n- [ ] Homophonic\n\n- [ ] Four-Square\n\n- [ ] Autokey\n\n- [ ] Columnar Transposition\n\n\n\nIf you have cipher in mind that there is no inside list, please add it to the list :)\n", "file_path": "README.md", "rank": 62, "score": 1.1332030638419073 } ]
Rust
intonaco/src/cif.rs
urs-of-the-backwoods/fresco
9914df6d534f591448ed1501965f1bd03f3724de
use std; use std::io::{Cursor}; use std::mem; use libc; use std::env::vars; use std::ffi::CStr; use snowflake::ProcessUniqueId; use cbor::{Encoder}; #[cfg(unix)] use libloading::os::unix::{Library, Symbol}; #[cfg(windows)] use libloading::os::windows::{Library, Symbol}; use component; use cbor::{Config, GenericDecoder, DecodeError, value}; use entity::{Entity, EntityPointer}; use system::{System, ObjectLibSystem, CallbackSystem}; use ftypes::{FrMsg, FrMsgLength, FrComponentType, FrItemType, FrPropertyType, FrItem, FrMessageFn, FrMessageFn2}; /* void (*inEntityCreate) (FrMsg m, FrMsgLength l, FrEntity *e); // Msg contains a CBOR array from arrays [u64, bs] void (*inEntityReadComponent) (FrEntity e, FrComponentType ct, FrMessageFn f); // also names are "Entity", read write works per component void (*inEntityWriteComponent) (FrEntity e, FrComponentType ct, FrMsg m, FrMsgLength l); void (*inEntityReadId) (FrEntity e, FrItem it, FrMessageFn f); void (*inEntityDestroy) (FrEntity e); void (*inObjectLibSystemInit) (FrGiornataEnv g, msgPointer m, msgLength l, FrSystem *ps); // Msg contains specific system creation parameters void (*inObjectLibSystemAddEntity) (FrSystem s, FrEntity e); void (*inObjectLibSystemRemoveEntity) (FrSystem s, FrEntity e); void (*inObjectLibSystemShutdown) (FrSystem s); void (*inObjectLibSystemStep) (FrSystem s); // runs one cycle of system (control over Thread needed) void (*inCallbackSystemInit) (FrSystem *ps); void (*inCallbackSystemRegisterReceiver) (FrSystem s, FrEntity e, FrComponentType ct, FrMessageFn2 f); void (*inCallbackSystemShutdown) (FrSystem s); void (*inCallbackSystemStep) (FrSystem s); */ #[no_mangle] pub extern "C" fn inEntityCreate(data: *const u8, len: u32, pp: *mut EntityPointer) { let mut cts:Vec<(u64, Vec<u8>)> = Vec::new(); if len > 0 { let dv = component::vec_from_c_char_p(data, len); let mut reader = Cursor::new(dv); let mut d = GenericDecoder::new(Config::default(), reader); loop { match d.value() { 
Ok(value::Value::U64(u)) => { match d.value() { Ok(value::Value::Bytes(v)) => match v { value::Bytes::Bytes(v) => cts.push((u, v)), value::Bytes::Chunks(l) => cts.push((u, l.front().unwrap().clone())), }, Ok(other) => panic!("inEntityCreate, bytes not following u64: {:?}", other), Err(err) => panic!("inEntityCreate, error after u64: {:?}", err), } }, Ok(value::Value::I64(u)) => panic!("inEntityCreate, found i64 instead of u64: {:?}", u), _ => break, } } } let ep = Entity::to_ptr(Entity::new(cts)); unsafe { *pp = ep; } } #[no_mangle] pub extern "C" fn inEntityDestroy(ep: EntityPointer) { let e = Entity::from_ptr(ep); } #[no_mangle] pub extern "C" fn inEntityReadComponent(ep: EntityPointer, ct: u64, ip: FrItem, rcb: FrMessageFn) { let av = Entity::do_with(ep, (|en| { return en.get(ct); })); unsafe { rcb(ip, av.as_ptr(), av.len() as u32); } } #[no_mangle] pub extern "C" fn inEntityWriteComponent(ep: EntityPointer, ct: u64, p: *const u8, l: u32) { Entity::do_with(ep, (|en| { let v = component::vec_from_c_char_p(p, l); en.set(ct, v); })); } #[no_mangle] pub extern "C" fn inEntityId(ep: EntityPointer, ip: FrItem, rcb: FrMessageFn) { let id_bs = Entity::do_with(ep, (|en| { return en.id(); })); let id_u8 = unsafe { mem::transmute::<ProcessUniqueId, [u8; 16]>(id_bs) }; let mut buf = vec![]; let mut enc = Encoder::new(buf); enc.bytes(&id_u8); let id_bs2 = &enc.into_writer(); unsafe { rcb(ip, id_u8.as_ptr(), id_u8.len() as u32); } } #[no_mangle] pub extern "C" fn inObjectLibSystemInit(env: *const libc::c_char, pp: *mut *mut ObjectLibSystem) { for (k, v) in vars() { if k.eq( unsafe { &CStr::from_ptr(env).to_string_lossy().into_owned() }) { let lib = Library::new(v).unwrap(); let b = Box::new(ObjectLibSystem::new(&lib)); unsafe { *pp = Box::into_raw(b); } mem::forget(lib); return; } }; panic!("Intonaco: env variable GIORNATA not set!"); return; } #[no_mangle] pub extern "C" fn inObjectLibSystemStep(ols: *mut ObjectLibSystem) { unsafe { let olsb = Box::from_raw(ols); 
olsb.step_system(); std::mem::forget(olsb); } } #[no_mangle] pub extern "C" fn inObjectLibSystemShutdown(ols: *mut ObjectLibSystem) { } #[no_mangle] pub extern "C" fn inObjectLibSystemAddEntity (ols: *mut ObjectLibSystem, ep: EntityPointer) { unsafe { let olsb = Box::from_raw(ols); olsb.add_entity(ep); std::mem::forget(olsb); } } #[no_mangle] pub extern "C" fn inObjectLibSystemRemoveEntity (ols: *mut ObjectLibSystem, ep: EntityPointer) { } #[no_mangle] pub extern "C" fn inCallbackSystemInit(pp: *mut *mut CallbackSystem) { let b = Box::new(CallbackSystem::new()); unsafe { *pp = Box::into_raw(b); } } #[no_mangle] pub extern "C" fn inCallbackSystemRegisterReceiver (cbs: *mut CallbackSystem, ep: EntityPointer, ct: u64, mfp: FrMessageFn2) { unsafe { let cb = Box::from_raw(cbs); cb.register_callback(ep, ct, mfp); std::mem::forget(cb); } } #[no_mangle] pub extern "C" fn inCallbackSystemShutdown(cbs: *mut CallbackSystem) { } #[no_mangle] pub extern "C" fn inCallbackSystemStep(cbs: *mut CallbackSystem) { unsafe { let cb = Box::from_raw(cbs); cb.step_system(); std::mem::forget(cb); } }
use std; use std::io::{Cursor}; use std::mem; use libc; use std::env::vars; use std::ffi::CStr; use snowflake::ProcessUniqueId; use cbor::{Encoder}; #[cfg(unix)] use libloading::os::unix::{Library, Symbol}; #[cfg(windows)] use libloading::os::windows::{Library, Symbol}; use component; use cbor::{Config, GenericDecoder, DecodeError, value}; use entity::{Entity, EntityPointer}; use system::{System, ObjectLibSystem, CallbackSystem}; use ftypes::{FrMsg, FrMsgLength, FrComponentType, FrItemType, FrPropertyType, FrItem, FrMessageFn, FrMessageFn2}; /* void (*inEntityCreate) (FrMsg m, FrMsgLength l, FrEntity *e); // Msg contains a CBOR array from arrays [u64, bs] void (*inEntityReadComponent) (FrEntity e, FrComponentType ct, FrMessageFn f); // also names are "Entity", read write works per component void (*inEntityWriteComponent) (FrEntity e, FrComponentType ct, FrMsg m, FrMsgLength l); void (*inEntityReadId) (FrEntity e, FrItem it, FrMessageFn f); void (*inEntityDestroy) (FrEntity e); void (*inObjectLibSystemInit) (FrGiornataEnv g, msgPointer m, msgLength l, FrSystem *ps); // Msg contains specific system creation parameters void (*inObjectLibSystemAddEntity) (FrSystem s, FrEntity e); void (*inObjectLibSystemRemoveEntity) (FrSystem s, FrEntity e); void (*inObjectLibSystemShutdown) (FrSystem s); void (*inObjectLibSystemStep) (FrSystem s); // runs one cycle of system (control over Thread needed) void (*inCallbackSystemInit) (FrSystem *ps); void (*inCallbackSystemRegisterReceiver) (FrSystem s, FrEntity e, FrComponentType ct, FrMessageFn2 f); void (*inCallbackSystemShutdown) (FrSystem s); void (*inCallbackSystemStep) (FrSystem s); */ #[no_mangle] pub extern "C" fn inEntityCreate(data: *const u8, len: u32, pp: *mut EntityPointer) { let mut cts:Vec<(u64, Vec<u8>)> = Vec::new(); if len > 0 { let dv = component::vec_from_c_char_p(data, len); let mut reader = Cursor::new(dv); let mut d = GenericDecoder::new(Config::default(), reader); loop { match d.value() { 
Ok(value::Value::U64(u)) => { match d.value() { Ok(value::Value::Bytes(v)) => match v { value::Bytes::Bytes(v) => cts.push((u, v)), value::Bytes::Chunks(l) => cts.push((u, l.front().unwrap().clone())), }, Ok(other) => panic!("inEntityCreate, bytes not following u64: {:?}", other), Err(err) => panic!("inEntityCreate, error after u64: {:?}", err), } }, Ok(value::Value::I64(u)) => panic!("inEntityCreate, found i64 instead of u64: {:?}", u), _ => break, } } } let ep = Entity::to_ptr(Entity::new(cts)); unsafe { *pp = ep; } } #[no_mangle] pub extern "C" fn inEntityDestroy(ep: EntityPointer) { let e = Entity::from_ptr(ep); } #[no_mangle] pub extern "C" fn inEntityReadComponent(ep: EntityPointer, ct: u64, ip: FrItem, rcb: FrMessageFn) { let av = Entity::do_with(ep, (|en| { return en.get(ct); })); unsafe { rcb(ip, av.as_ptr(), av.len() as u32); } } #[no_mangle] pub extern "C" fn inEntityWriteComponent(ep: EntityPointer, ct: u64, p: *const u8, l: u32) { Entity::do_with(ep, (|en| { let v = component::vec_from_c_char_p(p, l); en.set(ct, v); })); } #[no_mangle] pub extern "C" fn inEntityId(ep: EntityPointer, ip: FrItem, rcb: FrMessageFn) { let id_bs = Entity::do_with(ep, (|en| { return en.id(); })); let id_u8 = unsafe { mem::transmute::<ProcessUniqueId, [u8; 16]>(id_bs) }; let mut buf = vec![]; let mut enc = Encoder::new(buf); enc.bytes(&id_u8); let id_bs2 = &enc.into_writer(); unsafe { rcb(ip, id_u8.as_ptr(), id_u8.len() as u32); } } #[no_mangle] pub extern "C" fn inObjectLibSystemInit(env: *const libc::c_char, pp: *mut *mut ObjectLibSystem) { for (k, v) in vars() { if k.eq( unsafe { &CStr::from_ptr(env).to_string_lossy().into_owned() }) { let lib = Library::new(v).unwrap(); let b = Box::new(ObjectLibSystem::new(&lib)); unsafe { *pp = Box::into_raw(b); } mem::forget(lib); return; } }; panic!("Intonaco: env variable GIORNATA not set!"); return; } #[no_mangle] pub extern "C" fn inObjectLibSystemStep(ols: *mut ObjectLibSystem) { unsafe { let olsb = Box::from_raw(ols); 
olsb.step_system(); std::mem::forget(olsb); } } #[no_mangle] pub extern "C" fn inObjectLibSystemShutdown(ols: *mut ObjectLibSystem) { } #[no_mangle] pub extern "C" fn inObjectLibSystemAddEntity (ols: *mut ObjectLibSystem, ep: EntityPointer) { unsafe { let olsb = Box::from_raw(ols); olsb.add_entity(ep); std::mem::forget(olsb); } } #[no_mangle] pub extern "C" fn inObjectLibSystemRemoveEntity (ols: *mut ObjectLibSystem, ep: EntityPointer) { } #[no_mangle] pub extern "C" fn inCallbackSystemInit(pp: *mut *mut CallbackSystem) { let b = Box::new(CallbackSystem::new()); unsafe { *pp = Box::into_raw(b); } } #[no_mangle]
#[no_mangle] pub extern "C" fn inCallbackSystemShutdown(cbs: *mut CallbackSystem) { } #[no_mangle] pub extern "C" fn inCallbackSystemStep(cbs: *mut CallbackSystem) { unsafe { let cb = Box::from_raw(cbs); cb.step_system(); std::mem::forget(cb); } }
pub extern "C" fn inCallbackSystemRegisterReceiver (cbs: *mut CallbackSystem, ep: EntityPointer, ct: u64, mfp: FrMessageFn2) { unsafe { let cb = Box::from_raw(cbs); cb.register_callback(ep, ct, mfp); std::mem::forget(cb); } }
function_block-full_function
[ { "content": "pub fn set_c_value(fp: FrMessageFn2, ep: EntityPointer, ct: u64, msg: &[u8]) {\n\n unsafe {\n\n fp (ep, ct, msg.as_ptr(), msg.len() as u32);\n\n }\n\n}\n\n\n\nimpl CallbackSystem {\n\n pub fn new() -> CallbackSystem {\n\n let cbs = CallbackSystem {\n\n queue: Arc::new(MsQueue::new()),\n\n };\n\n cbs\n\n }\n\n\n\n pub fn register_callback(&self, ep: EntityPointer, ct: u64, cb: FrMessageFn2) {\n\n Entity::do_with(ep, |e| {\n\n let queue2 = self.queue.clone();\n\n e.values()[&ct].add_callback(\n\n Arc::new(move | v | { queue2.push(CallbackSystemCommands::SetCValue(ep, ct, cb, v)); })\n\n );\n", "file_path": "intonaco/src/system.rs", "rank": 0, "score": 222174.98587594283 }, { "content": "pub fn vec_from_c_char_p(data: *const u8, len: u32) -> Vec<u8> {\n\n let mut dv = Vec::with_capacity(len as usize);\n\n unsafe {\n\n for i in 0..len {\n\n dv.push(*data.offset(i as isize) as u8);\n\n }\n\n }\n\n dv\n\n}\n\n\n", "file_path": "intonaco/src/component.rs", "rank": 1, "score": 211194.20991996193 }, { "content": "pub fn set_c_value_on_object_lib_item(fp: SetValueOnObjectLibItemCallback, ip: FrItem, msg: &[u8]) {\n\n unsafe {\n\n fp (ip, msg.as_ptr(), msg.len() as FrMsgLength);\n\n }\n\n}\n\n\n\n// the interface to external libraries, exposing object lib interfaces\n\npub struct ObjectLibInterface {\n\n create_function: Symbol <extern \"C\" fn(FrItemType, FrMsg, FrMsgLength) -> FrItem>,\n\n destroy_function: Symbol <extern \"C\" fn(FrItemType, FrItem)>,\n\n get_msg_sender_function: Symbol <extern \"C\" fn(FrItemType, FrPropertyType) -> Option<FrMessageFn>>,\n\n register_msg_receiver_function: Symbol <extern \"C\" fn(FrItemType, FrPropertyType, FrItem, EntityPointer, FrMessageFn2)>,\n\n}\n\n\n\nimpl ObjectLibInterface {\n\n\n\n pub fn create_item(&self, item_type: FrItemType, item_init_data: &[u8]) -> FrItem\n\n {\n\n unsafe {\n\n (self.create_function)(item_type, item_init_data.as_ptr(), item_init_data.len() as FrMsgLength)\n", "file_path": 
"intonaco/src/system.rs", "rank": 2, "score": 192804.40652032237 }, { "content": "pub trait System {\n\n fn add_entity(&self, ep: EntityPointer);\n\n fn step_system(&self);\n\n}\n\n\n\n//\n\n// ObjectLibSystem, System with Object realted libraries as implementation behind it\n\n//\n\n\n\npub enum ObjectLibSystemCommands {\n\n AddEntityToOLS(EntityPointer, Arc<Barrier>),\n\n RemoveObjectLibItemFromOLS(u64, thread_guard::Value<FrItem>),\n\n SetValueOnObjectLibItem(thread_guard::Value<SetValueOnObjectLibItemCallback>, thread_guard::Value<FrItem>, Arc<Vec<u8>>),\n\n}\n\n\n\npub struct ObjectLibSystem {\n\n queue: Arc<MsQueue<ObjectLibSystemCommands>>,\n\n lib_if: ObjectLibInterface ,\n\n}\n\n\n", "file_path": "intonaco/src/system.rs", "rank": 3, "score": 87179.17195341241 }, { "content": "// creating a object lib interface from a dynamic loaded library (DLL, .so)\n\nfn get_object_lib_interface(dynamic_lib: & Library) -> ObjectLibInterface {\n\n // find hgamer3d by dynamic loading\n\n\n\n let create_item: Symbol<extern \"C\" fn(FrItemType, FrMsg, FrMsgLength) -> FrItem> = unsafe {\n\n dynamic_lib.get(b\"gioCreateItem\\0\").unwrap()\n\n }; \n\n let destroy_item: Symbol<extern \"C\" fn(FrItemType, FrItem)> = unsafe {\n\n dynamic_lib.get(b\"gioDestroyItem\\0\").unwrap()\n\n }; \n\n let get_msg_sender: Symbol<extern \"C\" fn(FrItemType, FrPropertyType) -> Option<FrMessageFn>> = unsafe {\n\n dynamic_lib.get(b\"gioGetMsgSender\\0\").unwrap()\n\n }; \n\n let register_msg_receiver: Symbol<extern \"C\" fn(FrItemType, FrPropertyType, FrItem, EntityPointer, FrMessageFn2)> = unsafe {\n\n dynamic_lib.get(b\"gioRegisterMsgReceiver\\0\").unwrap()\n\n }; \n\n\n\n ObjectLibInterface {\n\n create_function: create_item,\n\n destroy_function: destroy_item,\n\n get_msg_sender_function: get_msg_sender,\n", "file_path": "intonaco/src/system.rs", "rank": 4, "score": 78693.03090794205 }, { "content": "fn main() {\n\n println!(\"\");\n\n}", "file_path": "intonaco/build.rs", "rank": 5, "score": 
45635.93034280414 }, { "content": "func readComponent(dat string) Component {\n\n\tvar db Component\n\n\t_, err := toml.Decode(dat, &db)\n\n\tif err != nil {\n\n\t\tlog.Fatal(\"cannot read component \", err)\n\n\t}\n\n\treturn db\n", "file_path": "arriccio/main.go", "rank": 6, "score": 44552.70110079867 }, { "content": "func writeComponent(db Component, fname string) {\n\n\tvar buf bytes.Buffer\n\n\te := toml.NewEncoder(&buf)\n\n\terr := e.Encode(db)\n\n\tif err != nil {\n\n\t\tlog.Fatal(\"cannot write component \", err)\n\n\t} else {\n\n\t\tioutil.WriteFile(fname, buf.Bytes(), 0660)\n\n\t}\n", "file_path": "arriccio/main.go", "rank": 7, "score": 44549.95685046485 }, { "content": "func evaluateEnvSetting(env []string, settings []string, installdir string) []string {\n\n\t// interpretation of environment commmand settings\n\n\t// possible commands\n\n\t//\n\n\t// add-path ENVVAR rel-path [sep]\n\n\t// set-value ENVVAR val [sep]\n\n\t//\n\n\n\n\t// create map\n\n\tm := make(map[string]string)\n\n\torig_key := make(map[string]string)\n\n\tfor _, e := range env {\n\n\t\tkvs := strings.SplitN(e, \"=\", 2)\n\n\t\tk := strings.TrimSpace(kvs[0])\n\n\t\tuk := strings.ToUpper(k)\n\n\t\tm[uk] = strings.TrimSpace(kvs[1])\n\n\t\torig_key[uk] = k\n\n\t}\n\n\n\n\t// modify map\n\n\tfor _, s := range settings {\n\n\t\tfs := strings.Fields(s)\n\n\t\tif len(fs) == 3 || len(fs) == 4 {\n\n\t\t\tv, ok := m[fs[1]]\n\n\t\t\tval := \"\"\n\n\t\t\tif fs[0] == \"add-path\" {\n\n\t\t\t\tval1, err := filepath.Abs(filepath.Join(installdir, fs[2]))\n\n\t\t\t\tif err != nil {\n\n\t\t\t\t\tlog.Fatal(\"wrong relative path given: \", fs[2], err)\n\n\t\t\t\t}\n\n\t\t\t\tval = val1\n\n\t\t\t} else if fs[0] == \"set-value\" {\n\n\t\t\t\tval = fs[2]\n\n\t\t\t} else {\n\n\t\t\t\tlog.Fatal(\"wrong command in setting: \", fs[0])\n\n\t\t\t}\n\n\n\n\t\t\t// check for separator\n\n\t\t\ts := string(os.PathListSeparator)\n\n\t\t\tif len(fs) == 4 {\n\n\t\t\t\ts = fs[3]\n\n\t\t\t}\n\n\n\n\t\t\t// compose 
result\n\n\t\t\tif ok {\n\n\t\t\t\tm[strings.ToUpper(fs[1])] = v + s + val\n\n\t\t\t} else {\n\n\t\t\t\tm[strings.ToUpper(fs[1])] = val\n\n\t\t\t}\n\n\n\n\t\t} else {\n\n\t\t\tlog.Fatal(\"wrong number of arguments in setting: \", s)\n\n\t\t}\n\n\t}\n\n\n\n\t// create output environment\n\n\tout := []string{}\n\n\tfor k, v := range m {\n\n\t\tif val, ok := orig_key[k]; ok {\n\n\t\t\tout = append(out, val+\"=\"+v)\n\n\t\t} else {\n\n\t\t\tout = append(out, k+\"=\"+v)\n\n\t\t}\n\n\t}\n\n\treturn out\n", "file_path": "arriccio/main.go", "rank": 8, "score": 43058.69853208032 }, { "content": "func runComponentWithDependencies(cmd string, db AliasDB, workDir string, args []string, debug bool, unsafe bool, console bool, update bool) {\n\n\n\n\t// url is either given or taken from alias database\n\n\turl := cmd\n\n\t// check, do we have an alias for command\n\n\tif val, ok := db.Commands[cmd]; ok {\n\n\t\turl = val\n\n\t}\n\n\n\n\t// resolve dependencies\n\n\tok, rlist := resolveDependencies(db, url, []ImplementationDependency{}, update)\n\n\tif !ok {\n\n\t\tlog.Fatal(\"Could not resolve dependencies for cmd: \", url)\n\n\t}\n\n\n\n\t// check install dirs and install missing software\n\n\trlist2, ilist := enrichDepProcInfoWithInstallDir(db, rlist)\n\n\n\n\t// install files\n\n\tinstallDownloads(ilist, unsafe)\n\n\n\n\tif debug {\n\n\t\tfor _, el := range rlist2 {\n\n\t\t\tprintDepProcInfo(el)\n\n\t\t}\n\n\t} else {\n\n\t\t// build run command and start it, only, if not update\n\n\t\tif !update {\n\n\t\t\tcomposeEnvironmentAndRunCommand(rlist2, args, console)\n\n\t\t}\n\n\t}\n", "file_path": "arriccio/main.go", "rank": 9, "score": 42771.33054054813 }, { "content": "func getImplFName(fname string) string {\n\n\tbase := filepath.Base(fname)\n\n\tidir := filepath.Join(getArriccioDir(), \"impl\")\n\n\tiname := filepath.Join(idir, base) + \".i\"\n\n\treturn iname\n", "file_path": "arriccio/main.go", "rank": 10, "score": 41407.0750000006 }, { "content": "function 
getPlatString(name, version)\n\n\to, a = getOS()\n\n\treturn (name .. '-' .. a .. '-' .. o .. '-' .. version)\n\nend\n\n\n\n-- local functions, utilities\n\nlocal function aioString()\n\n\to, a = getOS()\n\n\tif o == \"windows\" then\n\n\t\treturn glue.bin .. \"\\\\win\\\\aio.exe\"\t\n\n\telseif o == \"darwin\" then\n\n\t\treturn glue.bin .. \"/darwin/aio\"\t\n\n\telseif o == \"linux\" then\n\n\t\treturn glue.bin .. \"/linux/aio\"\t\n\n\tend\n\nend\n\n\n\nlocal function getIntonacoVersion()\n\n\tio.input(\"../intonaco/Cargo.toml\")\n\n\twhile true do\n", "file_path": "scripts/build.lua", "rank": 11, "score": 35210.39019213282 }, { "content": "//\n\n// Fresco Framework for Multi-Language Programming\n\n// Copyright 2015-2016 Peter Althainz\n\n// \n\n// Distributed under the Apache License, Version 2.0\n\n// (See attached file LICENSE or copy at \n\n// http://www.apache.org/licenses/LICENSE-2.0)\n\n// \n\n// file: intonacto/src/lib.rs\n\n//\n\n\n\nextern crate cbor;\n\nextern crate libc;\n\nextern crate crossbeam;\n\nextern crate libloading;\n\nextern crate snowflake;\n\nextern crate rustc_serialize;\n\n\n\npub mod ftypes;\n\npub mod component;\n\npub mod entity;\n\npub mod thread_guard;\n\npub mod system;\n\npub mod cif;\n\n\n\n#[macro_use] \n\nextern crate lazy_static;\n\n\n", "file_path": "intonaco/src/lib.rs", "rank": 12, "score": 28451.080016138836 }, { "content": "\n\n let dv = component::vec_from_c_char_p(data, len);\n\n let mut reader = Cursor::new(dv);\n\n let (ct, v) = component::read_component(&mut reader).unwrap();\n\n\n\n Entity::do_with(ep, (|en| {\n\n en.set(ct, v); \n\n }));\n\n } else {\n\n println!(\"found entity_set with data len = 0\");\n\n }\n\n}\n\n\n\npub struct DataPointer(Arc<Vec<u8>>);\n\n\n\n// reading of data\n\n\n\n#[no_mangle] \n\npub extern \"C\" fn entity_get_data(ep: EntityPointer, ct: u64, pp: *mut *mut DataPointer) {\n\n\n", "file_path": "intonaco/src/entity.rs", "rank": 13, "score": 28212.17935541291 }, { "content": " let av = 
Entity::do_with(ep, (|en| {\n\n return en.get(ct);\n\n }));\n\n\n\n unsafe {\n\n *pp = Box::into_raw(Box::new(DataPointer(av)));\n\n }\n\n}\n\n\n\n#[no_mangle] \n\npub extern \"C\" fn entity_data_read(dp: *mut DataPointer, p_cp: *mut *const libc::c_char, p_len: *mut libc::c_int) {\n\n let bav = unsafe {Box::from_raw(dp)};\n\n unsafe {\n\n let DataPointer(ref av) = *bav;\n\n *p_len = av.len() as i32;\n\n *p_cp = (mem::transmute::<&[u8],&[i8]>(av)).as_ptr();\n\n }\n\n std::mem::forget(bav);\n\n}\n\n\n", "file_path": "intonaco/src/entity.rs", "rank": 14, "score": 28202.485308494277 }, { "content": " }\n\n }\n\n\n\n pub fn set(&self, ct: u64, cv: Vec<u8>) {\n\n if self.values.contains_key(&ct) {\n\n self.values[&ct].set(cv);\n\n }\n\n else {\n\n panic!(\"Entity set: key not found: {:x}\", ct);\n\n }\n\n }\n\n\n\n pub fn values(&self) -> &BTreeMap <u64, component::Component<Vec<u8>>> {\n\n &self.values\n\n }\n\n\n\n pub fn id(&self) -> ProcessUniqueId { \n\n self.id\n\n }\n\n\n", "file_path": "intonaco/src/entity.rs", "rank": 15, "score": 28201.66599602244 }, { "content": "use cbor::{Config, GenericDecoder, DecodeError, value};\n\n\n\npub struct Entity {\n\n id: ProcessUniqueId, \n\n values: BTreeMap <u64, component::Component<Vec<u8>>>,\n\n// destroyers: Mutex<Vec<RemoveObjectLibItemFromSystemFunctionInfo>>,\n\n}\n\n\n\nimpl std::cmp::PartialEq for Entity {\n\n fn eq(&self, other: &Entity) -> bool {\n\n return self.id == other.id;\n\n } \n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct EntityPointer(*mut Entity);\n\n\n\nunsafe impl Send for EntityPointer {}\n\nunsafe impl Sync for EntityPointer {}\n\n\n", "file_path": "intonaco/src/entity.rs", "rank": 16, "score": 28200.130093964683 }, { "content": "impl Entity {\n\n\n\n pub fn new(cts: Vec<(u64, Vec<u8>)>) -> Box<Entity> {\n\n let mut m = BTreeMap::new();\n\n for (k, v) in cts {\n\n m.insert(k, component::Component::new(v));\n\n };\n\n Box::new(\n\n Entity {\n\n id: ProcessUniqueId::new(),\n\n values: m,\n\n })\n\n 
}\n\n\n\n pub fn get(&self, ct: u64) -> Arc<Vec<u8>> {\n\n if self.values.contains_key(&ct) {\n\n return self.values[&ct].get();\n\n }\n\n else {\n\n panic!(\"Entity get: key not found: {:x}\", ct);\n", "file_path": "intonaco/src/entity.rs", "rank": 17, "score": 28199.16218398146 }, { "content": "/*\n\n#[no_mangle] \n\npub extern \"C\" fn entity_id(ep: EntityPointer) -> *mut ProcessUniqueId {\n\n let id = Entity::do_with(ep, (|en| {\n\n en.id() \n\n }));\n\n let bid = Box::new(id);\n\n return Box::into_raw(bid);\n\n}\n\n\n\n#[no_mangle] \n\npub extern \"C\" fn entity_id_free(bid: *mut ProcessUniqueId) {\n\n let bid = unsafe {Box::from_raw(bid)};\n\n // drops out of scope and deletes box\n\n}\n\n\n\n\n\n#[no_mangle] \n\npub extern \"C\" fn entity_set(data: *const libc::c_char, len: libc::c_int, ep: EntityPointer) {\n\n if len > 0 {\n", "file_path": "intonaco/src/entity.rs", "rank": 18, "score": 28196.734110428195 }, { "content": " pub fn to_ptr(b: Box<Entity>) -> EntityPointer {\n\n EntityPointer(Box::into_raw(b))\n\n }\n\n\n\n pub fn from_ptr(ep: EntityPointer) -> Box<Entity> {\n\n let EntityPointer(b) = ep;\n\n let e = unsafe { Box::from_raw(b) };\n\n e\n\n }\n\n\n\n pub fn do_with<F, G>(ep: EntityPointer, f: F) -> G where F: FnOnce(&Entity) -> G {\n\n let e = Entity::from_ptr(ep);\n\n let r = f(&*e);\n\n std::mem::forget(e);\n\n return r;\n\n }\n\n\n\n}\n\n\n\n\n", "file_path": "intonaco/src/entity.rs", "rank": 19, "score": 28193.153927481915 }, { "content": "#[no_mangle] \n\npub extern \"C\" fn entity_data_release(dp: *mut DataPointer) {\n\n let _ = unsafe {Box::from_raw(dp)};\n\n}\n\n\n\n*/\n\n\n\n//\n\n// Tests\n\n//\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use std::mem::{size_of};\n\n use snowflake::{ProcessUniqueId};\n\n\n\n #[test]\n\n fn size_of_unique_id() { // important test to have correct conversion of UniqueId's to [u8;16]\n\n assert!(size_of::<ProcessUniqueId>() == 16);\n\n }\n\n}\n", "file_path": "intonaco/src/entity.rs", "rank": 20, "score": 
28188.480066745597 }, { "content": "//\n\n// Fresco Framework for Multi-Language Programming\n\n// Copyright 2015-2016 Peter Althainz\n\n// \n\n// Distributed under the Apache License, Version 2.0\n\n// (See attached file LICENSE or copy at \n\n// http://www.apache.org/licenses/LICENSE-2.0)\n\n// \n\n// file: intonacto/src/entity.rs\n\n//\n\n\n\nuse component;\n\nuse std;\n\nuse snowflake::ProcessUniqueId;\n\nuse std::collections::{BTreeMap};\n\nuse std::sync::Arc;\n\nuse libc;\n\nuse std::io::{Cursor};\n\nuse std::ptr::copy;\n\nuse std::mem;\n", "file_path": "intonaco/src/entity.rs", "rank": 21, "score": 28185.453670137165 }, { "content": " }\n\n }\n\n\n\n pub fn set(&self, v: T) {\n\n let av = Arc::new(v);\n\n self.data.set(av.clone());\n\n let mref = self.setter.get(); \n\n for s in &*mref {\n\n s(av.clone());\n\n }\n\n }\n\n\n\n pub fn get(&self) -> Arc<T> {\n\n self.data.get()\n\n }\n\n\n\n // remark those functions are not logically threadsafe, although technically. Reading values during adding, setting setters is ok.\n\n // Having two threads simultaneously adding setters is not working, one thread might overwrite changes of other thread.\n\n // Therefore usage in system code should be guarantee that systems add entities sequentially. (Or we need an add/delete mutex).\n\n\n", "file_path": "intonaco/src/component.rs", "rank": 22, "score": 28078.62839788294 }, { "content": "//\n\n// Fresco Framework for Multi-Language Programming\n\n// Copyright 2015-2016 Peter Althainz\n\n// \n\n// Distributed under the Apache License, Version 2.0\n\n// (See attached file LICENSE or copy at \n\n// http://www.apache.org/licenses/LICENSE-2.0)\n\n// \n\n// file: intonacto/src/component.rs\n\n//\n\n\n\n// The component inherits its name from the role it plays in the entity-component-system model, which is \n\n// the base of underlying ideas. 
In addition to isolate threading behaviour, our components have some\n\n// useful properties, like being accessible from multiple threads without locking the data structure.\n\n// This is achieved by leveraging crossbeam functionality, specifically ArcCell. In addition component\n\n// is a \"reactive\" value, with the ability to register callbacks on value change.\n\n\n\nuse libc;\n\nuse std::io::{Cursor, Seek, SeekFrom, Read};\n\nuse std::sync::Arc;\n", "file_path": "intonaco/src/component.rs", "rank": 23, "score": 28075.827670248775 }, { "content": "use crossbeam::sync::ArcCell;\n\nuse std::io::copy;\n\n\n\n//\n\n// Reactive Component\n\n//\n\n\n\npub struct Component<T> {\n\n data: ArcCell<T>,\n\n setter: ArcCell<Vec<Arc<Fn(Arc<T>)>>>,\n\n dropper: ArcCell<Vec<Arc<Fn()>>>,\n\n}\n\n\n\nimpl<T> Component<T> {\n\n\n\n pub fn new(v: T) -> Component<T> {\n\n Component {\n\n data: ArcCell::new(Arc::new(v)),\n\n setter: ArcCell::new(Arc::new(Vec::new())),\n\n dropper: ArcCell::new(Arc::new(Vec::new())),\n", "file_path": "intonaco/src/component.rs", "rank": 24, "score": 28071.478839852607 }, { "content": " pub fn add_callback(&self, f: Arc<Fn(Arc<T>)>) { \n\n let mut v = (*self.setter.get()).clone();\n\n v.push(f);\n\n self.setter.set(Arc::new(v));\n\n }\n\n\n\n pub fn add_dropper(&self, f: Arc<Fn()>) { \n\n let mut v = (*self.dropper.get()).clone();\n\n v.push(f);\n\n self.dropper.set(Arc::new(v));\n\n }\n\n\n\n}\n\n\n\nimpl<T> Drop for Component<T> {\n\n fn drop(&mut self) {\n\n let mref = self.dropper.get(); \n\n for d in &*mref {\n\n d();\n\n }\n\n }\n\n}\n\n\n\n// helper functions to translate data from C\n\n\n", "file_path": "intonaco/src/component.rs", "rank": 25, "score": 28071.14957472561 }, { "content": " register_msg_receiver_function: register_msg_receiver,\n\n }\n\n}\n\n\n\n// a callback from the object lib towards the entities\n\npub extern \"C\" fn object_lib_callback(ep: EntityPointer, ct: FrPropertyType, data: FrMsg, len: FrMsgLength) {\n\n 
Entity::do_with(ep, (|e| {e.set(ct, component::vec_from_c_char_p(data, len));}));\n\n}\n\n\n\n\n\n\n\n\n\n\n\n//\n\n// CallbackSystem, System needed for callbacks, establish a routine, if value changes\n\n//\n\n\n\n// Rust implementation\n\n\n\npub enum CallbackSystemCommands {\n\n SetCValue(EntityPointer, u64, FrMessageFn2, Arc<Vec<u8>>),\n\n}\n\n\n\npub struct CallbackSystem {\n\n queue: Arc<MsQueue<CallbackSystemCommands>>,\n\n}\n\n\n", "file_path": "intonaco/src/system.rs", "rank": 26, "score": 27972.20297833438 }, { "content": " }); // entity do with \n\n\n\n b.wait(); // finalize wait, to free calling thread\n\n }, // cmd\n\n\n\n ObjectLibSystemCommands::SetValueOnObjectLibItem(mfp, ip, val) => {\n\n set_c_value_on_object_lib_item(*(mfp.borrow_mut()), *(ip.borrow_mut()), &val); \n\n },\n\n\n\n ObjectLibSystemCommands::RemoveObjectLibItemFromOLS(ct, ip) => {\n\n self.lib_if.destroy_item(ct, *(ip.borrow_mut())); // to do: insert u64 type !!!\n\n // b.wait();\n\n },\n\n }\n\n\n\n }\n\n\n\n}\n\n\n\n\n\n//\n\n// system implementation for component libraries from C/C++\n\n//\n\n\n\npub type SetValueOnObjectLibItemCallback = FrMessageFn; \n", "file_path": "intonaco/src/system.rs", "rank": 27, "score": 27969.205987477708 }, { "content": " // encode to cbor as data, bytestring\n\n let id_u8 = unsafe { mem::transmute::<ProcessUniqueId, [u8; 16]>(e.id()) };\n\n let mut buf = vec![]; \n\n let mut enc = Encoder::new(buf);\n\n enc.bytes(&id_u8);\n\n set_c_value_on_object_lib_item(mfp_id, ip, &enc.into_writer()); \n\n },\n\n None => {}\n\n }\n\n // set attributes\n\n for (k2, v2) in e.values() {\n\n // add sender\n\n match self.lib_if.get_message_sender(*k, *k2)\n\n {\n\n Some(mfp) => {\n\n let queue2 = self.queue.clone();\n\n let tgmfp2 = thread_guard::Value::new(mfp);\n\n let tgip2 = thread_guard::Value::new(ip.clone());\n\n v2.add_callback(\n\n Arc::new(move | av | { queue2.push(ObjectLibSystemCommands::SetValueOnObjectLibItem(tgmfp2.clone(), tgip2.clone(), av)); })\n", 
"file_path": "intonaco/src/system.rs", "rank": 28, "score": 27968.497014096905 }, { "content": "\n\n fn step_system(&self) {\n\n\n\n let cmd = self.queue.pop();\n\n match cmd {\n\n ObjectLibSystemCommands::AddEntityToOLS(ep, b) => {\n\n\n\n Entity::do_with(ep, |e| { \n\n\n\n for (k, v) in e.values() {\n\n\n\n let ip = self.lib_if.create_item(*k, &v.get());\n\n let tgip = thread_guard::Value::new(ip.clone());\n\n\n\n if ip != std::ptr::null() {\n\n\n\n // set entity id on item, if interested in it\n\n match self.lib_if.get_message_sender(*k, CT_ENTITY_ID) \n\n {\n\n Some(mfp_id) => {\n", "file_path": "intonaco/src/system.rs", "rank": 29, "score": 27966.159717650873 }, { "content": "\n\n/*\n\n\n\n intonaco is an entity component system runtime, which decouples access to the entities from different\n\n programming languages and different threads. All important data access is lock-free, although multi-threading \n\n safe. The runtime is composed from the following pieces.\n\n\n\n Reactive Values and Entities\n\n ----------------------------\n\n A reactive value is a lockfree-value, which also calls some callback functions, once a value is changed. Entities \n\n are maps of reactive values, where the key is the component identity (an u64).\n\n\n\n Systems\n\n -------\n\n A system is defined as a running loop (attached to one and only thread) responding to commands. 
Those commands\n\n are send to the thread in a non-blocking messaging queue, the commands are:\n\n\n\n - add an entity to the system, obtaining system specific data, called entity item\n\n - destroy entity item data from the system, deleting the system specific data\n\n - set a value on a component within an entity for the system specific data\n", "file_path": "intonaco/src/system.rs", "rank": 30, "score": 27964.154800350283 }, { "content": " });\n\n }\n\n}\n\n\n\nimpl System for CallbackSystem {\n\n\n\n fn add_entity(&self, ep: EntityPointer) {\n\n }\n\n\n\n fn step_system(&self) {\n\n let cmdo = self.queue.try_pop();\n\n match cmdo {\n\n None => thread::sleep(Duration::from_millis(20)),\n\n Some(cmd) => match cmd {\n\n CallbackSystemCommands::SetCValue(ep, ct, cb, v) => {\n\n set_c_value(cb, ep, ct, &v);\n\n },\n\n },\n\n }\n\n }\n\n}\n\n\n\n\n\n\n\n\n", "file_path": "intonaco/src/system.rs", "rank": 31, "score": 27962.61893186795 }, { "content": "use std::io::{Cursor, Write};\n\nuse std::process::{exit};\n\nuse std::time::Duration;\n\n\n\nuse std::thread;\n\nuse std::sync::{Arc, Mutex, Barrier};\n\n\n\nuse crossbeam::sync::MsQueue;\n\nuse snowflake::ProcessUniqueId;\n\n\n\n#[cfg(unix)]\n\nuse libloading::os::unix::{Library, Symbol};\n\n#[cfg(windows)]\n\nuse libloading::os::windows::{Library, Symbol};\n\n\n\nuse entity::{Entity, EntityPointer};\n\nuse ftypes::{FrMsg, FrMsgLength, FrComponentType, FrItemType, FrPropertyType, FrItem, FrMessageFn, FrMessageFn2};\n\n\n\n\n\nconst CT_ENTITY_ID: u64 = 0x112cc0dc2647d39e;\n", "file_path": "intonaco/src/system.rs", "rank": 32, "score": 27960.510637839354 }, { "content": "//\n\n// Fresco Framework for Multi-Language Programming\n\n// Copyright 2015-2016 Peter Althainz\n\n// \n\n// Distributed under the Apache License, Version 2.0\n\n// (See attached file LICENSE or copy at \n\n// http://www.apache.org/licenses/LICENSE-2.0)\n\n// \n\n// file: intonacto/src/system.rs\n\n//\n\n\n\nuse std;\n\nuse libc;\n\nuse 
thread_guard;\n\nuse component;\n\nuse cbor::{Config, GenericDecoder, DecodeError, Encoder, value};\n\nuse rustc_serialize::{Encodable};\n\n\n\nuse std::mem;\n\nuse std::ffi::CString;\n", "file_path": "intonaco/src/system.rs", "rank": 33, "score": 27958.627946521163 }, { "content": " );\n\n set_c_value_on_object_lib_item(mfp, ip, &v2.get()); \n\n },\n\n None => {}\n\n }\n\n // register object lib callbacks\n\n self.lib_if.register_message_receiver(*k, *k2, ip, ep, object_lib_callback);\n\n }\n\n // drop logic\n\n let queue1 = self.queue.clone();\n\n let c = *k;\n\n v.add_dropper(\n\n Arc::new(move || { \n\n // let b2 = Arc::new(Barrier::new(2));\n\n queue1.push(ObjectLibSystemCommands::RemoveObjectLibItemFromOLS(c, tgip.clone()));\n\n // b2.wait();\n\n })\n\n );\n\n }; // if ip != 0\n\n } // loop over k, v\n", "file_path": "intonaco/src/system.rs", "rank": 34, "score": 27956.2341853377 }, { "content": "impl ObjectLibSystem {\n\n\n\n pub fn new(lib: & Library) -> ObjectLibSystem {\n\n let li = get_object_lib_interface(lib);\n\n let q = Arc::new(MsQueue::new());\n\n ObjectLibSystem {\n\n queue: q, \n\n lib_if: li\n\n }\n\n }\n\n\n\n}\n\n\n\nimpl System for ObjectLibSystem {\n\n\n\n fn add_entity(&self, ep: EntityPointer) {\n\n let b = Arc::new(Barrier::new(2));\n\n self.queue.push(ObjectLibSystemCommands::AddEntityToOLS(ep.clone(), b.clone()));\n\n b.wait();\n\n }\n", "file_path": "intonaco/src/system.rs", "rank": 35, "score": 27955.23699644506 }, { "content": "\n\n There are two types of systems, one handling external object libraries (written in C++) and one handling\n\n entities itself and responding to external callbacks.\n\n\n\n\n\n*/\n\n\n\n\n\n//\n\n// System trait\n\n//\n\n\n", "file_path": "intonaco/src/system.rs", "rank": 36, "score": 27954.7703349504 }, { "content": " }\n\n }\n\n\n\n pub fn destroy_item(&self, item_type: FrItemType, ip: FrItem)\n\n {\n\n (self.destroy_function)(item_type, ip);\n\n }\n\n\n\n pub fn get_message_sender(&self, item_type: 
FrItemType, property_type: FrPropertyType) -> Option<FrMessageFn>\n\n {\n\n unsafe {\n\n (self.get_msg_sender_function)(item_type, property_type)\n\n }\n\n }\n\n\n\n pub fn register_message_receiver(&self, item_type: FrItemType, event_type: FrPropertyType, ip: FrItem, ep: EntityPointer, fp: FrMessageFn2)\n\n {\n\n (self.register_msg_receiver_function)(item_type, event_type, ip, ep, fp);\n\n }\n\n\n\n}\n\n\n\n\n", "file_path": "intonaco/src/system.rs", "rank": 37, "score": 27953.538151180786 }, { "content": "use std::sync::Arc;\n\nuse snowflake::ProcessUniqueId;\n\nuse std::marker::PhantomData;\n\n\n\n// thread local storage, to support thread bound objects\n\nthread_local!(static GUARD: Guard = Guard::new());\n\n\n\npub struct Value<T> {\n\n id: ProcessUniqueId,\n\n value: Arc<RefCell<T>>,\n\n}\n\n\n\nimpl<T> Clone for Value<T> {\n\n fn clone(&self) -> Self {\n\n Value {\n\n id: self.id,\n\n value: self.value.clone(),\n\n }\n\n }\n\n}\n", "file_path": "intonaco/src/thread_guard.rs", "rank": 38, "score": 26964.641128619685 }, { "content": "unsafe impl<T> Send for Value<T> {}\n\nunsafe impl<T> Sync for Value<T> {}\n\n\n\nimpl<T> Value<T> {\n\n\n\n pub fn borrow_mut(&self) -> RefMut<T> {\n\n // panic, if guard id is not the same\n\n GUARD.with(|g| { assert_eq!(g.id, self.id) });\n\n// if self.id != guard.id {panic!(\"ecs-glue::thread_guard: id not the same, usage by foreign thread/guard no allowed!\");};\n\n self.value.borrow_mut()\n\n }\n\n\n\n pub fn new(v: T) -> Value<T> {\n\n// let mut new_id = None;\n\n let new_id = GUARD.with(|g| { g.id });\n\n Value {\n\n id: new_id, // take id of this thread (thread local storage)\n\n value: Arc::new(RefCell::new(v)),\n\n }\n\n }\n", "file_path": "intonaco/src/thread_guard.rs", "rank": 39, "score": 26964.43581604387 }, { "content": "//\n\n// Fresco Framework for Multi-Language Programming\n\n// Copyright 2015-2016 Peter Althainz\n\n// \n\n// Distributed under the Apache License, Version 2.0\n\n// (See attached file LICENSE or 
copy at \n\n// http://www.apache.org/licenses/LICENSE-2.0)\n\n// \n\n// file: intonacto/src/thread_guard.rs\n\n//\n\n\n\n//\n\n// thread_guard creates the possibility to hand out pointers to something to a different\n\n// thread, which can be send to the original thread for usage only inside the original thread.\n\n// This mechanism is guaranteed by a guard value, which allows to use the original pointer.\n\n// The motiviation behind this structure is to allow foreign threads to store information\n\n// about id's (entities) and send them with commands to the originator thread.\n\n//\n\n\n\nuse std::cell::{RefCell, RefMut};\n", "file_path": "intonaco/src/thread_guard.rs", "rank": 40, "score": 26962.471053189016 }, { "content": "}\n\n\n\npub struct Guard {\n\n id: ProcessUniqueId,\n\n m: PhantomData<*mut ()>, // make Guard not Send, therefore also not Sync\n\n}\n\n\n\nimpl Guard {\n\n pub fn new() -> Self {\n\n Guard {\n\n id: ProcessUniqueId::new(),\n\n m: PhantomData,\n\n }\n\n }\n\n}\n", "file_path": "intonaco/src/thread_guard.rs", "rank": 41, "score": 26959.39947046632 }, { "content": "\tsettings []string\n", "file_path": "arriccio/main.go", "rank": 42, "score": 23382.816055631527 }, { "content": "type Component struct {\n\n\tId string `toml:\"id-url\"` // Url as id\n\n\tDescription string `toml:\"description\"` // description\n\n\tSigningKey string `toml:\"signing-key\"` // https location of public key for signature\n\n\tLicense string `toml:\"license-short\"` // License type\n\n\tFullLicenseText string `toml:\"license-full\"` // full text of license, included in component description\n\n\tImplementations []ComponentImplementation `toml:\"implementation\"`\n", "file_path": "arriccio/main.go", "rank": 43, "score": 23103.373117442898 }, { "content": "func writeSignature(fname string, sig ssh.Signature) {\n\n\tf, err := os.Create(fname)\n\n\tif err != nil {\n\n\t\tlog.Fatal(\"error creating signature file: \", err)\n\n\t}\n\n\tdefer 
f.Close()\n\n\tf.WriteString(sig.Format)\n\n\tf.WriteString(\"\\n\")\n\n\tf.WriteString(base64.StdEncoding.EncodeToString(sig.Blob))\n\n\tf.WriteString(\"\\n\")\n", "file_path": "arriccio/main.go", "rank": 44, "score": 22423.508309221175 }, { "content": "func readSignature(fname string) ssh.Signature {\n\n\tf, err := os.Open(fname)\n\n\tif err != nil {\n\n\t\tlog.Fatal(\"error open signature file: \", err)\n\n\t}\n\n\tdefer f.Close()\n\n\ts := bufio.NewScanner(f)\n\n\tvar sig ssh.Signature\n\n\ts.Scan()\n\n\tsig.Format = s.Text()\n\n\ts.Scan()\n\n\tsig.Blob, _ = base64.StdEncoding.DecodeString(s.Text())\n\n\treturn sig\n", "file_path": "arriccio/main.go", "rank": 45, "score": 22423.508309221175 }, { "content": " FrMessageFn sendFn;\n", "file_path": "types/ctypes/interfaces.h", "rank": 46, "score": 22335.46717554403 }, { "content": "type ComponentImplementation struct {\n\n\tArchitecture string `toml:\"architecture\"`\n\n\tOS string `toml:\"operating-system\"`\n\n\tLocation string `toml:\"archive-download-location\"` // download Url tgz\n\n\tCommand string `toml:\"start-local-command\"`\n\n\tEnvironment []string `toml:\"environment-settings\"`\n\n\tDependencies []ImplementationDependency `toml:\"dependency\"`\n", "file_path": "arriccio/main.go", "rank": 47, "score": 22126.448541243673 }, { "content": "func matchArchAndOs(arch string, os string) bool {\n\n\treturn (arch == \"*\" || arch == runtime.GOARCH) && // GOARCH can be 386, amd64, amd64p32, ppc64 or arm\n\n\t\t(os == \"*\" || os == runtime.GOOS) // GOOS can be darwin, freebsd, linux, windows\n", "file_path": "arriccio/main.go", "rank": 48, "score": 21573.13872768888 }, { "content": "func writeAliasDB(db AliasDB) {\n\n\tfname := filepath.Join(getArriccioDir(), \"arr_db\")\n\n\n\n\tvar buf bytes.Buffer\n\n\te := toml.NewEncoder(&buf)\n\n\terr := e.Encode(db)\n\n\tif err != nil {\n\n\t\tfmt.Println(err)\n\n\t\tlog.Fatal(\"cannot write alias db\")\n\n\t} else {\n\n\t\tioutil.WriteFile(fname, buf.Bytes(), 
0660)\n\n\t}\n", "file_path": "arriccio/main.go", "rank": 49, "score": 21513.798776947846 }, { "content": "func readAliasDB() AliasDB {\n\n\tfname := filepath.Join(getArriccioDir(), \"arr_db\")\n\n\tdat, err := ioutil.ReadFile(fname)\n\n\tif err == nil {\n\n\t\tvar db AliasDB\n\n\t\t_, err := toml.Decode(string(dat), &db)\n\n\t\tif err == nil {\n\n\t\t\treturn db\n\n\t\t} else {\n\n\t\t\tfmt.Println(err)\n\n\t\t\tlog.Fatal(\"cannot read alias db\")\n\n\t\t}\n\n\t}\n\n\treturn AliasDB{make(map[string]string), make(map[string]string)}\n", "file_path": "arriccio/main.go", "rank": 50, "score": 21513.798776947846 }, { "content": "func checkNameUrl(cmd string, db AliasDB) {\n\n\tif isUrlValid(cmd) {\n\n\t\treturn\n\n\t}\n\n\tif _, ok := db.Commands[cmd]; ok {\n\n\t\treturn\n\n\t}\n\n\tlog.Fatal(\"need <url> or <name> not: \", cmd)\n", "file_path": "arriccio/main.go", "rank": 51, "score": 21513.798776947846 }, { "content": "func showComponentInfo(cmd string, db AliasDB) {\n\n\t// url is either given or taken from alias database\n\n\turl := cmd\n\n\t// check, do we have an alias for command\n\n\tif val, ok := db.Commands[cmd]; ok {\n\n\t\turl = val\n\n\t}\n\n\n\n\taif, _, _ := getComponentFromUrl(db, url, false)\n\n\n\n\t//\tfmt.Printf(\"%+v\", aif)\n\n\n\n\tprintln(\"Component Info on:\", aif.Id, \"\\n\")\n\n\tprintln(\"Description:\")\n\n\tprintln(aif.Description, \"\\n\")\n\n\tprintln(\"License:\")\n\n\tprintln(aif.License, \"\\n\")\n", "file_path": "arriccio/main.go", "rank": 52, "score": 21228.79056213755 }, { "content": "func getComponentFromUrl(db AliasDB, url string, update bool) (Component, string, bool) {\n\n\n\n\tfname := \"\"\n\n\tifloc := \"\" // component location, directory if locally found\n\n\n\n\t// check url\n\n\tvar dat []byte\n\n\tvar aif Component\n\n\tvar isDownload bool = false\n\n\n\n\tif isUrlValid(url) {\n\n\t\t// file is available locally, since alias is defined\n\n\t\tif val, ok := db.Locals[url]; !update && ok {\n\n\t\t\t// check valid 
path\n\n\t\t\tabspath, ok := isLocalDirValid(val)\n\n\t\t\tif !ok {\n\n\t\t\t\tlog.Fatal(\"Local arriccio file not valid: \", url, fname)\n\n\t\t\t}\n\n\t\t\tifloc = abspath\n\n\t\t\tfname = filepath.Join(abspath, \"arriccio.toml\")\n\n\t\t\tif _, err := os.Stat(fname); os.IsNotExist(err) {\n\n\t\t\t\tlog.Fatal(\"Local arriccio file not valid: \", fname)\n\n\t\t\t}\n\n\t\t\tdat, _ = ioutil.ReadFile(fname)\n\n\t\t\taif = readComponent(string(dat))\n\n\t\t\t// file needs to be downloaded or taken from cache\n\n\t\t} else {\n\n\t\t\tfname, aif, isDownload = getRemoteComponent(url, update)\n\n\t\t}\n\n\t} else {\n\n\t\tlog.Fatal(\"Component id is not a valid url: \", url)\n\n\t}\n\n\t// check, if url is correct\n\n\tif url != aif.Id {\n\n\t\tlog.Fatal(\"downloaded component description has not correct id!\\n url:\", url, \"\\n id:\", aif.Id)\n\n\t}\n\n\n\n\treturn aif, ifloc, isDownload\n", "file_path": "arriccio/main.go", "rank": 53, "score": 21228.79056213755 }, { "content": "func getRemoteComponent(urln string, update bool) (string, Component, bool) {\n\n\tfname, isCached := getUrlAsCachedFile(urln, update)\n\n\tdat, _ := ioutil.ReadFile(fname)\n\n\taif := readComponent(string(dat))\n\n\tif !isCached {\n\n\t\t// check url is consistent with id\n\n\t\tif urln != aif.Id {\n\n\t\t\tos.Remove(fname)\n\n\t\t\tlog.Fatal(\"downloaded component description has not correct id!\\n url:\", urln, \"\\n id:\", aif.Id)\n\n\t\t}\n\n\t\t// check key is https - important for security !!!\n\n\t\tif len(aif.SigningKey) < 9 || aif.SigningKey[0:8] != \"https://\" {\n\n\t\t\tos.Remove(fname)\n\n\t\t\tlog.Fatal(\"key file (https!) 
not provided: \", aif.SigningKey, \" for component: \", urln)\n\n\t\t}\n\n\t\t// verify signature\n\n\t\tfsig, _ := getUrlAsCachedFile(urln+\".sig\", update)\n\n\t\tfkey, _ := getUrlAsCachedFile(aif.SigningKey, update)\n\n\t\tif !verifyFile(fname, fsig, fkey) {\n\n\t\t\tos.Remove(fname)\n\n\t\t\tos.Remove(fsig)\n\n\t\t\tlog.Fatal(\"downloaded component description not correctly signed: \", urln)\n\n\t\t}\n\n\t}\n\n\treturn fname, aif, !isCached\n", "file_path": "arriccio/main.go", "rank": 54, "score": 21228.79056213755 }, { "content": "func composeEnvironmentAndRunCommand(depi []DependencyProcessingInfo, args []string, console bool) {\n\n\t// path handling: path of command will be added at the end of PATH\n\n\t// other env: will be added or set, depending on previous setting\n\n\n\n\t// adapt environment, set binary\n\n\tenv := os.Environ()\n\n\tbinary := \"\"\n\n\targlist := args\n\n\n\n\tfor i := len(depi) - 1; i >= 0; i-- { // reversed loop, to start with lowest dependency\n\n\t\tel := depi[i]\n\n\n\n\t\tenv = evaluateEnvSetting(env, el.settings, el.installdir)\n\n\n\n\t\t// command handling, binary will include all commands separated by space to allow command chaining\n\n\t\tif len(el.impl.Command) != 0 {\n\n\t\t\tbparts := strings.Fields(el.impl.Command)\n\n\t\t\tnewcmd := el.installdir + string(os.PathSeparator) + bparts[0] // append installdir to command\n\n\n\n\t\t\tif len(binary) != 0 { // if there is already a command, push this one to arglist\n\n\t\t\t\ta := make([]string, 1)\n\n\t\t\t\ta[0] = newcmd\n\n\t\t\t\targlist = append(a, arglist...)\n\n\t\t\t} else {\n\n\t\t\t\tbinary = newcmd\n\n\t\t\t\targlist = append(bparts[1:], arglist...)\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\t// if binary still empty, take first argument as binary\n\n\tif binary == \"\" && len(arglist) > 0 {\n\n\t\tbinary = arglist[0]\n\n\t\targlist = arglist[1:]\n\n\t}\n\n\n\n\t// run command\n\n\tcmd := exec.Command(binary, arglist...)\n\n\tcmd.Env = env\n\n\n\n\tif console 
{\n\n\t\tcmd.Stdout = os.Stdout\n\n\t\tcmd.Stderr = os.Stderr\n\n\t\tcmd.Stdin = os.Stdin\n\n\t\tcmd.Run()\n\n\t} else {\n\n\t\tcmd.Start()\n\n\t}\n", "file_path": "arriccio/main.go", "rank": 55, "score": 20702.644936892204 }, { "content": " void (*inEntityWriteComponent) (FrEntity e, FrComponentType ct, FrMsg m, FrMsgLength l);\n", "file_path": "types/ctypes/interfaces.h", "rank": 66, "score": 21.413674676594173 }, { "content": "pub type FrMsg = *const u8;\n\npub type FrMsgLength = u32;\n\n\n\npub type FrComponentType = u64;\n\npub type FrItemType = FrComponentType;\n\npub type FrPropertyType = FrComponentType;\n\n\n\n// represents opaque pointer to external entity\n\npub enum FrItemStruct {}\n\nunsafe impl Send for FrItemStruct {}\n\npub type FrItem = *const FrItemStruct;\n\n\n\npub type FrMessageFn = extern \"C\" fn (item: FrItem, msg: FrMsg, ml: FrMsgLength); \n\npub type FrMessageFn2 = extern \"C\" fn (item: EntityPointer, ct: FrPropertyType, msg: FrMsg, ml: FrMsgLength); \n\n\n\n\n", "file_path": "intonaco/src/ftypes.rs", "rank": 67, "score": 20.40768712869414 }, { "content": "# Arriccio\n\n\n\nTool for dependency management of binary components, with implementations on multiple platforms. Arriccio works, by reading a a configuration file and a good start is to understand the format of this file.\n\n\n\n## Configuration File\n\n\n\nThe configuration file is written in TOML and contains the following information:\n\n\n\n- `id-url` the principal id of the component, given as a URL.\n\n- `description` a short description of the component.\n\n- `license-short` short version of license\n\n- `license-full` full version of license\n\n- `signing-key` url (https) for the signing key\n\n- `implementation` platform specific implementations (details below)\n\n- `implementation.dependencies` dependenies of specific implementation\n\n\n\n### Implementation\n\n\n\nThere can be multiple implementations, so this is given as an array of tables (see TOML syntax guide). 
Contents of an Implemenation can be:\n\n\n\n- `architecture` the underlying machine architecture for this implementation, possible values are `amd64` and `*`\n\n- `operating-system` the OS for this implementation, values can be: `windows`, `darwin` or `linux`\n\n- `archive-download-location` the url of an downloadable archive, containing the implementation itself\n\n- `start-local-command` command to start in target folder of unpacked archive\n\n- `environment-settings` array of commands to modify environement, see below\n\n\n\n### Implementation Dependencies\n\n\n\nThe implementation dependencies are also given as an array of tables below each (!) implementation, since they might be implementation specific. They can contain the following keys:\n\n\n\n- `id-url` the id of the dependency\n\n- `environment-settings` settings for the dependency in this specific context (dependency injection)\n\n\n\n### Environment Settings\n\n\n", "file_path": "arriccio/README.md", "rank": 68, "score": 15.044805510126674 }, { "content": "For each component environment settings can be specified, either directly in the component itself, or in the dependency section. Those environemnt settings are given as an array of strings, each string being a single command. The following commands are possible:\n\n\n\n- `add-path SAMPLE_PATH ./bin` add-path adds the target location as an absolute path to the given environment variable. 
The path is taken local to the target install location of the component and appended as an absolute path.\n\n- `set-value VALUE val` the corresponding value is set in the environment.\n\n\n\nThe environment settings are used to let components find each others locations, so that for example an executable is able to load a needed runtime library, or similar.\n\n\n\ntbd\n\n\n\n\n\n\n\n\n", "file_path": "arriccio/README.md", "rank": 69, "score": 13.568395466169735 }, { "content": "\t-- copy executable\n\n\tlocal fs = assert(io.popen(\"ls target/debug\"), \"ls not working on your system\")\n\n\tlocal s = nil\n\n\twhile true do\n\n\t\ts = fs:read()\n\n\t\tif s then\n\n\t\t\tlocal m = s:match(\"intonaco\")\n\n\t\t\tif m then\n\n\t\t\t\tos.execute(\"cp target/debug/\" .. s .. \" package/\" .. dir .. \"/intonaco.gio\")\n\n\t\t\t\tbreak\n\n\t\t\tend\n\n\t\telse\n\n\t\t\tbreak\n\n\t\tend\n\n\tend\n\n\tfs:close()\n\nend\n\n\n\nlocal function buildIntonaco()\n\n\t-- build executable\n", "file_path": "scripts/build.lua", "rank": 71, "score": 9.489834045221947 }, { "content": "func getArriccioDir() string {\n\n\tarrdir := \"\"\n\n\tenvdir := os.Getenv(\"AIO_COMPONENT_PATH\")\n\n\tif envdir != \"\" {\n\n\t\tarrdir = envdir\n\n\t} else {\n\n\t\tdir := getUserHomeDir()\n\n\t\tarrdir = filepath.Join(dir, \".aio\")\n\n\t}\n\n\t_, ok := isLocalDirValid(arrdir)\n\n\tif !ok {\n\n\t\tos.MkdirAll(arrdir, 0770)\n\n\t}\n\n\t_, ok = isLocalDirValid(filepath.Join(arrdir, \"cache\"))\n\n\tif !ok {\n\n\t\tos.MkdirAll(filepath.Join(arrdir, \"cache\"), 0770)\n\n\t}\n\n\t_, ok = isLocalDirValid(filepath.Join(arrdir, \"impl\"))\n\n\tif !ok {\n\n\t\tos.MkdirAll(filepath.Join(arrdir, \"impl\"), 0770)\n\n\t}\n\n\treturn arrdir\n", "file_path": "arriccio/main.go", "rank": 72, "score": 7.425359650240064 }, { "content": "# fresco\n\n\n\nfresco is a library to enable multi-language, multi-paradigm programming. It consists of multiple components, \n\nhandling different areas. 
There is a runtime for threading and interface topics (intonaco), a toolset to provide some data sttructure and tooling around it (sinopia) and a tool for running multiple components together and finding dependencies (arriccio).\n\n\n\nThe naming is originating in the fresco techniques, the intonaco is the last fresh layer of plaster, which intermix with the \n\ncolour pigments, the sinophia is a first scetch of the final picture in monochrome paint.\n\n\n\nThe single components have their own documentation (or at least some explanations):\n\n\n\n- [Arriccio](arriccio/README.md)\n\n\n", "file_path": "README.md", "rank": 73, "score": 6.46124969946986 }, { "content": "//\n\n// Fresco Framework for Multi-Language Programming\n\n// Copyright 2015-2016 Peter Althainz\n\n// \n\n// Distributed under the Apache License, Version 2.0\n\n// (See attached file LICENSE or copy at \n\n// http://www.apache.org/licenses/LICENSE-2.0)\n\n// \n\n// file: intonacto/src/ftypes.rs\n\n//\n\n\n\nuse entity::{EntityPointer};\n\n\n\n//\n\n// C-Interface\n\n//\n\n\n\n// Fresco types\n\n//\n\n\n", "file_path": "intonaco/src/ftypes.rs", "rank": 74, "score": 5.7592224966123275 }, { "content": "\t\tlocal line = io.read()\n\n\t\tif line == nil then break end\n\n\t\tlocal r = string.match(line, \"version = \\\"(.*)\\\"\")\n\n\t\tif r then\n\n\t\t\treturn r\n\n\t\tend\n\n\tend\n\nend\n\n\n\nlocal function packageIntonaco()\n\n\t-- clean output directory\n\n\tos.execute(\"rm -rf package\")\n\n\t-- create directory \n\n\tlocal dir = getPlatString(\"intonaco\", getIntonacoVersion())\n\n\tlfs.mkdir(\"package\")\n\n\tlfs.mkdir(\"package/\" .. 
dir)\n\n\n\n\t-- copy toml file\n\n\tos.execute(\"cp arriccio.toml package/arriccio.toml\")\n\n\n", "file_path": "scripts/build.lua", "rank": 75, "score": 4.843856222541341 }, { "content": "// A C++ functor\n\n\n", "file_path": "types/ctypes/functor.cpp", "rank": 76, "score": 2.8465214722190093 }, { "content": "func resolveDependencies(db AliasDB, cmd string, thisdep []ImplementationDependency, update bool) (bool, []DependencyProcessingInfo) {\n\n\n\n\t//\tprintln(\"resolve Dependencies for: \", cmd)\n\n\n\n\t// get aif and process\n\n\t// url is either given or taken from alias database\n\n\turl := cmd\n\n\t// check, do we have an alias for command\n\n\tif val, ok := db.Commands[cmd]; ok {\n\n\t\turl = val\n\n\t}\n\n\t// load toml file, returns directory, if locally found\n\n\taif, ifloc, isDownload := getComponentFromUrl(db, url, update)\n\n\n\n\t// resultlist\n\n\trlist := []DependencyProcessingInfo{}\n\n\n\n\t// build list of impl, sorted by version\n\n\tilist := []ComponentImplementation{}\n\n\tfor _, impl := range aif.Implementations {\n\n\t\tif matchArchAndOs(impl.Architecture, impl.OS) {\n\n\t\t\tilist = append(ilist, impl)\n\n\t\t}\n\n\t}\n\n\n\n\t// go through implementations\n\n\timplok := false\n\n\tfor _, impl := range ilist {\n\n\n\n\t\t// check dependencies are ok\n\n\t\trdeps := []DependencyProcessingInfo{}\n\n\t\tdepsok := true\n\n\t\tfor _, dep := range impl.Dependencies {\n\n\t\t\t// get valid impl\n\n\t\t\tok, r := resolveDependencies(db, dep.Id, []ImplementationDependency{dep}, update || isDownload)\n\n\t\t\tif ok {\n\n\t\t\t\tfor _, el := range r {\n\n\t\t\t\t\trdeps = append(rdeps, el)\n\n\t\t\t\t}\n\n\t\t\t} else {\n\n\t\t\t\tdepsok = false\n\n\t\t\t\tbreak\n\n\t\t\t}\n\n\t\t}\n\n\n\n\t\tif depsok {\n\n\t\t\tlocation := \"\"\n\n\t\t\t// special handling of local directories: if component was found locally and subdirectory of\n\n\t\t\t// implementation exists, then set location to this subdirectory.\n\n\t\t\tif ifloc != \"\" {\n\n\t\t\t\t// 
get last part of url\n\n\t\t\t\tparts := strings.Split(impl.Location, \"/\")\n\n\t\t\t\tdname := parts[len(parts)-1]\n\n\t\t\t\t// remove .tar.gz from name\n\n\t\t\t\tdname = strings.Replace(dname, \".tar.gz\", \"\", -1)\n\n\t\t\t\t// see if directory exists\n\n\t\t\t\tfdir := filepath.Join(ifloc, dname)\n\n\t\t\t\tabs, ok := isLocalDirValid(fdir)\n\n\t\t\t\tif ok {\n\n\t\t\t\t\tlocation = abs\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\t// impl found, now put together return values\n\n\t\t\timplok = true\n\n\t\t\tvar newd DependencyProcessingInfo\n\n\t\t\tif len(thisdep) > 0 {\n\n\t\t\t\tnewd = DependencyProcessingInfo{\n\n\t\t\t\t\taif,\n\n\t\t\t\t\timpl,\n\n\t\t\t\t\tlocation,\n\n\t\t\t\t\tappend(impl.Environment, thisdep[0].Environment...), // this is important, here happens dep. injections, we transfer dep env to settings\n\n\t\t\t\t}\n\n\t\t\t} else {\n\n\t\t\t\tnewd = DependencyProcessingInfo{\n\n\t\t\t\t\taif,\n\n\t\t\t\t\timpl,\n\n\t\t\t\t\tlocation,\n\n\t\t\t\t\timpl.Environment,\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\trlist = append(rlist, newd)\n\n\t\t\tfor _, el := range rdeps {\n\n\t\t\t\trlist = append(rlist, el)\n\n\t\t\t}\n\n\t\t\tbreak\n\n\t\t}\n\n\t}\n\n\n\n\treturn implok, rlist\n", "file_path": "arriccio/main.go", "rank": 77, "score": 2.8127605473806296 }, { "content": "func createClient() *http.Client {\n\n\ttr := &http.Transport{\n\n\t\tDisableCompression: true,\n\n\t}\n\n\tclient := &http.Client{Transport: tr}\n\n\treturn client\n", "file_path": "arriccio/main.go", "rank": 78, "score": 2.748507753996406 }, { "content": "\tlfs.chdir(\"intonaco\")\n\n\to, a = getOS()\n\n\tif o == \"windows\" then\n\n\t\tlocal fs = assert(io.popen(\"..\" .. pathSep() .. aioString() .. \" http://www.hgamer3d.org/tools/DetectVS.0717\"), \"detect vs not working\")\n\n\t\tlocal msDir = fs:read()\n\n\t\tfs:close()\n\n\t\tmsCmd = msDir .. \"\\\\VC\\\\Auxiliary\\\\Build\\\\vcvars64.bat\"\n\n\t\tos.execute(\"call \\\"\" .. msCmd .. 
\"\\\" && cargo build\")\n\n\telse\n\n\t\tos.execute(\"cargo build\")\n\n\tend\n\n\tpackageIntonaco()\n\nend\n\n\n\nlocal function buildArriccio()\n\n\tlfs.chdir(\"arriccio\")\n\n\to, a = getOS()\n\n\tif o == \"windows\" then\n\n\t\tos.execute(\"go build -o aio.exe\")\n\n\telse\n", "file_path": "scripts/build.lua", "rank": 79, "score": 2.6354357724135142 }, { "content": "\n\n\telseif arg[1] == \"sinopia\" then\n\n\t\tbuildSinopia()\n\n\t\tos.exit(0)\n\n\n\n\telseif arg[1] == \"sinopia-test\" then\n\n\t\ttestSinopia()\n\n\t\tos.exit(0)\n\n\n\n\telseif arg[1] == \"intonacoVersion\" then\n\n\t\tintonacoVersion()\n\n\t\tos.exit(0)\n\n\n\n\telseif arg[1] == \"register-intonaco\" then\n\n\t\tos.execute(aioString() .. \" local http://www.hgamer3d.org/component/Intonaco.0517 intonaco/package\")\n\n\t\tos.exit(0)\n\n\n\n\telseif arg[1] == \"unregister-intonaco\" then\n\n\t\tos.execute(aioString() .. \" remove-local http://www.hgamer3d.org/component/Intonaco.0517\")\n\n\t\tos.exit(0)\n", "file_path": "scripts/build.lua", "rank": 80, "score": 2.0325833211770306 } ]
Rust
backend/src/api/sounds.rs
dominikks/discord-soundboard-bot
900c93f66d55434ac5e0e95fc5db224a6cf78401
use crate::api::auth::UserId; use crate::api::Snowflake; use crate::audio_utils; use crate::db::models; use crate::db::DbConn; use crate::discord::management::check_guild_moderator; use crate::discord::management::check_guild_user; use crate::discord::management::get_guilds_for_user; use crate::discord::management::PermissionError; use crate::file_handling; use crate::CacheHttp; use bigdecimal::BigDecimal; use bigdecimal::FromPrimitive; use bigdecimal::ToPrimitive; use diesel::prelude::*; use diesel::result::Error as DieselError; use rocket::data::ToByteUnit; use rocket::response::NamedFile; use rocket::response::Responder; use rocket::routes; use rocket::Data; use rocket::Route; use rocket::State; use rocket_contrib::json::Json; use serde::Deserialize; use serde::Serialize; use serde_with::serde_as; use serde_with::TimestampSeconds; use serenity::model::id::GuildId; use std::convert::TryFrom; use std::num::TryFromIntError; use std::path::PathBuf; use std::time::SystemTime; use tokio::fs; pub fn get_routes() -> Vec<Route> { routes![ list_sounds, get_sound, create_sound, update_sound, delete_sound, upload_sound ] } #[derive(Debug, Responder)] enum SoundsError { #[response(status = 500)] IoError(String), #[response(status = 500)] SerenityError(String), #[response(status = 500)] InternalError(String), #[response(status = 500)] DieselError(String), #[response(status = 403)] InsufficientPermission(String), #[response(status = 404)] NotFound(String), #[response(status = 400)] InvalidSoundfile(String), } impl SoundsError { fn bigdecimal_error() -> Self { Self::InternalError(String::from("Number handling error")) } } impl From<DieselError> for SoundsError { fn from(err: DieselError) -> Self { if err == DieselError::NotFound { Self::NotFound(String::from("A sound with the given id does not exist")) } else { Self::DieselError(String::from("Failed to load data from database.")) } } } impl From<std::io::Error> for SoundsError { fn from(_: std::io::Error) -> Self { 
Self::IoError(String::from("Internal IO Error")) } } impl From<serenity::Error> for SoundsError { fn from(_: serenity::Error) -> Self { Self::SerenityError(String::from("Error fetching Discord API")) } } impl From<TryFromIntError> for SoundsError { fn from(_: TryFromIntError) -> Self { Self::InternalError(String::from("Number conversion failed")) } } impl From<PermissionError> for SoundsError { fn from(_: PermissionError) -> Self { Self::InsufficientPermission(String::from( "You do not have the permission to perform this action", )) } } #[serde_as] #[serde(rename_all = "camelCase")] #[derive(Serialize, Debug)] struct Sound { id: Snowflake, guild_id: Snowflake, name: String, category: String, #[serde_as(as = "TimestampSeconds<String>")] created_at: SystemTime, volume_adjustment: Option<f32>, soundfile: Option<Soundfile>, } #[serde_as] #[serde(rename_all = "camelCase")] #[derive(Serialize, Debug)] struct Soundfile { max_volume: f32, mean_volume: f32, length: f32, #[serde_as(as = "TimestampSeconds<String>")] uploaded_at: SystemTime, } impl TryFrom<(models::Sound, Option<models::Soundfile>)> for Sound { type Error = SoundsError; fn try_from(input: (models::Sound, Option<models::Soundfile>)) -> Result<Self, Self::Error> { let (s, f) = input; Ok(Self { id: Snowflake(u64::try_from(s.id)?), guild_id: Snowflake(s.guild_id.to_u64().ok_or(SoundsError::bigdecimal_error())?), name: s.name, category: s.category, created_at: s.created_at, volume_adjustment: s.volume_adjustment, soundfile: f.map(|f| Soundfile { max_volume: f.max_volume, mean_volume: f.mean_volume, length: f.length, uploaded_at: f.uploaded_at, }), }) } } #[get("/")] async fn list_sounds( cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, ) -> Result<Json<Vec<Sound>>, SoundsError> { let guild_ids = get_guilds_for_user(cache_http.inner(), &db, user.into()) .await? 
.into_iter() .map(|(guildinfo, _)| { BigDecimal::from_u64(guildinfo.id.0).ok_or(SoundsError::bigdecimal_error()) }) .collect::<Result<Vec<_>, _>>()?; let sounds = db .run(move |c| { use crate::db::schema::soundfiles; use crate::db::schema::sounds; sounds::table .filter(sounds::guild_id.eq_any(guild_ids)) .left_join(soundfiles::table) .load::<(models::Sound, Option<models::Soundfile>)>(c) }) .await?; Ok(Json( sounds .into_iter() .map(|data| Sound::try_from(data)) .collect::<Result<Vec<_>, _>>()?, )) } #[get("/<sound_id>")] async fn get_sound( sound_id: i32, cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, ) -> Result<NamedFile, SoundsError> { let (filename, guild_id) = db .run(move |c| { use crate::db::schema::soundfiles; use crate::db::schema::sounds; soundfiles::table .find(sound_id) .inner_join(sounds::table) .select((soundfiles::file_name, sounds::guild_id)) .first::<(String, BigDecimal)>(c) }) .await?; let guild_id = GuildId(guild_id.to_u64().ok_or(SoundsError::bigdecimal_error())?); check_guild_user(&cache_http.inner(), &db, user.into(), guild_id).await?; Ok(NamedFile::open(file_handling::get_full_sound_path(&filename)).await?) 
} #[serde(rename_all = "camelCase")] #[derive(Deserialize, Debug)] struct CreateSoundParameter { guild_id: Snowflake, name: String, category: String, volume_adjustment: Option<f32>, } #[post("/", format = "json", data = "<params>")] async fn create_sound( cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, params: Json<CreateSoundParameter>, ) -> Result<Json<Sound>, SoundsError> { let params = params.into_inner(); check_guild_moderator( cache_http.inner(), &db, user.clone().into(), GuildId(params.guild_id.0), ) .await?; let uid = BigDecimal::from_u64(user.0).ok_or(SoundsError::bigdecimal_error())?; let gid = BigDecimal::from_u64(params.guild_id.0).ok_or(SoundsError::bigdecimal_error())?; let sound = db .run(|c| { use crate::db::schema::sounds; diesel::insert_into(sounds::table) .values(( sounds::guild_id.eq(gid), sounds::name.eq(params.name), sounds::category.eq(params.category), sounds::volume_adjustment.eq(params.volume_adjustment), sounds::created_by_user_id.eq(Some(uid.clone())), sounds::last_edited_by_user_id.eq(Some(uid)), )) .get_result::<models::Sound>(c) }) .await?; Ok(Json(Sound::try_from((sound, None))?)) } #[serde(rename_all = "camelCase")] #[derive(Deserialize, Debug)] struct UpdateSoundParameter { name: Option<String>, category: Option<String>, #[serde(default, with = "::serde_with::rust::double_option")] volume_adjustment: Option<Option<f32>>, } impl From<UpdateSoundParameter> for models::SoundChangeset { fn from(s: UpdateSoundParameter) -> Self { Self { name: s.name, category: s.category, volume_adjustment: s.volume_adjustment, } } } #[put("/<sound_id>", format = "json", data = "<params>")] async fn update_sound( sound_id: i32, cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, params: Json<UpdateSoundParameter>, ) -> Result<(), SoundsError> { let guild_id = db .run(move |c| { use crate::db::schema::sounds; sounds::table .find(sound_id) .select(sounds::guild_id) .first::<BigDecimal>(c) }) .await?; let guild_id = 
guild_id.to_u64().ok_or(SoundsError::bigdecimal_error())?; check_guild_moderator( cache_http.inner(), &db, user.clone().into(), GuildId(guild_id), ) .await?; let uid = BigDecimal::from_u64(user.0).ok_or(SoundsError::bigdecimal_error())?; let params = params.into_inner(); db.run(move |c| { use crate::db::schema::sounds; diesel::update(sounds::table.filter(sounds::id.eq(sound_id))) .set(( &models::SoundChangeset::from(params), sounds::last_edited_at.eq(SystemTime::now()), sounds::last_edited_by_user_id.eq(Some(uid)), )) .execute(c) }) .await?; Ok(()) } #[delete("/<sound_id>")] async fn delete_sound( sound_id: i32, cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, ) -> Result<(), SoundsError> { let (guild_id, file_name) = fetch_guild_and_file(sound_id, &db).await?; check_guild_moderator(cache_http.inner(), &db, user.into(), GuildId(guild_id)).await?; if let Some(file_name) = file_name { if let Err(err) = fs::remove_file(file_handling::get_full_sound_path(&file_name)).await { if err.kind() != std::io::ErrorKind::NotFound { return Err(SoundsError::InternalError(String::from( "Failed to delete the corresponding sound file", ))); } } } let affected_rows = db .run(move |c| { use crate::db::schema::soundfiles; use crate::db::schema::sounds; diesel::delete(soundfiles::table.filter(soundfiles::sound_id.eq(sound_id))).execute(c)?; diesel::delete(sounds::table.filter(sounds::id.eq(sound_id))).execute(c) }) .await?; if affected_rows > 0 { Ok(()) } else { Err(SoundsError::NotFound(String::from( "A soundfile with the given id does not exist", ))) } } #[post("/<sound_id>", format = "audio/mpeg", data = "<data>")] async fn upload_sound( sound_id: i32, data: Data, cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, ) -> Result<Json<Soundfile>, SoundsError> { let (guild_id, file_name) = fetch_guild_and_file(sound_id, &db).await?; check_guild_moderator( cache_http.inner(), &db, user.clone().into(), GuildId(guild_id), ) .await?; let uid = 
BigDecimal::from_u64(user.0).ok_or(SoundsError::bigdecimal_error())?; let file_name = file_name.unwrap_or(format!("{}_{}.mp3", guild_id, sound_id)); let file_path = file_handling::get_full_sound_path(&file_name); let save_res = save_sound_file(sound_id, uid, file_name, &file_path, data, &db).await; if save_res.is_err() { let delete_res = fs::remove_file(&file_path).await; let db_res = db .run(move |c| { use crate::db::schema::soundfiles; diesel::delete(soundfiles::table.filter(soundfiles::sound_id.eq(sound_id))).execute(c) }) .await; delete_res?; db_res?; } save_res } async fn save_sound_file( sound_id: i32, user_id: BigDecimal, file_name: String, file_path: &PathBuf, data: Data, db: &DbConn, ) -> Result<Json<Soundfile>, SoundsError> { data.open(10.mebibytes()).stream_to_file(file_path).await?; let volume = audio_utils::detect_volume(&file_path).await; let length = audio_utils::get_length(&file_path).await; if let (Some(volume), Some(length)) = (volume, length) { let sound_info = models::Soundfile { sound_id, file_name, max_volume: volume.max_volume, mean_volume: volume.mean_volume, length, uploaded_by_user_id: Some(user_id), uploaded_at: SystemTime::now(), }; { let sound_info = sound_info.clone(); db.run(move |c| { use crate::db::schema::soundfiles; diesel::insert_into(soundfiles::table) .values(&sound_info) .on_conflict(soundfiles::sound_id) .do_update() .set(&sound_info) .execute(c) }) .await?; } Ok(Json(Soundfile { max_volume: sound_info.max_volume, mean_volume: sound_info.mean_volume, length: sound_info.length, uploaded_at: sound_info.uploaded_at, })) } else { Err(SoundsError::InvalidSoundfile(String::from( "File could not be analyzed. 
Is it corrupted?", ))) } } async fn fetch_guild_and_file( sound_id: i32, db: &DbConn, ) -> Result<(u64, Option<String>), SoundsError> { let (guild_id, file_name) = db .run(move |c| { use crate::db::schema::soundfiles; use crate::db::schema::sounds; sounds::table .find(sound_id) .left_join(soundfiles::table) .select((sounds::guild_id, soundfiles::file_name.nullable())) .first::<(BigDecimal, Option<String>)>(c) }) .await?; Ok(( guild_id.to_u64().ok_or(SoundsError::bigdecimal_error())?, file_name, )) }
use crate::api::auth::UserId; use crate::api::Snowflake; use crate::audio_utils; use crate::db::models; use crate::db::DbConn; use crate::discord::management::check_guild_moderator; use crate::discord::management::check_guild_user; use crate::discord::management::get_guilds_for_user; use crate::discord::management::PermissionError; use crate::file_handling; use crate::CacheHttp; use bigdecimal::BigDecimal; use bigdecimal::FromPrimitive; use bigdecimal::ToPrimitive; use diesel::prelude::*; use diesel::result::Error as DieselError; use rocket::data::ToByteUnit; use rocket::response::NamedFile; use rocket::response::Responder; use rocket::routes; use rocket::Data; use rocket::Route; use rocket::State; use rocket_contrib::json::Json; use serde::Deserialize; use serde::Serialize; use serde_with::serde_as; use serde_with::TimestampSeconds; use serenity::model::id::GuildId; use std::convert::TryFrom; use std::num::TryFromIntError; use std::path::PathBuf; use std::time::SystemTime; use tokio::fs; pub fn get_routes() -> Vec<Route> { routes![ list_sounds, get_sound, create_sound, update_sound, delete_sound, upload_sound ] } #[derive(Debug, Responder)] enum SoundsError { #[response(status = 500)] IoError(String), #[response(status = 500)] SerenityError(String), #[response(status = 500)] InternalError(String), #[response(status = 500)] DieselError(String), #[response(status = 403)] InsufficientPermission(String), #[response(status = 404)] NotFound(String), #[response(status = 400)] InvalidSoundfile(String), } impl SoundsError { fn bigdecimal_error() -> Self { Self::InternalError(String::from("Number handling error")) } } impl From<DieselError> for SoundsError { fn from(err: DieselError) -> Self { if err == DieselError::NotFound { Self::NotFound(String::from("A sound with the given id does not exist")) } else { Self::DieselError(String::from("Failed to load data from database.")) } } } impl From<std::io::Error> for SoundsError { fn from(_: std::io::Error) -> Self { 
Self::IoError(String::from("Internal IO Error")) } } impl From<serenity::Error> for SoundsError { fn from(_: serenity::Error) -> Self { Self::SerenityError(String::from("Error fetching Discord API")) } } impl From<TryFromIntError> for SoundsError { fn from(_: TryFromIntError) -> Self { Self::InternalError(String::from("Number conversion failed")) } } impl From<PermissionError> for SoundsError { fn from(_: PermissionError) -> Self { Self::InsufficientPermission(String::from( "You do not have the permission to perform this action", )) } } #[serde_as] #[serde(rename_all = "camelCase")] #[derive(Serialize, Debug)] struct Sound { id: Snowflake, guild_id: Snowflake, name: String, category: String, #[serde_as(as = "TimestampSeconds<String>")] created_at: SystemTime, volume_adjustment: Option<f32>, soundfile: Option<Soundfile>, } #[serde_as] #[serde(rename_all = "camelCase")] #[derive(Serialize, Debug)] struct Soundfile { max_volume: f32, mean_volume: f32, length: f32, #[serde_as(as = "TimestampSeconds<String>")] uploaded_at: SystemTime, } impl TryFrom<(models::Sound, Option<models::Soundfile>)> for Sound { type Error = SoundsError; fn try_from(input: (models::Sound, Option<models::Soundfile>)) -> Result<Self, Self::Error> { let (s, f) = input; Ok(Self { id: Snowflake(u64::try_from(s.id)?), guild_id: Snowflake(s.guild_id.to_u64().ok_or(SoundsError::bigdecimal_error())?), name: s.name, category: s.category, created_at: s.created_at, volume_adjustment: s.volume_adjustment, soundfile: f.map(|f| Soundfile { max_volume: f.max_volume, mean_volume: f.mean_volume, length: f.length, uploaded_at: f.uploaded_at, }), }) } } #[get("/")] async fn list_sounds( cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, ) -> Result<Json<Vec<Sound>>, SoundsError> { let guild_ids = get_guilds_for_user(cache_http.inner(), &db, user.into()) .await? 
.into_iter() .map(|(guildinfo, _)| { BigDecimal::from_u64(guildinfo.id.0).ok_or(SoundsError::bigdecimal_error()) }) .collect::<Result<Vec<_>, _>>()?; let sounds = db .run(move |c| { use crate::db::schema::soundfiles; use crate::db::schema::sounds; sounds::table .filter(sounds::guild_id.eq_any(guild_ids)) .left_join(soundfiles::table) .load::<(models::Sound, Option<models::Soundfile>)>(c) }) .await?; Ok(Json( sounds .into_iter() .map(|data| Sound::try_from(data)) .collect::<Result<Vec<_>, _>>()?, )) } #[get("/<sound_id>")] async fn get_sound( sound_id: i32, cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, ) -> Result<NamedFile, SoundsError> { let (filename, guild_id) = db .run(move |c| { use crate::db::schema::soundfiles; use crate::db::schema::sounds; soundfiles::table .find(sound_id) .inner_join(sounds::table) .select((soundfiles::file_name, sounds::guild_id)) .first::<(String, BigDecimal)>(c) }) .await?; let guild_id = GuildId(guild_id.to_u64().ok_or(SoundsError::bigdecimal_error())?); check_guild_user(&cache_http.inner(), &db, user.into(), guild_id).await?; Ok(NamedFile::open(file_handling::get_full_sound_path(&filename)).await?) 
} #[serde(rename_all = "camelCase")] #[derive(Deserialize, Debug)] struct CreateSoundParameter { guild_id: Snowflake, name: String, category: String, volume_adjustment: Option<f32>, } #[post("/", format = "json", data = "<params>")] async fn create_sound( cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, params: Json<CreateSoundParameter>, ) -> Result<Json<Sound>, SoundsError> { let params = params.into_inner(); check_guild_moderator( cache_http.inner(), &db, user.clone().into(), GuildId(params.guild_id.0), ) .await?; let uid = BigDecimal::from_u64(user.0).ok_or(SoundsError::bigdecimal_error())?; let gid = BigDecimal::from_u64(params.guild_id.0).ok_or(SoundsError::bigdecimal_error())?; let sound = db .run(|c| { use crate::db::schema::sounds; diesel::insert_into(sounds::table) .values(( sounds::guild_id.eq(gid), sounds::name.eq(params.name), sounds::category.eq(params.category), sounds::volume_adjustment.eq(params.volume_adjustment), sounds::created_by_user_id.eq(Some(uid.clone())), sounds::last_edited_by_user_id.eq(Some(uid)), )) .get_result::<models::Sound>(c) }) .await?; Ok(Json(Sound::try_from((sound, None))?)) } #[serde(rename_all = "camelCase")] #[derive(Deserialize, Debug)] struct UpdateSoundParameter { name: Option<String>, category: Option<String>, #[serde(default, with = "::serde_with::rust::doub
sError> { let (guild_id, file_name) = db .run(move |c| { use crate::db::schema::soundfiles; use crate::db::schema::sounds; sounds::table .find(sound_id) .left_join(soundfiles::table) .select((sounds::guild_id, soundfiles::file_name.nullable())) .first::<(BigDecimal, Option<String>)>(c) }) .await?; Ok(( guild_id.to_u64().ok_or(SoundsError::bigdecimal_error())?, file_name, )) }
le_option")] volume_adjustment: Option<Option<f32>>, } impl From<UpdateSoundParameter> for models::SoundChangeset { fn from(s: UpdateSoundParameter) -> Self { Self { name: s.name, category: s.category, volume_adjustment: s.volume_adjustment, } } } #[put("/<sound_id>", format = "json", data = "<params>")] async fn update_sound( sound_id: i32, cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, params: Json<UpdateSoundParameter>, ) -> Result<(), SoundsError> { let guild_id = db .run(move |c| { use crate::db::schema::sounds; sounds::table .find(sound_id) .select(sounds::guild_id) .first::<BigDecimal>(c) }) .await?; let guild_id = guild_id.to_u64().ok_or(SoundsError::bigdecimal_error())?; check_guild_moderator( cache_http.inner(), &db, user.clone().into(), GuildId(guild_id), ) .await?; let uid = BigDecimal::from_u64(user.0).ok_or(SoundsError::bigdecimal_error())?; let params = params.into_inner(); db.run(move |c| { use crate::db::schema::sounds; diesel::update(sounds::table.filter(sounds::id.eq(sound_id))) .set(( &models::SoundChangeset::from(params), sounds::last_edited_at.eq(SystemTime::now()), sounds::last_edited_by_user_id.eq(Some(uid)), )) .execute(c) }) .await?; Ok(()) } #[delete("/<sound_id>")] async fn delete_sound( sound_id: i32, cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, ) -> Result<(), SoundsError> { let (guild_id, file_name) = fetch_guild_and_file(sound_id, &db).await?; check_guild_moderator(cache_http.inner(), &db, user.into(), GuildId(guild_id)).await?; if let Some(file_name) = file_name { if let Err(err) = fs::remove_file(file_handling::get_full_sound_path(&file_name)).await { if err.kind() != std::io::ErrorKind::NotFound { return Err(SoundsError::InternalError(String::from( "Failed to delete the corresponding sound file", ))); } } } let affected_rows = db .run(move |c| { use crate::db::schema::soundfiles; use crate::db::schema::sounds; diesel::delete(soundfiles::table.filter(soundfiles::sound_id.eq(sound_id))).execute(c)?; 
diesel::delete(sounds::table.filter(sounds::id.eq(sound_id))).execute(c) }) .await?; if affected_rows > 0 { Ok(()) } else { Err(SoundsError::NotFound(String::from( "A soundfile with the given id does not exist", ))) } } #[post("/<sound_id>", format = "audio/mpeg", data = "<data>")] async fn upload_sound( sound_id: i32, data: Data, cache_http: State<'_, CacheHttp>, db: DbConn, user: UserId, ) -> Result<Json<Soundfile>, SoundsError> { let (guild_id, file_name) = fetch_guild_and_file(sound_id, &db).await?; check_guild_moderator( cache_http.inner(), &db, user.clone().into(), GuildId(guild_id), ) .await?; let uid = BigDecimal::from_u64(user.0).ok_or(SoundsError::bigdecimal_error())?; let file_name = file_name.unwrap_or(format!("{}_{}.mp3", guild_id, sound_id)); let file_path = file_handling::get_full_sound_path(&file_name); let save_res = save_sound_file(sound_id, uid, file_name, &file_path, data, &db).await; if save_res.is_err() { let delete_res = fs::remove_file(&file_path).await; let db_res = db .run(move |c| { use crate::db::schema::soundfiles; diesel::delete(soundfiles::table.filter(soundfiles::sound_id.eq(sound_id))).execute(c) }) .await; delete_res?; db_res?; } save_res } async fn save_sound_file( sound_id: i32, user_id: BigDecimal, file_name: String, file_path: &PathBuf, data: Data, db: &DbConn, ) -> Result<Json<Soundfile>, SoundsError> { data.open(10.mebibytes()).stream_to_file(file_path).await?; let volume = audio_utils::detect_volume(&file_path).await; let length = audio_utils::get_length(&file_path).await; if let (Some(volume), Some(length)) = (volume, length) { let sound_info = models::Soundfile { sound_id, file_name, max_volume: volume.max_volume, mean_volume: volume.mean_volume, length, uploaded_by_user_id: Some(user_id), uploaded_at: SystemTime::now(), }; { let sound_info = sound_info.clone(); db.run(move |c| { use crate::db::schema::soundfiles; diesel::insert_into(soundfiles::table) .values(&sound_info) .on_conflict(soundfiles::sound_id) .do_update() 
.set(&sound_info) .execute(c) }) .await?; } Ok(Json(Soundfile { max_volume: sound_info.max_volume, mean_volume: sound_info.mean_volume, length: sound_info.length, uploaded_at: sound_info.uploaded_at, })) } else { Err(SoundsError::InvalidSoundfile(String::from( "File could not be analyzed. Is it corrupted?", ))) } } async fn fetch_guild_and_file( sound_id: i32, db: &DbConn, ) -> Result<(u64, Option<String>), Sound
random
[ { "content": "pub fn get_full_sound_path(filename: &str) -> PathBuf {\n\n (*SOUNDS_FOLDER).join(filename)\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum FileError {\n\n IoError(io::Error),\n\n}\n\n\n\nimpl From<io::Error> for FileError {\n\n fn from(err: io::Error) -> Self {\n\n FileError::IoError(err)\n\n }\n\n}\n\n\n\nimpl fmt::Display for FileError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n\n match self {\n\n FileError::IoError(err) => write!(f, \"FileError: IoError occurred. {}\", err),\n\n }\n", "file_path": "backend/src/file_handling.rs", "rank": 1, "score": 220057.47141876153 }, { "content": "pub fn get_routes() -> Vec<Route> {\n\n routes![\n\n user,\n\n login_pre,\n\n login_post,\n\n login_error,\n\n logout,\n\n get_auth_token\n\n ]\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct UserId(pub u64);\n\n\n\nimpl From<UserId> for SerenityUserId {\n\n fn from(user: UserId) -> Self {\n\n SerenityUserId(user.0)\n\n }\n\n}\n\n\n", "file_path": "backend/src/api/auth.rs", "rank": 2, "score": 214522.33339836763 }, { "content": "pub fn get_routes() -> Vec<Route> {\n\n routes![\n\n get_recordings,\n\n mix_recording,\n\n delete_recording,\n\n get_recording,\n\n get_mix\n\n ]\n\n}\n\n\n", "file_path": "backend/src/api/recorder.rs", "rank": 3, "score": 214522.33339836763 }, { "content": "pub fn get_routes() -> Vec<Route> {\n\n routes![stop, play, record]\n\n}\n\n\n", "file_path": "backend/src/api/commands.rs", "rank": 4, "score": 214522.33339836763 }, { "content": "pub fn get_routes() -> Vec<Route> {\n\n routes![\n\n get_all_random_infixes,\n\n set_random_infixes,\n\n get_guild_settings,\n\n set_guild_settings\n\n ]\n\n}\n\n\n", "file_path": "backend/src/api/settings.rs", "rank": 5, "score": 214522.33339836763 }, { "content": "#[get(\"/auth/login?<error>&<error_description>\")]\n\nfn login_error(error: String, error_description: String) -> status::Unauthorized<String> {\n\n warn!(?error, \"Oauth2 request failed: {}\", 
error_description);\n\n status::Unauthorized(Some(String::from(\n\n \"OAuth2 Request to Discord API failed. Could not authenticate you.\",\n\n )))\n\n}\n\n\n\n/// Login cookie data\n", "file_path": "backend/src/api/auth.rs", "rank": 7, "score": 198561.0725381205 }, { "content": "pub fn get_oauth_client() -> BasicClient {\n\n BasicClient::new(\n\n ClientId::new(DISCORD_CLIENT_ID.clone()),\n\n Some(ClientSecret::new(DISCORD_CLIENT_SECRET.clone())),\n\n AuthUrl::new(\"https://discord.com/api/oauth2/authorize\".to_string())\n\n .expect(\"Parse discord auth url\"),\n\n Some(\n\n TokenUrl::new(\"https://discord.com/api/oauth2/token\".to_string())\n\n .expect(\"Parse discord token url\"),\n\n ),\n\n )\n\n .set_redirect_url(\n\n RedirectUrl::new(format!(\"{}/api/auth/login\", BASE_URL.clone())).expect(\"Create redirect url\"),\n\n )\n\n}\n\n\n", "file_path": "backend/src/api/auth.rs", "rank": 10, "score": 171722.01663447157 }, { "content": "struct UserData {\n\n user_id: UserId,\n\n guild_id: GuildId,\n\n last_voice_activity: SystemTime,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum RecordingError {\n\n IoError(std::io::Error),\n\n NoData,\n\n}\n\n\n\nimpl fmt::Display for RecordingError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::result::Result<(), std::fmt::Error> {\n\n match self {\n\n RecordingError::NoData => write!(f, \"RecordingError: no data to record\"),\n\n RecordingError::IoError(err) => write!(f, \"RecordingError: IoError occurred. 
{}\", err),\n\n }\n\n }\n\n}\n", "file_path": "backend/src/discord/recorder.rs", "rank": 11, "score": 169020.3465503918 }, { "content": "#[serde_as]\n\n#[derive(Deserialize, Debug)]\n\nstruct DiscordUser {\n\n id: Snowflake,\n\n username: String,\n\n #[serde_as(as = \"DisplayFromStr\")]\n\n discriminator: u16,\n\n avatar: Option<String>,\n\n}\n\n\n\n/// This is the callback of the oauth request\n\n#[instrument(skip(cookies, oauth, db, code))]\n\n#[get(\"/auth/login?<code>&<state>\", rank = 2)]\n\nasync fn login_post(\n\n cookies: &CookieJar<'_>,\n\n oauth: State<'_, BasicClient>,\n\n db: DbConn,\n\n code: String,\n\n state: String,\n\n) -> Result<Redirect, AuthError> {\n\n let login_cookie = cookies\n\n .get_private(LOGIN_COOKIE)\n", "file_path": "backend/src/api/auth.rs", "rank": 12, "score": 167028.87159906782 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\n#[derive(Debug, Serialize)]\n\nstruct User {\n\n id: Snowflake,\n\n username: String,\n\n discriminator: u16,\n\n avatar_url: String,\n\n guilds: Vec<GuildInfo>,\n\n}\n\n\n", "file_path": "backend/src/api/auth.rs", "rank": 13, "score": 164674.1975787532 }, { "content": "#[serde_as]\n\n#[derive(Deserialize, Serialize, Clone, Debug, PartialEq, Eq, Hash)]\n\nstruct Snowflake(#[serde_as(as = \"DisplayFromStr\")] pub u64);\n\n\n\nlazy_static! 
{\n\n // Custom settings for the frontend\n\n static ref APP_TITLE: Option<String> = var(\"APP_TITLE\").ok();\n\n // Discord data found in env\n\n static ref DISCORD_CLIENT_ID: String = var(\"DISCORD_CLIENT_ID\").expect(\"Expected DISCORD_CLIENT_ID as env\");\n\n static ref DISCORD_CLIENT_SECRET: String = var(\"DISCORD_CLIENT_SECRET\").expect(\"Expected DISCORD_CLIENT_SECRET as env\");\n\n}\n\n\n\npub async fn run(\n\n cache_http: CacheHttp,\n\n songbird: Arc<Songbird>,\n\n recorder: Arc<Recorder>,\n\n) -> Result<(), RocketError> {\n\n rocket::ignite()\n\n .attach(SpaceHelmet::default())\n\n .attach(db::DbConn::fairing())\n\n .attach(AdHoc::on_attach(\n\n \"Database Migrations\",\n", "file_path": "backend/src/api/mod.rs", "rank": 14, "score": 160593.48164361442 }, { "content": "#[post(\"/auth/logout\")]\n\nfn logout(cookies: &CookieJar<'_>) -> String {\n\n cookies.remove_private(Cookie::named(SESSION_COOKIE));\n\n String::from(\"User logged out\")\n\n}\n\n\n\n/// Beware: this replaces the current auth token by a new one. 
The old one becomes invalid.\n\n#[post(\"/auth/gettoken\")]\n\nasync fn get_auth_token(user: UserId, db: DbConn) -> Result<String, AuthError> {\n\n let uid = BigDecimal::from_u64(user.0).ok_or(AuthError::bigdecimal_error())?;\n\n let auth_token: String = iter::repeat(())\n\n .map(|_| OsRng.sample(Alphanumeric))\n\n .map(char::from)\n\n .take(32)\n\n .collect();\n\n\n\n {\n\n let auth_token = models::AuthToken {\n\n user_id: uid,\n\n token: auth_token.clone(),\n\n creation_time: SystemTime::now(),\n", "file_path": "backend/src/api/auth.rs", "rank": 15, "score": 146164.84402570105 }, { "content": "#[derive(Debug, Responder)]\n\nenum SettingsError {\n\n #[response(status = 500)]\n\n NumericalError(String),\n\n #[response(status = 500)]\n\n DieselError(String),\n\n #[response(status = 500)]\n\n SerenityError(String),\n\n #[response(status = 403)]\n\n InsufficientPermission(String),\n\n}\n\n\n\nimpl SettingsError {\n\n fn bigdecimal_error() -> Self {\n\n Self::NumericalError(String::from(\"BigDecimal handling error.\"))\n\n }\n\n}\n\n\n\nimpl From<DieselError> for SettingsError {\n\n fn from(err: DieselError) -> Self {\n\n error!(?err, \"Diesel error in Random Infix API.\");\n", "file_path": "backend/src/api/settings.rs", "rank": 18, "score": 136606.82522440547 }, { "content": "#[derive(Debug, Responder)]\n\nenum RecorderError {\n\n #[response(status = 500)]\n\n InternalError(String),\n\n #[response(status = 500)]\n\n IoError(String),\n\n #[response(status = 400)]\n\n RequestError(String),\n\n #[response(status = 404)]\n\n NotFound(String),\n\n #[response(status = 401)]\n\n NotAMember(String),\n\n}\n\n\n\nimpl From<io::Error> for RecorderError {\n\n fn from(_: io::Error) -> Self {\n\n RecorderError::IoError(String::from(\"IO Error\"))\n\n }\n\n}\n\n\n\nimpl From<OsString> for RecorderError {\n", "file_path": "backend/src/api/recorder.rs", "rank": 19, "score": 136606.82522440547 }, { "content": "#[derive(Debug, Responder)]\n\nenum CommandError {\n\n #[response(status = 
404)]\n\n NotFound(String),\n\n #[response(status = 503)]\n\n ServiceUnavailable(String),\n\n #[response(status = 403)]\n\n NotAMember(String),\n\n #[response(status = 500)]\n\n InternalError(String),\n\n}\n\n\n\nimpl CommandError {\n\n fn bigdecimal_error() -> Self {\n\n Self::InternalError(String::from(\"Number handling error\"))\n\n }\n\n}\n\n\n\nimpl From<DiscordPlayError> for CommandError {\n\n fn from(error: DiscordPlayError) -> Self {\n\n match error {\n", "file_path": "backend/src/api/commands.rs", "rank": 20, "score": 136606.82522440547 }, { "content": "#[derive(Responder, Debug)]\n\nenum AuthError {\n\n #[response(status = 403)]\n\n CsrfMissmatch(String),\n\n #[response(status = 403)]\n\n MissingLoginCookie(String),\n\n #[response(status = 500)]\n\n RequestTokenError(String),\n\n #[response(status = 500)]\n\n UserDataError(String),\n\n #[response(status = 500)]\n\n InternalError(String),\n\n}\n\n\n\nimpl AuthError {\n\n fn bigdecimal_error() -> Self {\n\n Self::InternalError(String::from(\"Number handling error\"))\n\n }\n\n}\n\n\n\nimpl<RE: Error, T: oauth2::ErrorResponse> From<RequestTokenError<RE, T>> for AuthError {\n", "file_path": "backend/src/api/auth.rs", "rank": 21, "score": 136606.82522440547 }, { "content": "#[derive(Serialize, Debug)]\n\nstruct RecordingUser {\n\n /// Externally, we use the file nameas id. 
Is a unique id together with the guild_id and timestamp.\n\n id: String,\n\n username: String,\n\n}\n\n\n\nimpl TryFrom<file_handling::Recording> for Recording {\n\n type Error = RecorderError;\n\n\n\n fn try_from(r: file_handling::Recording) -> std::result::Result<Self, Self::Error> {\n\n let users: Result<Vec<RecordingUser>, _> = r\n\n .users\n\n .into_iter()\n\n .map(|user| {\n\n user\n\n .file_name\n\n .clone()\n\n .into_string()\n\n .map(|file_name| RecordingUser {\n\n id: file_name,\n", "file_path": "backend/src/api/recorder.rs", "rank": 22, "score": 136004.67225315675 }, { "content": "/// Creates the framework used by the discord client\n\npub fn create_framework() -> StandardFramework {\n\n StandardFramework::new()\n\n .configure(|c| c.prefix(\"~\"))\n\n .group(&GENERAL_GROUP)\n\n}\n\n\n", "file_path": "backend/src/discord/commands.rs", "rank": 23, "score": 135535.6782924728 }, { "content": "export interface User {\n\n id: string;\n\n username: string;\n\n discriminator: number;\n\n avatarUrl: string;\n\n guilds: Guild[];\n", "file_path": "frontend/src/app/services/api.service.ts", "rank": 24, "score": 123406.03432369798 }, { "content": " getSoundName(command: KeyCommand) {\n\n return command != null && typeof command !== 'string' ? 
command.name : '';\n", "file_path": "frontend/src/app/keybind-generator/searchable-sound-select/searchable-sound-select.component.ts", "rank": 25, "score": 117993.09423339494 }, { "content": " get sound() {\n\n return this.soundEntry.sound;\n", "file_path": "frontend/src/app/settings/sound-manager/sound-details/sound-details.component.ts", "rank": 26, "score": 103170.6029394127 }, { "content": "export class Sound implements ApiSound {\n\n id: string;\n\n guildId: string;\n\n name: string;\n\n category: string;\n\n createdAt: number;\n\n volumeAdjustment?: number;\n\n soundfile?: Soundfile;\n\n\n\n constructor(base: ApiSound) {\n\n this.id = base.id;\n\n this.guildId = base.guildId;\n\n this.name = base.name;\n\n this.category = base.category;\n\n this.createdAt = base.createdAt;\n\n this.volumeAdjustment = base.volumeAdjustment;\n\n this.soundfile = base.soundfile;\n\n }\n\n\n\n getDownloadUrl() {\n\n return `/api/sounds/${this.encodeId()}`;\n\n }\n\n\n\n getPlayUrl(guild: Guild | string) {\n\n // We can play a sound on a different guild than where it is located\n\n const guildid = typeof guild === 'string' ? 
guild : guild.id;\n\n return `/api/guilds/${guildid}/play/${this.encodeId()}`;\n\n }\n\n\n\n encodeId() {\n\n return this.id\n\n .split('/')\n\n .map(part => encodeURIComponent(part))\n\n .join('/');\n\n }\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 27, "score": 100682.616308103 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\n#[derive(Serialize, Debug)]\n\nstruct Recording {\n\n guild_id: Snowflake,\n\n timestamp: u64,\n\n length: f32,\n\n users: Vec<RecordingUser>,\n\n}\n\n\n", "file_path": "backend/src/api/recorder.rs", "rank": 28, "score": 100353.33022002531 }, { "content": "#[instrument(skip(cookies, oauth))]\n\n#[get(\"/auth/login\", rank = 3)]\n\nfn login_pre(\n\n cookies: &CookieJar<'_>,\n\n oauth: State<'_, BasicClient>,\n\n) -> Result<Redirect, AuthError> {\n\n let (pkce_challenge, pkce_verifier) = PkceCodeChallenge::new_random_sha256();\n\n\n\n // Generate the full authorization URL.\n\n let (auth_url, csrf_state) = oauth\n\n .authorize_url(CsrfToken::new_random)\n\n .add_scope(Scope::new(\"identify\".to_string()))\n\n .set_pkce_challenge(pkce_challenge)\n\n .url();\n\n\n\n // Place the csrf token and pkce verifier as secure cookies on the client, expiring in 5 minutes\n\n cookies.add_private(\n\n Cookie::build(\n\n LOGIN_COOKIE,\n\n serde_json::to_string(&LoginInfo {\n\n csrf_state: csrf_state.secret().clone(),\n\n pkce_verifier: pkce_verifier.secret().clone(),\n", "file_path": "backend/src/api/auth.rs", "rank": 29, "score": 98858.43760729975 }, { "content": "#[skip_serializing_none]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[derive(Debug, Serialize)]\n\nstruct InfoResponse {\n\n version: String,\n\n build_id: Option<String>,\n\n build_timestamp: Option<u64>,\n\n title: Option<String>,\n\n discord_client_id: String,\n\n}\n\n\n\n#[get(\"/api/info\")]\n\nasync fn info() -> Json<InfoResponse> {\n\n Json(InfoResponse {\n\n version: VERSION.to_string(),\n\n build_id: BUILD_ID.map(|s| s.to_string()),\n\n build_timestamp: 
BUILD_TIMESTAMP.and_then(|s| s.parse::<u64>().ok()),\n\n title: APP_TITLE.clone(),\n\n discord_client_id: DISCORD_CLIENT_ID.clone(),\n\n })\n\n}\n", "file_path": "backend/src/api/mod.rs", "rank": 30, "score": 98543.28642156081 }, { "content": "#[derive(Deserialize, Serialize, Debug)]\n\nstruct LoginInfo {\n\n csrf_state: String,\n\n pkce_verifier: String,\n\n}\n\n\n\n/// A user as defined by the Discord API\n", "file_path": "backend/src/api/auth.rs", "rank": 31, "score": 98540.13149318915 }, { "content": "#[serde_as]\n\n#[derive(Deserialize, Serialize, Debug)]\n\nstruct SessionInfo {\n\n #[serde_as(as = \"TimestampSeconds<String>\")]\n\n timestamp: SystemTime,\n\n user_id: Snowflake,\n\n}\n\n\n\n#[rocket::async_trait]\n\nimpl<'a, 'r> FromRequest<'a, 'r> for UserId {\n\n type Error = ();\n\n\n\n /// Protected api endpoints can inject `User`.\n\n async fn from_request(request: &'a Request<'r>) -> request::Outcome<Self, Self::Error> {\n\n let cookies = request.cookies();\n\n cookies\n\n .get_private(SESSION_COOKIE)\n\n .and_then(|cookie| serde_json::from_str::<SessionInfo>(cookie.value()).ok())\n\n .and_then(|cookie| {\n\n let diff = SystemTime::now()\n\n .duration_since(cookie.timestamp)\n\n .ok()?\n", "file_path": "backend/src/api/auth.rs", "rank": 32, "score": 98540.07854006885 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\n#[derive(Serialize, Debug)]\n\nstruct GuildSettings {\n\n user_role_id: Option<Snowflake>,\n\n moderator_role_id: Option<Snowflake>,\n\n target_max_volume: f32,\n\n target_mean_volume: f32,\n\n roles: HashMap<Snowflake, String>,\n\n}\n\n\n\n#[get(\"/guilds/<guild_id>/settings\")]\n\nasync fn get_guild_settings(\n\n guild_id: u64,\n\n user: UserId,\n\n db: DbConn,\n\n cache_http: State<'_, CacheHttp>,\n\n) -> Result<Json<GuildSettings>, SettingsError> {\n\n let guild_id = GuildId(guild_id);\n\n check_guild_admin(cache_http.inner(), user.into(), guild_id).await?;\n\n\n\n let gid = 
BigDecimal::from_u64(guild_id.0).ok_or(SettingsError::bigdecimal_error())?;\n\n let guild_settings = db\n", "file_path": "backend/src/api/settings.rs", "rank": 33, "score": 98539.97714216154 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\n#[derive(Serialize, Debug)]\n\nstruct RandomInfix {\n\n guild_id: Snowflake,\n\n infix: String,\n\n display_name: String,\n\n}\n\n\n\nimpl TryFrom<models::RandomInfix> for RandomInfix {\n\n type Error = ();\n\n\n\n fn try_from(infix: models::RandomInfix) -> Result<Self, Self::Error> {\n\n Ok(Self {\n\n guild_id: Snowflake(infix.guild_id.to_u64().ok_or(())?),\n\n infix: infix.infix,\n\n display_name: infix.display_name,\n\n })\n\n }\n\n}\n\n\n\n#[get(\"/randominfixes\")]\n\nasync fn get_all_random_infixes(\n", "file_path": "backend/src/api/settings.rs", "rank": 34, "score": 98539.97714216154 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\n#[derive(Deserialize, Debug)]\n\nstruct MixingParameter {\n\n /// Where the mixed part should start and end. 
To calculate this, the sound\n\n /// files are assumed to be aligned at the end.\n\n start: f32,\n\n end: f32,\n\n /// All files that should be included\n\n user_ids: Vec<String>,\n\n}\n\n\n", "file_path": "backend/src/api/recorder.rs", "rank": 35, "score": 98539.97714216154 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\n#[derive(Debug, Serialize)]\n\nstruct GuildInfo {\n\n id: Snowflake,\n\n name: String,\n\n icon_url: Option<String>,\n\n role: UserPermission,\n\n}\n\n\n\nimpl Serialize for UserPermission {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n serializer.serialize_str(match *self {\n\n UserPermission::Admin => \"admin\",\n\n UserPermission::Moderator => \"moderator\",\n\n UserPermission::User => \"user\",\n\n })\n\n }\n\n}\n\n\n", "file_path": "backend/src/api/auth.rs", "rank": 36, "score": 98539.97714216154 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\n#[derive(Serialize, Debug)]\n\nstruct MixingResult {\n\n download_url: String,\n\n}\n\n\n\n#[instrument(skip(params, cache_http, db, user))]\n\n#[post(\n\n \"/guilds/<guild_id>/recordings/<timestamp>\",\n\n format = \"json\",\n\n data = \"<params>\"\n\n)]\n\nasync fn mix_recording(\n\n guild_id: u64,\n\n timestamp: u64,\n\n params: Json<MixingParameter>,\n\n cache_http: State<'_, CacheHttp>,\n\n db: DbConn,\n\n user: UserId,\n\n) -> Result<Json<MixingResult>, RecorderError> {\n\n let guild_id = GuildId(guild_id);\n\n check_guild_user(&cache_http.inner(), &db, user.into(), guild_id).await?;\n", "file_path": "backend/src/api/recorder.rs", "rank": 37, "score": 98539.97714216154 }, { "content": "interface ApiSound {\n\n id: string;\n\n guildId: string;\n\n name: string;\n\n category: string;\n\n createdAt: number;\n\n volumeAdjustment?: number;\n\n soundfile?: Soundfile;\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 38, "score": 97639.59918045823 }, { "content": " encodeId() {\n\n return this.id\n\n 
.split('/')\n\n .map(part => encodeURIComponent(part))\n\n .join('/');\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 39, "score": 97559.90355643257 }, { "content": "#[serde(rename_all = \"camelCase\")]\n\n#[derive(Deserialize, Debug)]\n\nstruct RandomInfixParameter {\n\n infix: String,\n\n display_name: String,\n\n}\n\n\n\n#[put(\"/guilds/<guild_id>/randominfixes\", format = \"json\", data = \"<params>\")]\n\nasync fn set_random_infixes(\n\n guild_id: u64,\n\n user: UserId,\n\n db: DbConn,\n\n cache_http: State<'_, CacheHttp>,\n\n params: Json<Vec<RandomInfixParameter>>,\n\n) -> Result<(), SettingsError> {\n\n check_guild_moderator(cache_http.inner(), &db, user.into(), GuildId(guild_id)).await?;\n\n\n\n let gid = BigDecimal::from_u64(guild_id).ok_or(SettingsError::bigdecimal_error())?;\n\n let random_infixes = params\n\n .into_inner()\n\n .into_iter()\n\n .map(|infix| models::RandomInfix {\n", "file_path": "backend/src/api/settings.rs", "rank": 40, "score": 96812.4926616614 }, { "content": " replaceSoundfile(soundfile: Soundfile) {\n\n this.sound.soundfile = soundfile;\n\n this.internalSound.soundfile = soundfile;\n", "file_path": "frontend/src/app/settings/sound-manager/sound-manager.component.ts", "rank": 41, "score": 96253.0007098256 }, { "content": " getPlayUrl(guild: Guild | string) {\n\n // We can play a sound on a different guild than where it is located\n\n const guildid = typeof guild === 'string' ? 
guild : guild.id;\n\n return `/api/guilds/${guildid}/play/${this.encodeId()}`;\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 42, "score": 95802.89652285141 }, { "content": " getDownloadUrl() {\n\n return `/api/sounds/${this.encodeId()}`;\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 43, "score": 95802.89652285141 }, { "content": "struct Handler;\n\n\n\n#[async_trait]\n\nimpl EventHandler for Handler {\n\n #[instrument(skip(self, ready))]\n\n async fn ready(&self, _: Context, ready: Ready) {\n\n info!(\"{} is connected!\", ready.user.name);\n\n }\n\n}\n\n\n\npub struct DiscordClient {\n\n pub client: Client,\n\n pub songbird: Arc<Songbird>,\n\n pub recorder: Arc<Recorder>,\n\n}\n\n\n\nimpl DiscordClient {\n\n #[instrument]\n\n pub async fn new() -> Self {\n\n let token = env::var(\"DISCORD_TOKEN\").expect(\"Expected DISCORD_TOKEN in env\");\n", "file_path": "backend/src/discord/client.rs", "rank": 44, "score": 95613.25619775182 }, { "content": "#[group]\n\n#[commands(join, leave, stop, ping, record, guildid, info)]\n\nstruct General;\n\n\n\n#[command]\n\n#[only_in(guilds)]\n\nasync fn join(ctx: &Context, msg: &Message) -> CommandResult {\n\n let guild = msg.guild(&ctx.cache).await.unwrap();\n\n let guild_id = guild.id;\n\n\n\n let connect_to = match guild\n\n .voice_states\n\n .get(&msg.author.id)\n\n .and_then(|voice_state| voice_state.channel_id)\n\n {\n\n Some(channel) => channel,\n\n None => {\n\n check_msg(msg.reply(&ctx, \":x: Not in a voice channel\").await);\n\n\n\n return Ok(());\n\n }\n\n };\n", "file_path": "backend/src/discord/commands.rs", "rank": 45, "score": 95613.25619775182 }, { "content": " replaceSoundfile(file: File, entry: SoundEntry) {\n\n this.isProcessing$.next(true);\n\n this.soundsService\n\n .uploadSound(entry.sound, file)\n\n .pipe(\n\n tap(soundfile => entry.replaceSoundfile(soundfile)),\n\n finalize(() => this.isProcessing$.next(false))\n\n )\n\n .subscribe({\n\n error: () => 
this.snackBar.open('Failed to upload soundfile', 'Damn'),\n\n });\n", "file_path": "frontend/src/app/settings/sound-manager/sound-manager.component.ts", "rank": 46, "score": 94864.0190025705 }, { "content": " trackById(_index: number, item: SoundEntry) {\n\n return item.sound.id;\n", "file_path": "frontend/src/app/settings/sound-manager/sound-manager.component.ts", "rank": 47, "score": 94864.0190025705 }, { "content": "/// Key used to put the Recorders into the serenity TypeMap\n\nstruct RecorderKey;\n\n\n\nimpl TypeMapKey for RecorderKey {\n\n type Value = Arc<Recorder>;\n\n}\n\n\n", "file_path": "backend/src/discord/recorder.rs", "rank": 48, "score": 93921.82325374614 }, { "content": "struct VoiceRecording {\n\n start: SystemTime,\n\n data: Vec<i16>,\n\n}\n\n\n\nimpl VoiceRecording {\n\n pub fn new() -> Self {\n\n Self {\n\n start: SystemTime::now(),\n\n data: Default::default(),\n\n }\n\n }\n\n}\n\n\n", "file_path": "backend/src/discord/recorder.rs", "rank": 49, "score": 93914.77012649125 }, { "content": "struct RecorderHandler {\n\n recorder: Arc<Recorder>,\n\n guild_id: GuildId,\n\n}\n\n\n\n#[async_trait]\n\nimpl VoiceEventHandler for RecorderHandler {\n\n #[instrument(skip(self, ctx))]\n\n async fn act(&self, ctx: &EventContext<'_>) -> Option<Event> {\n\n use EventContext as Ctx;\n\n match ctx {\n\n Ctx::SpeakingStateUpdate(Speaking { ssrc, user_id, .. }) => {\n\n // Discord voice calls use RTP, where every sender uses a randomly allocated\n\n // *Synchronisation Source* (SSRC) to allow receivers to tell which audio\n\n // stream a received packet belongs to. As this number is not derived from\n\n // the sender's user_id, only Discord Voice Gateway messages like this one\n\n // inform us about which random SSRC a user has been allocated. 
Future voice\n\n // packets will contain *only* the SSRC.\n\n //\n\n // You can implement logic here so that you can differentiate users'\n", "file_path": "backend/src/discord/recorder.rs", "rank": 50, "score": 93914.77012649125 }, { "content": " loadGuildSettings(guildId: string) {\n\n return this.http.get<GuildData>(`/api/guilds/${encodeURIComponent(guildId)}/settings`);\n", "file_path": "frontend/src/app/services/api.service.ts", "rank": 51, "score": 93540.94469936672 }, { "content": "export interface Soundfile {\n\n maxVolume: number;\n\n meanVolume: number;\n\n length: number;\n\n uploadedAt: number;\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 52, "score": 91643.31304681647 }, { "content": "export interface GuildData extends GuildSettings {\n\n roles: Map<string, string>;\n", "file_path": "frontend/src/app/services/api.service.ts", "rank": 53, "score": 89173.544840576 }, { "content": "/// Helper trait to add installation/creation methods to serenity's\n\n/// `ClientBuilder`.\n\npub trait RecorderInit {\n\n fn register_recorder(self, recorder: Arc<Recorder>) -> Self;\n\n}\n\n\n\nimpl RecorderInit for ClientBuilder<'_> {\n\n fn register_recorder(self, recorder: Arc<Recorder>) -> Self {\n\n self.type_map_insert::<RecorderKey>(recorder)\n\n }\n\n}\n\n\n\n/// Retrieve the Recorder State from a serenity context's\n\n/// shared key-value store.\n\npub async fn get(ctx: &Context) -> Option<Arc<Recorder>> {\n\n let data = ctx.data.read().await;\n\n\n\n data.get::<RecorderKey>().cloned()\n\n}\n", "file_path": "backend/src/discord/recorder.rs", "rank": 54, "score": 88392.82130158544 }, { "content": "import { ChangeDetectionStrategy, Component, EventEmitter, Input, Output } from '@angular/core';\n\nimport { SoundEntry } from '../sound-manager.component';\n\n\n\ntype VolumeAdjustmentMode = 'auto' | 'manual';\n\n\n\n@Component({\n\n selector: 'app-sound-details',\n\n templateUrl: './sound-details.component.html',\n\n styleUrls: 
['./sound-details.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n})\n\nexport class SoundDetailsComponent {\n\n @Input() soundEntry: SoundEntry;\n\n @Input() isBusy: boolean;\n\n @Input() isPlaying: boolean;\n\n\n\n @Output() playClick = new EventEmitter<void>();\n\n @Output() deleteClick = new EventEmitter<void>();\n\n @Output() replaceSoundfile = new EventEmitter<File>();\n\n\n\n get sound() {\n\n return this.soundEntry.sound;\n\n }\n\n\n\n set volumeAdjustmentMode(mode: VolumeAdjustmentMode) {\n\n if (mode === 'auto') {\n\n this.soundEntry.sound.volumeAdjustment = null;\n\n } else {\n\n this.soundEntry.sound.volumeAdjustment = 0;\n\n }\n\n }\n\n get volumeAdjustmentMode() {\n\n return this.soundEntry.sound.volumeAdjustment == null ? 'auto' : 'manual';\n\n }\n\n\n\n constructor() {}\n\n\n\n onImportFileChange(event: Event) {\n\n const files = (event.target as HTMLInputElement).files;\n\n if (files.length === 1) {\n\n this.replaceSoundfile.emit(files[0]);\n\n }\n\n }\n\n}\n", "file_path": "frontend/src/app/settings/sound-manager/sound-details/sound-details.component.ts", "rank": 55, "score": 88208.23318246366 }, { "content": "interface ApiRecordingUser {\n\n username: string;\n\n id: string;\n", "file_path": "frontend/src/app/services/recorder.service.ts", "rank": 56, "score": 87214.47391767574 }, { "content": "import { trigger, state, style, transition, animate } from '@angular/animations';\n\nimport { ChangeDetectionStrategy, Component, OnDestroy } from '@angular/core';\n\nimport { BehaviorSubject, Subject } from 'rxjs';\n\nimport { takeUntil, withLatestFrom } from 'rxjs/operators';\n\nimport { ErrorService } from '../services/error.service';\n\n\n\n@Component({\n\n selector: 'app-error-box',\n\n templateUrl: './error-box.component.html',\n\n styleUrls: ['./error-box.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n animations: [\n\n trigger('enterLeave', [\n\n state('*', style({ transform: 'translateY(0)' 
})),\n\n transition(':enter', [style({ transform: 'translateY(100%)' }), animate('200ms ease-out')]),\n\n transition(':leave', [animate('200ms ease-in', style({ transform: 'translateY(100%)' }))]),\n\n ]),\n\n ],\n\n})\n\nexport class ErrorBoxComponent implements OnDestroy {\n\n private onDestroy$ = new Subject<void>();\n\n errors$ = new BehaviorSubject<string[]>([]);\n\n\n\n constructor(private errorService: ErrorService) {\n\n this.errorService.errors$.pipe(takeUntil(this.onDestroy$), withLatestFrom(this.errors$)).subscribe(([error, errors]) => {\n\n this.errors$.next([...errors, error]);\n\n });\n\n }\n\n\n\n ngOnDestroy() {\n\n this.onDestroy$.next();\n\n this.onDestroy$.complete();\n\n }\n\n\n\n ignore() {\n\n this.errors$.next([]);\n\n }\n\n}\n", "file_path": "frontend/src/app/error-box/error-box.component.ts", "rank": 57, "score": 86462.29803944481 }, { "content": "import { ChangeDetectionStrategy, Component } from '@angular/core';\n\nimport { SettingsService } from 'src/app/services/settings.service';\n\n\n\n@Component({\n\n templateUrl: './user-settings.component.html',\n\n styleUrls: ['./user-settings.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n})\n\nexport class UserSettingsComponent {\n\n constructor(public settingsService: SettingsService) {}\n\n}\n", "file_path": "frontend/src/app/settings/user-settings/user-settings.component.ts", "rank": 58, "score": 85591.83608988019 }, { "content": "import { ChangeDetectionStrategy, Component, OnDestroy, OnInit } from '@angular/core';\n\nimport { MatSnackBar } from '@angular/material/snack-bar';\n\nimport { ActivatedRoute } from '@angular/router';\n\nimport { BehaviorSubject, combineLatest, from, of, ReplaySubject, Subject } from 'rxjs';\n\nimport {\n\n debounceTime,\n\n distinctUntilChanged,\n\n filter,\n\n finalize,\n\n first,\n\n map,\n\n mergeMap,\n\n pairwise,\n\n shareReplay,\n\n switchMap,\n\n takeUntil,\n\n tap,\n\n toArray,\n\n withLatestFrom,\n\n} from 
'rxjs/operators';\n\nimport { Sound, Soundfile, SoundsService } from 'src/app/services/sounds.service';\n\nimport { SettingsService } from 'src/app/services/settings.service';\n\nimport { clamp, sortBy } from 'lodash-es';\n\nimport { MatDialog } from '@angular/material/dialog';\n\nimport { SoundDeleteConfirmComponent } from './sound-delete-confirm/sound-delete-confirm.component';\n\nimport Fuse from 'fuse.js';\n\n\n\n@Component({\n\n templateUrl: './sound-manager.component.html',\n\n styleUrls: ['./sound-manager.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n})\n\nexport class SoundManagerComponent implements OnDestroy, OnInit {\n\n private onDestroy$ = new Subject<void>();\n\n\n\n get settings() {\n\n return this.settingsService.settings;\n\n }\n\n\n\n guildId$ = new ReplaySubject(1);\n\n sounds$ = new ReplaySubject<SoundEntry[]>(1);\n\n soundsWithChanges$ = this.sounds$.pipe(\n\n switchMap(sounds =>\n\n sounds.length === 0\n\n ? of([])\n\n : combineLatest(sounds.map(sound => sound.hasChanges$)).pipe(map(checks => sounds.filter((_, i) => checks[i])))\n\n ),\n\n shareReplay(1)\n\n );\n\n\n\n soundFilterString$ = new BehaviorSubject('');\n\n filteredSounds$ = combineLatest([this.soundFilterString$, this.sounds$]).pipe(\n\n debounceTime(300),\n\n map(([filterText, sounds]) => {\n\n if (filterText.length > 0) {\n\n const fuse = new Fuse(sounds, { keys: ['sound.name', 'sound.category'] });\n\n return fuse.search(filterText).map(res => res.item);\n\n } else {\n\n return sounds;\n\n }\n\n })\n\n );\n\n\n\n isSaving$ = new BehaviorSubject(false);\n\n isUploading$ = new BehaviorSubject(false);\n\n isProcessing$ = new BehaviorSubject(false); // tracks replace and delete operations\n\n\n\n hasChanges$ = this.soundsWithChanges$.pipe(map(sounds => sounds.length > 0));\n\n saveChanges$ = new Subject<void>();\n\n discardChanges$ = new Subject<void>();\n\n\n\n currentAudio$ = new BehaviorSubject<HTMLAudioElement>(null);\n\n playAudioClick$ = new 
Subject<SoundEntry>();\n\n\n\n constructor(\n\n private soundsService: SoundsService,\n\n private settingsService: SettingsService,\n\n private route: ActivatedRoute,\n\n private snackBar: MatSnackBar,\n\n private dialog: MatDialog\n\n ) {}\n\n\n\n ngOnInit() {\n\n // Update guildid and sounds on route change\n\n this.route.params.pipe(takeUntil(this.onDestroy$)).subscribe(params => {\n\n const guildId = params.guildId;\n\n this.guildId$.next(guildId);\n\n\n\n this.soundsService.sounds$\n\n .pipe(\n\n first(),\n\n map(sounds => sounds.filter(sound => sound.guildId === guildId).map(sound => new SoundEntry(this.soundsService, sound)))\n\n )\n\n .subscribe(sounds => this.sounds$.next(sounds));\n\n });\n\n\n\n // Play sounds\n\n this.playAudioClick$.pipe(takeUntil(this.onDestroy$), withLatestFrom(this.currentAudio$)).subscribe(([entry, audio]) => {\n\n if (audio) {\n\n audio.pause();\n\n } else {\n\n const newAudio = new Audio();\n\n newAudio.src = entry.sound.getDownloadUrl();\n\n this.currentAudio$.next(newAudio);\n\n }\n\n });\n\n // Delete HTMLAudioElements when a new one is played\n\n this.currentAudio$.pipe(takeUntil(this.onDestroy$), pairwise()).subscribe(([previous, current]) => {\n\n if (previous) {\n\n previous.remove();\n\n }\n\n if (current) {\n\n current.onpause = () => this.currentAudio$.next(null);\n\n current.load();\n\n current.play();\n\n }\n\n });\n\n // Set volume\n\n combineLatest([this.currentAudio$, this.settings.localVolume$])\n\n .pipe(\n\n takeUntil(this.onDestroy$),\n\n filter(([audio]) => audio != null)\n\n )\n\n .subscribe(([audio, volume]) => this.setAudioVolume(audio, volume));\n\n\n\n this.saveChanges$.pipe(takeUntil(this.onDestroy$), withLatestFrom(this.soundsWithChanges$)).subscribe(([, sounds]) => {\n\n this.isSaving$.next(true);\n\n from(sounds)\n\n .pipe(\n\n mergeMap(sound => sound.saveChanges(), 5),\n\n finalize(() => this.isSaving$.next(false))\n\n )\n\n .subscribe({\n\n error: () => this.snackBar.open('Failed to save changes to 
sounds', 'Damn'),\n\n });\n\n });\n\n this.discardChanges$\n\n .pipe(takeUntil(this.onDestroy$), withLatestFrom(this.soundsWithChanges$))\n\n .subscribe(([, sounds]) => sounds.forEach(sound => sound.discardChanges()));\n\n }\n\n\n\n onImportFileChange(event: Event, guildId: string) {\n\n const files = Array.from((event.target as HTMLInputElement).files);\n\n this.isUploading$.next(true);\n\n from(files)\n\n .pipe(\n\n mergeMap(file => {\n\n const endingIndex = file.name.lastIndexOf('.');\n\n const filename = endingIndex > 0 ? file.name.substring(0, endingIndex) : file.name;\n\n return this.soundsService\n\n .createSound(guildId, filename, '')\n\n .pipe(\n\n mergeMap(sound =>\n\n this.soundsService\n\n .uploadSound(sound, file)\n\n .pipe(map(soundfile => new SoundEntry(this.soundsService, new Sound({ ...sound, soundfile }))))\n\n )\n\n );\n\n }, 5),\n\n toArray(),\n\n withLatestFrom(this.sounds$),\n\n finalize(() => this.isUploading$.next(false))\n\n )\n\n .subscribe({\n\n next: ([newEntries, sounds]) => {\n\n this.sounds$.next([...sounds, ...sortBy(newEntries, entry => entry.sound.name.toLowerCase())]);\n\n this.snackBar.open('Upload successful');\n\n },\n\n error: () => this.snackBar.open('Upload of sounds failed!', 'Damn'),\n\n });\n\n }\n\n\n\n deleteSound(entry: SoundEntry) {\n\n this.dialog\n\n .open(SoundDeleteConfirmComponent, { data: { sound: entry.sound } })\n\n .afterClosed()\n\n .subscribe(result => {\n\n if (result) {\n\n this.isProcessing$.next(true);\n\n this.soundsService\n\n .deleteSound(entry.sound)\n\n .pipe(\n\n withLatestFrom(this.sounds$),\n\n tap(([, sounds]) => this.sounds$.next(sounds.filter(sound => sound !== entry))),\n\n finalize(() => this.isProcessing$.next(false))\n\n )\n\n .subscribe({\n\n error: () => this.snackBar.open('Failed to delete sound', 'Damn'),\n\n });\n\n }\n\n });\n\n }\n\n\n\n replaceSoundfile(file: File, entry: SoundEntry) {\n\n this.isProcessing$.next(true);\n\n this.soundsService\n\n .uploadSound(entry.sound, 
file)\n\n .pipe(\n\n tap(soundfile => entry.replaceSoundfile(soundfile)),\n\n finalize(() => this.isProcessing$.next(false))\n\n )\n\n .subscribe({\n\n error: () => this.snackBar.open('Failed to upload soundfile', 'Damn'),\n\n });\n\n }\n\n\n\n private setAudioVolume(audio: HTMLAudioElement, volume: number) {\n\n audio.volume = clamp(volume / 100, 0, 1);\n\n }\n\n\n\n ngOnDestroy() {\n\n this.onDestroy$.next();\n\n this.onDestroy$.complete();\n\n }\n\n\n\n trackById(_index: number, item: SoundEntry) {\n\n return item.sound.id;\n\n }\n\n}\n\n\n\nexport class SoundEntry {\n\n // This is the sound we started with\n\n private internalSound: Sound;\n\n // This sound is edited\n\n sound: Sound;\n\n\n\n checkChanges$ = new BehaviorSubject<void>(null);\n\n hasChanges$ = this.checkChanges$.pipe(\n\n map(\n\n () =>\n\n this.internalSound.category !== this.sound.category ||\n\n this.internalSound.name !== this.sound.name ||\n\n this.internalSound.volumeAdjustment !== this.sound.volumeAdjustment\n\n ),\n\n distinctUntilChanged(),\n\n shareReplay(1)\n\n );\n\n\n\n constructor(private soundsService: SoundsService, sound: Sound) {\n\n this.internalSound = new Sound(sound);\n\n this.sound = new Sound(sound);\n\n }\n\n\n\n saveChanges() {\n\n return of(this.sound).pipe(\n\n mergeMap(sound => this.soundsService.updateSound(sound).pipe(map(() => sound))),\n\n tap(sound => {\n\n this.internalSound = new Sound(sound);\n\n this.checkChanges$.next();\n\n })\n\n );\n\n }\n\n\n\n discardChanges() {\n\n this.sound = new Sound(this.internalSound);\n\n this.checkChanges$.next();\n\n }\n\n\n\n replaceSoundfile(soundfile: Soundfile) {\n\n this.sound.soundfile = soundfile;\n\n this.internalSound.soundfile = soundfile;\n\n }\n\n}\n", "file_path": "frontend/src/app/settings/sound-manager/sound-manager.component.ts", "rank": 59, "score": 84421.75507153981 }, { "content": "import { ChangeDetectionStrategy, Component, Inject } from '@angular/core';\n\nimport { MatDialogRef, MAT_DIALOG_DATA } from 
'@angular/material/dialog';\n\nimport { Sound } from 'src/app/services/sounds.service';\n\n\n\n@Component({\n\n templateUrl: './sound-delete-confirm.component.html',\n\n styleUrls: ['./sound-delete-confirm.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n})\n\nexport class SoundDeleteConfirmComponent {\n\n constructor(private dialogRef: MatDialogRef<SoundDeleteConfirmComponent>, @Inject(MAT_DIALOG_DATA) public data: { sound: Sound }) {}\n\n\n\n confirm() {\n\n this.dialogRef.close(true);\n\n }\n\n\n\n abort() {\n\n this.dialogRef.close();\n\n }\n\n}\n", "file_path": "frontend/src/app/settings/sound-manager/sound-delete-confirm/sound-delete-confirm.component.ts", "rank": 60, "score": 82429.32844669398 }, { "content": "fn samples_to_duration(samples: usize) -> Duration {\n\n Duration::from_nanos((samples as f64 / SAMPLE_RATE / CHANNEL_COUNT as f64 * 1e9).round() as u64)\n\n}\n", "file_path": "backend/src/discord/recorder.rs", "rank": 61, "score": 79866.03375553805 }, { "content": "fn nanos_to_samples(nanos: u128) -> usize {\n\n (nanos as f64 * 1e-9 * SAMPLE_RATE * CHANNEL_COUNT as f64).round() as usize\n\n}\n\n\n", "file_path": "backend/src/discord/recorder.rs", "rank": 62, "score": 79866.03375553805 }, { "content": "#[instrument]\n\nfn check_msg(result: SerenityResult<Message>) {\n\n if let Err(why) = result {\n\n error!(\"Error sending message: {:?}\", why);\n\n }\n\n}\n", "file_path": "backend/src/discord/commands.rs", "rank": 63, "score": 78454.66728808101 }, { "content": " setUserRoleId(roleId: string, guildId: string) {\n\n this.userIsSaving$.next('saving');\n\n this.apiService.updateGuildSettings(guildId, { userRoleId: roleId }).subscribe(\n\n () => this.userIsSaving$.next('idle'),\n\n () => this.userIsSaving$.next('error')\n\n );\n", "file_path": "frontend/src/app/settings/guild-settings/guild-settings.component.ts", "rank": 64, "score": 77777.78587393167 }, { "content": "import { ChangeDetectionStrategy, Component, EventEmitter, 
Input, OnChanges, Output, SimpleChanges } from '@angular/core';\n\nimport { Sound } from 'src/app/services/sounds.service';\n\nimport Fuse from 'fuse.js';\n\nimport { KeyCommand } from '../keybind-generator.component';\n\n\n\n@Component({\n\n selector: 'app-searchable-sound-select',\n\n templateUrl: './searchable-sound-select.component.html',\n\n styleUrls: ['./searchable-sound-select.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n})\n\nexport class SearchableSoundSelectComponent implements OnChanges {\n\n @Input() sounds: Sound[];\n\n @Input() selectedCommand: KeyCommand;\n\n @Output() selectedCommandChange = new EventEmitter<KeyCommand>();\n\n\n\n soundsFuse: Fuse<Sound>;\n\n soundSearchFilter = '';\n\n filteredSounds: Sound[];\n\n\n\n ngOnChanges(changes: SimpleChanges) {\n\n if ('sounds' in changes) {\n\n this.soundsFuse = new Fuse(this.sounds, { keys: ['name'] });\n\n this.updateFilter();\n\n }\n\n }\n\n\n\n updateFilter() {\n\n if (this.sounds == null) {\n\n return;\n\n }\n\n\n\n if (this.soundSearchFilter.length > 0) {\n\n this.filteredSounds = this.soundsFuse.search(this.soundSearchFilter).map(res => res.item);\n\n } else {\n\n this.filteredSounds = this.sounds;\n\n }\n\n }\n\n\n\n getSoundName(command: KeyCommand) {\n\n return command != null && typeof command !== 'string' ? 
command.name : '';\n\n }\n\n}\n", "file_path": "frontend/src/app/keybind-generator/searchable-sound-select/searchable-sound-select.component.ts", "rank": 65, "score": 77693.98293212867 }, { "content": "import { Injectable } from '@angular/core';\n\nimport { MatSnackBar } from '@angular/material/snack-bar';\n\nimport { CanDeactivate, ActivatedRouteSnapshot, RouterStateSnapshot } from '@angular/router';\n\nimport { combineLatest, Observable } from 'rxjs';\n\nimport { first, map } from 'rxjs/operators';\n\nimport { SoundManagerComponent } from './sound-manager.component';\n\n\n\n@Injectable({\n\n providedIn: 'root',\n\n})\n\nexport class CanDeactivateSoundManagerGuard implements CanDeactivate<SoundManagerComponent> {\n\n constructor(private snackBar: MatSnackBar) {}\n\n\n\n canDeactivate(\n\n component: SoundManagerComponent,\n\n _route: ActivatedRouteSnapshot,\n\n _state: RouterStateSnapshot\n\n ): Observable<boolean> | Promise<boolean> | boolean {\n\n return combineLatest([component.isSaving$, component.hasChanges$, component.isProcessing$, component.isUploading$]).pipe(\n\n first(),\n\n map(([isSaving, hasChanges, isProcessing, isUploading]) => {\n\n if (isSaving) {\n\n this.snackBar.open('You cannot leave this component while saving');\n\n return false;\n\n }\n\n if (isUploading) {\n\n this.snackBar.open('You cannot leave this component while uploading');\n\n return false;\n\n }\n\n if (hasChanges) {\n\n this.snackBar.open('There are sounds with outstanding changes. Please save or discard them before continuing.');\n\n return false;\n\n }\n\n if (isProcessing) {\n\n this.snackBar.open('There are sounds currently being processed. 
Please wait until that is finished.');\n\n return false;\n\n }\n\n\n\n return true;\n\n })\n\n );\n\n }\n\n}\n", "file_path": "frontend/src/app/settings/sound-manager/can-deactivate-sound-manager.guard.ts", "rank": 85, "score": 75369.17183136294 }, { "content": " showError<T>(message: string) {\n\n return catchError<T, Observable<never>>(error => {\n\n console.error('caught error', message, error);\n\n this.errorsSubject$.next(message);\n\n return throwError(error);\n\n });\n", "file_path": "frontend/src/app/services/error.service.ts", "rank": 86, "score": 71187.05110176589 }, { "content": "@Component({\n\n selector: 'app-error-box',\n\n templateUrl: './error-box.component.html',\n\n styleUrls: ['./error-box.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n animations: [\n\n trigger('enterLeave', [\n\n state('*', style({ transform: 'translateY(0)' })),\n\n transition(':enter', [style({ transform: 'translateY(100%)' }), animate('200ms ease-out')]),\n\n transition(':leave', [animate('200ms ease-in', style({ transform: 'translateY(100%)' }))]),\n\n ]),\n\n ],\n\n})\n\nexport class ErrorBoxComponent implements OnDestroy {\n\n private onDestroy$ = new Subject<void>();\n\n errors$ = new BehaviorSubject<string[]>([]);\n\n\n\n constructor(private errorService: ErrorService) {\n\n this.errorService.errors$.pipe(takeUntil(this.onDestroy$), withLatestFrom(this.errors$)).subscribe(([error, errors]) => {\n\n this.errors$.next([...errors, error]);\n\n });\n\n }\n\n\n\n ngOnDestroy() {\n\n this.onDestroy$.next();\n\n this.onDestroy$.complete();\n\n }\n\n\n\n ignore() {\n\n this.errors$.next([]);\n\n }\n", "file_path": "frontend/src/app/error-box/error-box.component.ts", "rank": 87, "score": 70243.37454250651 }, { "content": " updateSound(sound: Sound) {\n\n return this.http\n\n .put(\n\n `/api/sounds/${encodeURIComponent(sound.id)}`,\n\n { name: sound.name, category: sound.category, volumeAdjustment: sound.volumeAdjustment },\n\n { responseType: 'text' 
}\n\n )\n\n .pipe(\n\n withLatestFrom(this.sounds$),\n\n tap(([, sounds]) => {\n\n const newSounds = sounds.filter(s => sound.id !== s.id);\n\n this._sounds$.next([...newSounds, sound]);\n\n })\n\n );\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 88, "score": 69853.64611494515 }, { "content": " stopSound(guild: Guild | string) {\n\n const guildid = typeof guild === 'string' ? guild : guild.id;\n\n return this.http.post(`/api/guilds/${guildid}/stop`, {}, { responseType: 'text' });\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 89, "score": 69853.64611494515 }, { "content": " createSound(guildId: string, name: string, category: string) {\n\n return this.http\n\n .post<ApiSound>(`/api/sounds`, { guildId, name, category })\n\n .pipe(\n\n map(sound => new Sound(sound)),\n\n withLatestFrom(this.sounds$),\n\n map(([sound, sounds]) => {\n\n this._sounds$.next([sound, ...sounds]);\n\n return sound;\n\n })\n\n );\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 90, "score": 69853.64611494515 }, { "content": " playSound(sound: Sound, guild: Guild | string) {\n\n return this.http.post(sound.getPlayUrl(guild), {});\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 91, "score": 69853.64611494515 }, { "content": " uploadSound(sound: Sound, file: File) {\n\n return this.http\n\n .post<Soundfile>(`/api/sounds/${encodeURIComponent(sound.id)}`, file, {\n\n headers: {\n\n 'Content-Type': file.type,\n\n },\n\n })\n\n .pipe(\n\n withLatestFrom(this.sounds$),\n\n map(([soundfile, sounds]) => {\n\n const newSounds = sounds.filter(s => sound.id !== s.id);\n\n this._sounds$.next([...newSounds, new Sound({ ...sound, soundfile })]);\n\n return soundfile;\n\n })\n\n );\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 92, "score": 69853.64611494515 }, { "content": " deleteSound(sound: Sound) {\n\n return this.http.delete(`/api/sounds/${encodeURIComponent(sound.id)}`, { 
responseType: 'text' }).pipe(\n\n withLatestFrom(this.sounds$),\n\n tap(([, sounds]) => {\n\n this._sounds$.next(sounds.filter(s => s.id !== sound.id));\n\n })\n\n );\n", "file_path": "frontend/src/app/services/sounds.service.ts", "rank": 93, "score": 69853.64611494515 }, { "content": "const routes: Routes = [\n\n {\n\n path: '',\n\n component: SoundboardComponent,\n\n },\n\n {\n\n path: 'keybind-generator',\n\n component: KeybindGeneratorComponent,\n\n },\n\n {\n\n path: 'recorder',\n\n component: RecorderComponent,\n\n },\n\n {\n\n path: 'settings',\n\n component: SettingsComponent,\n\n children: [\n\n {\n\n path: '',\n\n pathMatch: 'full',\n\n redirectTo: 'user',\n\n },\n\n {\n\n path: 'user',\n\n component: UserSettingsComponent,\n\n },\n\n {\n\n path: 'guilds/:guildId',\n\n canActivate: [GuildPermissionGuard],\n\n children: [\n\n {\n\n path: '',\n\n pathMatch: 'full',\n\n redirectTo: 'settings',\n\n },\n\n {\n\n path: 'settings',\n\n component: GuildSettingsComponent,\n\n canDeactivate: [CanDeactivateGuildSettingsGuard],\n\n },\n\n {\n\n path: 'sounds',\n\n component: SoundManagerComponent,\n\n canDeactivate: [CanDeactivateSoundManagerGuard],\n\n },\n\n ],\n\n },\n\n ],\n\n },\n\n {\n\n path: '**',\n\n redirectTo: '',\n\n },\n", "file_path": "frontend/src/app/app-routing.module.ts", "rank": 94, "score": 69623.89808547932 }, { "content": " deleteSound(entry: SoundEntry) {\n\n this.dialog\n\n .open(SoundDeleteConfirmComponent, { data: { sound: entry.sound } })\n\n .afterClosed()\n\n .subscribe(result => {\n\n if (result) {\n\n this.isProcessing$.next(true);\n\n this.soundsService\n\n .deleteSound(entry.sound)\n\n .pipe(\n\n withLatestFrom(this.sounds$),\n\n tap(([, sounds]) => this.sounds$.next(sounds.filter(sound => sound !== entry))),\n\n finalize(() => this.isProcessing$.next(false))\n\n )\n\n .subscribe({\n\n error: () => this.snackBar.open('Failed to delete sound', 'Damn'),\n\n });\n\n }\n\n });\n", "file_path": 
"frontend/src/app/settings/sound-manager/sound-manager.component.ts", "rank": 95, "score": 69407.18553195745 }, { "content": "@Component({\n\n selector: 'app-sound-details',\n\n templateUrl: './sound-details.component.html',\n\n styleUrls: ['./sound-details.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n})\n\nexport class SoundDetailsComponent {\n\n @Input() soundEntry: SoundEntry;\n\n @Input() isBusy: boolean;\n\n @Input() isPlaying: boolean;\n\n\n\n @Output() playClick = new EventEmitter<void>();\n\n @Output() deleteClick = new EventEmitter<void>();\n\n @Output() replaceSoundfile = new EventEmitter<File>();\n\n\n\n get sound() {\n\n return this.soundEntry.sound;\n\n }\n\n\n\n set volumeAdjustmentMode(mode: VolumeAdjustmentMode) {\n\n if (mode === 'auto') {\n\n this.soundEntry.sound.volumeAdjustment = null;\n\n } else {\n\n this.soundEntry.sound.volumeAdjustment = 0;\n\n }\n\n }\n\n get volumeAdjustmentMode() {\n\n return this.soundEntry.sound.volumeAdjustment == null ? 
'auto' : 'manual';\n\n }\n\n\n\n constructor() {}\n\n\n\n onImportFileChange(event: Event) {\n\n const files = (event.target as HTMLInputElement).files;\n\n if (files.length === 1) {\n\n this.replaceSoundfile.emit(files[0]);\n\n }\n\n }\n", "file_path": "frontend/src/app/settings/sound-manager/sound-details/sound-details.component.ts", "rank": 96, "score": 69407.18553195745 }, { "content": " constructor(private errorService: ErrorService) {\n\n this.errorService.errors$.pipe(takeUntil(this.onDestroy$), withLatestFrom(this.errors$)).subscribe(([error, errors]) => {\n\n this.errors$.next([...errors, error]);\n\n });\n", "file_path": "frontend/src/app/error-box/error-box.component.ts", "rank": 97, "score": 69327.37044413747 }, { "content": " ignore() {\n\n this.errors$.next([]);\n", "file_path": "frontend/src/app/error-box/error-box.component.ts", "rank": 98, "score": 69327.37044413747 }, { "content": "@Component({\n\n templateUrl: './user-settings.component.html',\n\n styleUrls: ['./user-settings.component.scss'],\n\n changeDetection: ChangeDetectionStrategy.OnPush,\n\n})\n\nexport class UserSettingsComponent {\n\n constructor(public settingsService: SettingsService) {}\n", "file_path": "frontend/src/app/settings/user-settings/user-settings.component.ts", "rank": 99, "score": 69327.37044413747 } ]
Rust
tests/board.rs
SamRond/alpha-rust
434e6cc4c95ebd337f021f64c555b70c012010f4
mod utils; #[cfg(test)] mod tests { use alpha_rust::Board; use std::sync::Once; static INIT: Once = Once::new(); fn init() { INIT.call_once(|| { crate::utils::set_panic_hook(); }); } #[test] fn test_board_init() { init(); let board = Board::new("".to_string()); print!("\n\n"); print!("Checking if FEN is set to correct default value... "); assert_eq!(board.get_fen(), "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1"); println!("true"); print!("Checking if white pieces are correctly initialized... "); assert_eq!(board.get_white_pieces()[0].len(), 8); assert_eq!(board.get_white_pieces()[1].len(), 2); assert_eq!(board.get_white_pieces()[2].len(), 2); assert_eq!(board.get_white_pieces()[3].len(), 2); assert_eq!(board.get_white_pieces()[4].len(), 1); assert_eq!(board.get_white_pieces()[5].len(), 1); println!("true"); print!("Checking if black pieces are correctly initialized... "); assert_eq!(board.get_black_pieces()[0].len(), 8); assert_eq!(board.get_black_pieces()[1].len(), 2); assert_eq!(board.get_black_pieces()[2].len(), 2); assert_eq!(board.get_black_pieces()[3].len(), 2); assert_eq!(board.get_black_pieces()[4].len(), 1); assert_eq!(board.get_black_pieces()[5].len(), 1); println!("true"); print!("Checking if black king is in correct position... "); assert_eq!(board.get_black_pieces()[5][0].get_position(), alpha_rust::Coordinates { rank: 8, file: 5}); println!("true"); print!("Checking if white king is in correct position... "); assert_eq!(board.get_white_pieces()[5][0].get_position(), alpha_rust::Coordinates { rank: 1, file: 5}); println!("true"); print!("Checking if e2 pawn is in correct position... "); assert_eq!(board.get_white_pieces()[0][4].get_position(), alpha_rust::Coordinates { rank: 2, file: 5}); println!("true"); print!("Checking if e7 pawn is in correct position... 
"); assert_eq!(board.get_black_pieces()[0][4].get_position(), alpha_rust::Coordinates { rank: 7, file: 5}); println!("true"); print!("\n\n"); } #[test] fn test_board_init_with_fen() { init(); let board = Board::new("rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2".to_string()); print!("Checking if FEN is set to correct value... "); assert_eq!(board.get_fen(), "rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2"); println!("true"); print!("Checking if white e pawn is in correct position... "); assert_eq!(board.get_white_pieces()[0][0].get_position(), alpha_rust::Coordinates { rank: 4, file: 5}); println!("true"); print!("Checking if black c pawn is in correct position... "); assert_eq!(board.get_black_pieces()[0][7].get_position(), alpha_rust::Coordinates { rank: 5, file: 3}); println!("true"); print!("Checking if white knight is in correct position... "); assert_eq!(board.get_white_pieces()[1][0].get_position(), alpha_rust::Coordinates { rank: 3, file: 6}); println!("true"); } #[test] fn test_board_init_with_two_queens() { init(); let board = Board::new("rnbqkbnr/pp1ppqpp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2".to_string()); print!("Checking if FEN is set to correct value... "); assert_eq!(board.get_fen(), "rnbqkbnr/pp1ppqpp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2"); println!("true"); print!("Checking if white a pawn is in correct position... "); assert_eq!(board.get_white_pieces()[0][0].get_position(), alpha_rust::Coordinates { rank: 4, file: 5}); println!("true"); print!("Checking if black h pawn is in correct position... "); assert_eq!(board.get_black_pieces()[0][6].get_position(), alpha_rust::Coordinates { rank: 5, file: 3}); println!("true"); print!("Checking if white knight is in correct position... "); assert_eq!(board.get_white_pieces()[1][0].get_position(), alpha_rust::Coordinates { rank: 3, file: 6}); println!("true"); print!("Checking if first black queen is in correct position... 
"); assert_eq!(board.get_black_pieces()[4][0].get_position(), alpha_rust::Coordinates { rank: 8, file: 4}); println!("true"); print!("Checking if second black queen is in correct position... "); assert_eq!(board.get_black_pieces()[4][1].get_position(), alpha_rust::Coordinates { rank: 7, file: 6}); println!("true"); } #[test] fn test_pawn_move() { init(); let mut board = Board::new("".to_string()); print!("\n\n"); print!("Checking if pawn move 1. e4 is successful... "); let pawn = &mut board.get_white_pieces()[0][4]; let mv = board.make_move(pawn, 4, 5); assert!(mv); assert_eq!(board.find_piece_by_coords(4, 5).unwrap(), pawn); println!("true"); } #[test] fn test_knight_move() { let mut board = Board::new("".to_string()); print!("\n\n"); print!("Checking if knight move is successful... "); let knight = &mut board.get_white_pieces()[1][1]; let mv = board.make_move(knight, 3, 6); assert!(mv); let res = board.find_piece_by_coords(3, 6).unwrap(); assert_eq!(res.get_position(), board.get_white_pieces()[1][1].get_position()); println!("true"); print!("\n\n"); } #[test] fn test_for_in_check() { let mut board = Board::new("".to_string()); print!("\n\n"); let w_pawn = &mut board.get_white_pieces()[0][4]; board.make_move(w_pawn, 4, 5); let b_pawn = &mut board.get_black_pieces()[0][5]; board.make_move(b_pawn, 5, 6); let w_queen = &mut board.get_white_pieces()[4][0]; board.make_move(w_queen, 5, 8); } }
mod utils; #[cfg(test)] mod tests { use alpha_rust::Board; use std::sync::Once; static INIT: Once = Once::new(); fn init() { INIT.call_once(|| { crate::utils::set_panic_hook(); }); } #[test] fn test_board_init() { init(); let board = Board::new("".to_string()); print!("\n\n"); print!("Checking if FEN is set to correct default value... "); assert_eq!(board.get_fen(), "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1"); println!("true"); print!("Checking if white pieces are correctly initialized... "); assert_eq!(board.get_white_pieces()[0].len(), 8); assert_eq!(board.get_white_pieces()[1].len(), 2); assert_eq!(board.get_white_pieces()[2].len(), 2); assert_eq!(board.get_white_pieces()[3].len(), 2); assert_eq!(board.get_white_pieces()[4].len(), 1); assert_eq!(board.get_white_pieces()[5].len(), 1); println!("true"); print!("Checking if black pieces are correctly initialized... "); assert_eq!(board.get_black_pieces()[0].len(), 8); assert_eq!(board.get_black_pieces()[1].len(), 2); assert_eq!(board.get_black_pieces()[2].len(), 2); assert_eq!(board.get_black_pieces()[3].len(), 2); assert_eq!(board.get_black_pieces()[4].len(), 1); assert_eq!(board.get_black_pieces()[5].len(), 1); println!("true"); print!("Checking if black king is in correct position... "); assert_eq!(board.get_black_pieces()[5][0].get_position(), alpha_rust::Coordinates { rank: 8, file: 5}); println!("true"); print!("Checking if white king is in correct position... "); assert_eq!(board.get_white_pieces()[5][0].get_position(), alpha_rust::Coordinates { rank: 1, file: 5}); println!("true"); print!("Checking if e2 pawn is in correct position... "); assert_eq!(board.get_white_pieces()[0][4].get_position(), alpha_rust::Coordinates { rank: 2, file: 5}); println!("true"); print!("Checking if e7 pawn is in correct position... 
"); assert_eq!(board.get_black_pieces()[0][4].get_position(), alpha_rust::Coordinates { rank: 7, file: 5}); println!("true"); print!("\n\n"); } #[test] fn test_board_init_with_fen() { init(); let board = Board::new("rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2".to_string()); print!("Checking if FEN is set to correct value... "); assert_eq!(board.get_fen(), "rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2"); println!("true"); print!("Checking if white e pawn is in correct position... "); assert_eq!(board.get_white_pieces()[0][0].get_position(), alpha_rust::Coordinates { rank: 4, file: 5}); println!("true"); print!("Checking if black c pawn is in correct position... "); assert_eq!(board.get_black_pieces()[0][7].get_position(), alpha_rust::Coordinates { rank: 5, file: 3}); println!("true"); print!("Checking if white knight is in correct position... "); assert_eq!(board.get_white_pieces()[1][0].get_position(), alpha_rust::Coordinates { rank: 3, file: 6}); println!("true"); } #[test] fn test_board_init_with_two_queens() { init(); let board = Board::new("rnbqkbnr/pp1ppqpp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2".to_string()); print!("Checking if FEN is set to correct value... "); assert_eq!(board.get_fen(), "rnbqkbnr/pp1ppqpp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b KQkq - 1 2"); println!("true"); print!("Checking if white a pawn is in correct position... "); assert_eq!(board.get_white_pieces()[0][0].get_position(), alpha_rust::Coordinates { rank: 4, file: 5}); println!("true"); print!("Checking if black h pawn is in correct position... "); assert_eq!(board.get_black_pieces()[0][6].get_position(), alpha_rust::Coordinates { rank: 5, file: 3}); println!("true"); print!("Checking if white knight is in correct position... "); assert_eq!(board.get_white_pieces()[1][0].get_position(), alpha_rust::Coordinates { rank: 3, file: 6}); println!("true"); print!("Checking if first black queen is in correct position... 
"); assert_eq!(board.get_black_pieces()[4][0].get_position(), alpha_rust::Coordinates { rank: 8, file: 4}); println!("true"); print!("Checking if second black queen is in correct position... "); assert_eq!(board.get_black_pieces()[4][1].get_position(), alpha_rust::Coordinates { rank: 7, file: 6}); println!("true"); } #[test] fn test_pawn_move() { init(); let mut board = Board::new("".to_string()); print!("\n\n"); print!("Checking if pawn move 1. e4 is successful... "); let pawn = &mut board.get_white_pieces()[0][4];
#[test] fn test_knight_move() { let mut board = Board::new("".to_string()); print!("\n\n"); print!("Checking if knight move is successful... "); let knight = &mut board.get_white_pieces()[1][1]; let mv = board.make_move(knight, 3, 6); assert!(mv); let res = board.find_piece_by_coords(3, 6).unwrap(); assert_eq!(res.get_position(), board.get_white_pieces()[1][1].get_position()); println!("true"); print!("\n\n"); } #[test] fn test_for_in_check() { let mut board = Board::new("".to_string()); print!("\n\n"); let w_pawn = &mut board.get_white_pieces()[0][4]; board.make_move(w_pawn, 4, 5); let b_pawn = &mut board.get_black_pieces()[0][5]; board.make_move(b_pawn, 5, 6); let w_queen = &mut board.get_white_pieces()[4][0]; board.make_move(w_queen, 5, 8); } }
let mv = board.make_move(pawn, 4, 5); assert!(mv); assert_eq!(board.find_piece_by_coords(4, 5).unwrap(), pawn); println!("true"); }
function_block-function_prefix_line
[ { "content": "pub fn set_panic_hook() {\n\n // When the `console_error_panic_hook` feature is enabled, we can call the\n\n // `set_panic_hook` function at least once during initialization, and then\n\n // we will get better error messages if our code ever panics.\n\n //\n\n // For more details see\n\n // https://github.com/rustwasm/console_error_panic_hook#readme\n\n #[cfg(feature = \"console_error_panic_hook\")]\n\n console_error_panic_hook::set_once();\n\n}\n", "file_path": "tests/utils.rs", "rank": 0, "score": 75650.15541114115 }, { "content": "#[wasm_bindgen_test]\n\nfn pass() {\n\n assert_eq!(1 + 1, 2);\n\n}\n", "file_path": "tests/web.rs", "rank": 1, "score": 50580.22727161949 }, { "content": "let board = new wasm.BoardSingleton();\n", "file_path": "www/index.js", "rank": 11, "score": 27670.61460520453 }, { "content": "//! Test suite for the Web and headless browsers.\n\n\n\n#![cfg(target_arch = \"wasm32\")]\n\n\n\nextern crate wasm_bindgen_test;\n\nuse wasm_bindgen_test::*;\n\n\n\nwasm_bindgen_test_configure!(run_in_browser);\n\n\n\n#[wasm_bindgen_test]\n", "file_path": "tests/web.rs", "rank": 12, "score": 22052.810155237505 }, { "content": " // mostly exists for testing; returns a cloned vec of the white pieces\n\n pub fn get_black_pieces(&self) -> [Vec<Piece>; 6] {\n\n self.black_pieces.clone()\n\n }\n\n\n\n pub fn new(fen_in: String) -> Board {\n\n let mut fen = fen_in;\n\n\n\n if fen.is_empty() {\n\n fen = String::from(\"rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1\");\n\n }\n\n\n\n // generate pieces\n\n let mut ret = Board {\n\n fen: String::new(),\n\n white_pieces: Board::init_white_pieces(),\n\n black_pieces: Board::init_black_pieces()\n\n };\n\n\n\n ret.set_fen(fen);\n", "file_path": "src/lib.rs", "rank": 13, "score": 17.960229827680426 }, { "content": " white_pieces\n\n }\n\n\n\n // Fills 2d array of black pieces\n\n //\n\n // 0: pawns\n\n // 1: knights\n\n // 2: bishops\n\n // 3: rooks\n\n // 4: queens\n\n // 5: kings\n\n fn 
init_black_pieces() -> [Vec<Piece>; 6] {\n\n let mut black_pieces = [\n\n Vec::with_capacity(8), // max number of pawns is 8\n\n Vec::with_capacity(10), // max number of knights, bishops, and rooks is 10 (2 on board + 8 promotions)\n\n Vec::with_capacity(10),\n\n Vec::with_capacity(10),\n\n Vec::with_capacity(9), // max number of queens on the board (1 on board + 8 promotions)\n\n Vec::with_capacity(1), // max number of kings on the board\n\n ]; \n", "file_path": "src/lib.rs", "rank": 14, "score": 17.730245492457627 }, { "content": "\n\n // Iterates through the FEN and properly sets the coordinates of each piece\n\n fn set_piece_coords(&mut self) {\n\n let mut rank: i32 = 8;\n\n let mut file: i32 = 0;\n\n \n\n let mut white:bool;\n\n\n\n // keeps track of the number of each piece in the array of pieces, and subtracts for this number for each one that has been seen\n\n // [pawns, knights, bishops, rooks, queen]\n\n let mut white_piece_counts = [0, 0, 0, 0, 0];\n\n let mut black_piece_counts = [0, 0, 0, 0, 0];\n\n\n\n // populate count arrays \n\n self.count_pieces(&self.white_pieces, &mut white_piece_counts);\n\n self.count_pieces(&self.black_pieces, &mut black_piece_counts);\n\n\n\n // indexes in 2d array of pieces\n\n // 0: pawns\n\n // 1: knights\n", "file_path": "src/lib.rs", "rank": 15, "score": 17.538571089250244 }, { "content": " ret.set_piece_coords();\n\n\n\n return ret;\n\n }\n\n\n\n // Fills 2d array of white pieces\n\n //\n\n // 0: pawns\n\n // 1: knights\n\n // 2: bishops\n\n // 3: rooks\n\n // 4: queens\n\n // 5: kings\n\n fn init_white_pieces() -> [Vec<Piece>; 6] {\n\n let mut white_pieces = [\n\n Vec::with_capacity(8), // max number of pawns is 8\n\n Vec::with_capacity(10), // max number of knights, bishops, and rooks is 10 (2 on board + 8 promotions)\n\n Vec::with_capacity(10),\n\n Vec::with_capacity(10),\n\n Vec::with_capacity(9), // max number of queens on the board (1 on board + 8 promotions)\n", "file_path": "src/lib.rs", "rank": 16, "score": 
17.28190988622773 }, { "content": "\n\n // Black pieces\n\n black_pieces: [Vec<Piece>; 6]\n\n}\n\n\n\nimpl Board {\n\n pub fn get_fen(&self) -> String {\n\n self.fen.clone()\n\n }\n\n\n\n pub fn set_fen(&mut self, fen_in: String) {\n\n self.fen = fen_in;\n\n self.set_piece_coords()\n\n }\n\n\n\n // mostly exists for testing; returns a cloned vec of the white pieces\n\n pub fn get_white_pieces(&self) -> [Vec<Piece>; 6] {\n\n self.white_pieces.clone()\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 17, "score": 15.951460504864919 }, { "content": "\n\n let new_rank_2 = &Board::replace_nth_char(ranks[rank as usize], (old_file - 1) as usize, '1')[..];\n\n ranks[rank as usize] = new_rank_2;\n\n\n\n // this converts the halfmoves to an int, adds 1, parses back to a String, and then using the shorthand &(value)[..] converts to &str\n\n let mut halfmoves = &((&mut fields[4]).parse::<i32>().unwrap() + 1).to_string()[..];\n\n if piece.kind != PieceType::Pawn {\n\n fields[4] = &mut halfmoves;\n\n }\n\n\n\n // this converts the fullmoves to an int, adds 1, parses back to a String, and then using the shorthand &(value)[..] 
converts to &str\n\n let mut fullmoves = &((&mut fields[5]).parse::<i32>().unwrap() + 1).to_string()[..];\n\n if piece.color != Color::Black {\n\n fields[5] = &mut fullmoves;\n\n }\n\n\n\n // update \"to move\" value\n\n if piece.color == Color::White {\n\n fields[2] = \"b\";\n\n } else {\n", "file_path": "src/lib.rs", "rank": 18, "score": 15.270175440546577 }, { "content": " }\n\n }\n\n\n\n // index positions of pieces in arr of pieces\n\n // 0: pawns\n\n // 1: knights\n\n // 2: bishops\n\n // 3: rooks\n\n // 4: queens\n\n // 5: kings\n\n\n\n // update missing pieces to have rank/file of 0 (lost)\n\n \n\n for i in 0..5 {\n\n while black_piece_counts[i] > 0 {\n\n let index = self.black_pieces[i].len() - black_piece_counts[i];\n\n self.black_pieces[i][index].rank = 0;\n\n self.black_pieces[i][index].file = 0;\n\n\n\n black_piece_counts[i] -= 1;\n", "file_path": "src/lib.rs", "rank": 19, "score": 14.511153669775254 }, { "content": " // 2: bishops\n\n // 3: rooks\n\n // 4: queens\n\n // 5: kings\n\n\n\n for ch in self.fen.chars() {\n\n white = ch.is_ascii_uppercase();\n\n \n\n let pieces;\n\n let counts;\n\n \n\n if white {\n\n pieces = &mut (self.white_pieces);\n\n counts = &mut white_piece_counts;\n\n } else {\n\n pieces = &mut (self.black_pieces);\n\n counts = &mut black_piece_counts;\n\n };\n\n\n\n if ch.is_ascii_digit() {\n", "file_path": "src/lib.rs", "rank": 20, "score": 14.503816067368236 }, { "content": " // check that given rank/file are in the list of valid moves\n\n // if not, return false\n\n let moves = self.get_valid_moves(piece);\n\n if !moves.iter().any(|&i| i == (rank, file)) { return false; }\n\n\n\n // separate the six whitespace-separated fields in the FEN string\n\n let mut fields = self.fen.split_whitespace().collect::<Vec<&str>>();\n\n \n\n // check if the correct side is trying to move\n\n // if not, return false\n\n if !(self.get_side_to_move() == piece.color) {\n\n return false;\n\n }\n\n // single out the first field, the position 
section\n\n let mut ranks = fields[0].split('/').collect::<Vec<&str>>();\n\n\n\n // save these to update \n\n let old_rank = piece.rank;\n\n let old_file = piece.file;\n\n \n", "file_path": "src/lib.rs", "rank": 21, "score": 14.29808234573137 }, { "content": "\n\n let castling = fields[2];\n\n \n\n return (castling.contains(king), castling.contains(queen));\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_get_castle_ability() {\n\n let mut board = Board::new(\"\".to_string());\n\n assert_eq!(board.get_castle_ability(Color::White), (true, true));\n\n assert_eq!(board.get_castle_ability(Color::Black), (true, true));\n\n\n\n board.set_fen(\"rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b Kq - 1 2\".to_string());\n\n assert_eq!(board.get_castle_ability(Color::White), (true, false));\n\n assert_eq!(board.get_castle_ability(Color::Black), (false, true));\n\n\n\n board.set_fen(\"rnbqkbnr/pp1ppppp/8/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R b - - 1 2\".to_string());\n\n assert_eq!(board.get_castle_ability(Color::White), (false, false));\n\n assert_eq!(board.get_castle_ability(Color::Black), (false, false));\n\n }\n\n}", "file_path": "src/lib.rs", "rank": 22, "score": 14.237285902210207 }, { "content": "\n\n // // takes in rank/file coordinates, and returns the optional tuple (white:boolean, index:i32)\n\n pub fn find_piece_by_coords(&self, rank:i32, file:i32) -> Option<&Piece> {\n\n for i in 0..6 {\n\n for p in self.white_pieces[i].iter() {\n\n if p.rank == rank && p.file == file { return Some(p); }\n\n }\n\n \n\n for p in self.black_pieces[i].iter() {\n\n if p.rank == rank && p.file == file { return Some(p); }\n\n }\n\n }\n\n\n\n return None;\n\n }\n\n\n\n // moves a given piece to the specified rank/file\n\n // eliminates any pieces that exist there, and updates the FEN\n\n // returns true if move is successfully made\n\n pub fn make_move(&mut self, piece:&mut Piece, rank:i32, file:i32) -> bool {\n", "file_path": "src/lib.rs", "rank": 
23, "score": 13.519566411113885 }, { "content": " fn count_pieces(&self, pieces:&[Vec<Piece>; 6], counts:&mut [usize; 5]) {\n\n for i in 0..5 {\n\n counts[i] = pieces[i].len();\n\n }\n\n }\n\n\n\n // given a piece, return a vec of all valid squares for it\n\n fn get_valid_moves(&self, piece:&Piece) -> Vec<(i32, i32)> {\n\n let mut coords:Vec<(i32, i32)> = Vec::new();\n\n\n\n // if piece is not on the board, it has no valid moves\n\n if piece.rank == 0 || piece.file == 0 { return coords; }\n\n\n\n // if the king is in check, nobody else can move unless they're blocking check or attacker is eliminated\n\n // also must eliminate exposing king to check\n\n\n\n match piece.kind {\n\n PieceType::Pawn => {\n\n // team indicates pawn direction; pawns can only move forward and therefore movement is team dependent\n\n // white moves up the board, therefore direction is positive\n", "file_path": "src/lib.rs", "rank": 24, "score": 13.433096001603033 }, { "content": " fn get_side_to_move(&self) -> Color {\n\n // break up FEN into six whitespace-delimited sections\n\n let fields = self.fen.split_whitespace().collect::<Vec<&str>>();\n\n\n\n if fields[1] == \"w\" {\n\n return Color::White;\n\n } else {\n\n return Color::Black;\n\n }\n\n }\n\n\n\n fn get_castle_ability(&self, color:Color) -> (bool, bool) {\n\n let fields = self.fen.split_whitespace().collect::<Vec<&str>>();\n\n let mut king = 'K';\n\n let mut queen = 'Q';\n\n\n\n if color == Color::Black {\n\n king = 'k';\n\n queen = 'q';\n\n }\n", "file_path": "src/lib.rs", "rank": 25, "score": 12.947333114333613 }, { "content": " Vec::with_capacity(1), // max number of kings on the board\n\n ]; \n\n\n\n white_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::White,\n\n rank: 
0,\n\n file: 0\n", "file_path": "src/lib.rs", "rank": 26, "score": 11.879899546276516 }, { "content": "// mostly exists as a wrapper class so Javascript can interact with the board\n\n//\n\n// complex values (vec, arr, etc.) cannot be passed directly to javascript, \n\n// so no methods in this struct can use them directly as params or return values\n\n#[wasm_bindgen]\n\nimpl BoardSingleton {\n\n #[wasm_bindgen(constructor)]\n\n pub fn new() -> BoardSingleton {\n\n let singleton = BoardSingleton {\n\n board: Board::new(\"\".to_string())\n\n };\n\n\n\n return singleton;\n\n }\n\n\n\n pub fn set_fen(&mut self, fen: &str) {\n\n self.board.set_fen(fen.to_string());\n\n }\n\n\n\n pub fn get_board_string(&self) -> String {\n", "file_path": "src/lib.rs", "rank": 27, "score": 11.482981910587684 }, { "content": "use core::str;\n\n\n\nuse wasm_bindgen::prelude::*;\n\n\n\n// When the `wee_alloc` feature is enabled, use `wee_alloc` as the global\n\n// allocator.\n\n#[cfg(feature = \"wee_alloc\")]\n\n#[global_allocator]\n\nstatic ALLOC: wee_alloc::WeeAlloc = wee_alloc::WeeAlloc::INIT;\n\n\n\n#[wasm_bindgen]\n\n#[derive(Clone, Copy, PartialEq, Debug)]\n\npub enum PieceType {\n\n Pawn,\n\n Knight,\n\n Bishop,\n\n Rook,\n\n Queen,\n\n King\n\n}\n", "file_path": "src/lib.rs", "rank": 28, "score": 11.106499066419733 }, { "content": " }\n\n\n\n while white_piece_counts[i] > 0 {\n\n let index = self.white_pieces[i].len() - white_piece_counts[i];\n\n self.white_pieces[i][index].rank = 0;\n\n self.white_pieces[i][index].file = 0;\n\n\n\n white_piece_counts[i] -= 1;\n\n }\n\n }\n\n }\n\n\n\n // updates piece coordinates for set_piece_coords method\n\n fn update_piece_coordinates(pieces:&mut [Vec<Piece>; 6], counts:&mut [usize; 5], index:usize, white:bool, rank:i32, file:i32) {\n\n if counts[index] == 0 {\n\n let kind = pieces[index][0].kind.clone();\n\n\n\n let mut piece = Piece {\n\n kind,\n\n color: Color::Black,\n", "file_path": "src/lib.rs", "rank": 29, "score": 10.8303098399383 }, { 
"content": " },\n\n 'n' => {\n\n Board::update_piece_coordinates(pieces, counts, 1, white, rank, file);\n\n },\n\n 'b' => {\n\n Board::update_piece_coordinates(pieces, counts, 2, white, rank, file);\n\n },\n\n 'r' => {\n\n Board::update_piece_coordinates(pieces, counts, 3, white, rank, file);\n\n },\n\n 'q' => {\n\n Board::update_piece_coordinates(pieces, counts, 4, white, rank, file);\n\n },\n\n 'k' => {\n\n pieces[5][0].rank = rank;\n\n pieces[5][0].file = file;\n\n }\n\n ' ' => break, // space indicates the end of the position section\n\n _ => continue\n\n }\n", "file_path": "src/lib.rs", "rank": 30, "score": 10.738124671401476 }, { "content": " kind: PieceType::Bishop,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[4].push(Piece {\n\n kind: PieceType::Queen,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[5].push(Piece {\n\n kind: PieceType::King,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n\n\n black_pieces\n\n }\n", "file_path": "src/lib.rs", "rank": 31, "score": 10.37427877491464 }, { "content": " });\n\n white_pieces[2].push(Piece {\n\n kind: PieceType::Bishop,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[4].push(Piece {\n\n kind: PieceType::Queen,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[5].push(Piece {\n\n kind: PieceType::King,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n\n", "file_path": "src/lib.rs", "rank": 32, "score": 10.101038194857308 }, { "content": " rank: 0,\n\n file: 0\n\n };\n\n\n\n if white { piece.color = Color::White }\n\n\n\n\n\n pieces[index].push(piece);\n\n counts[index] += 1;\n\n\n\n println!(\"Added new piece of type: {:?}\", kind);\n\n }\n\n\n\n let index2 = pieces[index].len() - counts[index];\n\n counts[index] -= 1;\n\n pieces[index][index2].rank = rank;\n\n pieces[index][index2].file = file;\n\n }\n\n\n\n // counts array indexes: [pawns, rooks, knights, bishops, queen]\n", 
"file_path": "src/lib.rs", "rank": 33, "score": 10.068185922817086 }, { "content": " kind: PieceType::Pawn,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::Black,\n", "file_path": "src/lib.rs", "rank": 34, "score": 9.7859200542416 }, { "content": "\n\n black_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[0].push(Piece {\n", "file_path": "src/lib.rs", "rank": 35, "score": 9.713565324252318 }, { "content": " });\n\n white_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[0].push(Piece {\n", "file_path": "src/lib.rs", "rank": 36, "score": 9.553307321522224 }, { "content": " // actually make the move here. Don't forget to increment the halfmove and full move counters (fields[4] and fields[5] respectively)\n\n piece.rank = rank;\n\n piece.file = file;\n\n\n\n // replaces old rank char with 1. numbers still need to be collapsed (i.e. 
1,2 needs to be collapsed to 3)\n\n let new_rank = &Board::replace_nth_char(ranks[old_rank as usize], (old_file - 1) as usize, '1')[..];\n\n ranks[old_rank as usize] = new_rank;\n\n\n\n let new_position = ranks[rank as usize].chars().nth((file - 1) as usize).unwrap();\n\n\n\n if new_position.is_ascii_digit() {\n\n let num = new_position.to_digit(10).unwrap();\n\n if num > 1 {\n\n // needs to split the int into two sides, if the piece isn't moving to the edge of the space the int covers\n\n }\n\n } else {\n\n // capture a piece\n\n }\n\n\n\n // also needs to handle promotion\n", "file_path": "src/lib.rs", "rank": 37, "score": 9.507669855601197 }, { "content": " });\n\n black_pieces[1].push(Piece {\n\n kind: PieceType::Knight,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[1].push(Piece {\n\n kind: PieceType::Knight,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[2].push(Piece {\n\n kind: PieceType::Bishop,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[2].push(Piece {\n", "file_path": "src/lib.rs", "rank": 38, "score": 9.423633081275899 }, { "content": " rank: 0,\n\n file: 0\n\n });\n\n white_pieces[1].push(Piece {\n\n kind: PieceType::Knight,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[1].push(Piece {\n\n kind: PieceType::Knight,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[2].push(Piece {\n\n kind: PieceType::Bishop,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n", "file_path": "src/lib.rs", "rank": 39, "score": 9.413825409841811 }, { "content": " fields[2] = \"w\";\n\n }\n\n \n\n // rejoin ranks with '/' delimeter and assign to dereferenced position_section (fields[0])\n\n let new_position = ranks.join(\"/\");\n\n fields[0] = new_position.as_str();\n\n\n\n\n\n // rejoin FEN fields with a whitespace delimeter\n\n let fen = fields.join(\" \");\n\n self.set_fen(fen);\n\n\n\n return true;\n\n }\n\n\n\n // utility to 
replace the nth character in a &str\n\n fn replace_nth_char(s:&str, index:usize, newchar:char) -> String {\n\n s.chars().enumerate().map(|(i,c)| if i == index { newchar } else { c }).collect()\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 40, "score": 9.356896482071026 }, { "content": " kind: PieceType::Pawn,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[3].push(Piece {\n\n kind: PieceType::Rook,\n\n color: Color::White,\n\n rank: 0,\n\n file: 0\n\n });\n\n white_pieces[3].push(Piece {\n\n kind: PieceType::Rook,\n\n color: Color::White,\n", "file_path": "src/lib.rs", "rank": 41, "score": 8.980240349532064 }, { "content": " let mut string = \"<table><tbody>\".to_string();\n\n let mut char:char;\n\n \n\n for i in (1..9).rev() {\n\n string += \"<tr>\";\n\n \n\n for j in 1..9 {\n\n match self.board.find_piece_by_coords(i, j) {\n\n Some(x) => {\n\n match x.kind {\n\n PieceType::Pawn => {\n\n char = 'p';\n\n },\n\n PieceType::Rook => {\n\n char = 'r';\n\n },\n\n PieceType::Knight => {\n\n char = 'n';\n\n },\n\n PieceType::Bishop => {\n", "file_path": "src/lib.rs", "rank": 42, "score": 8.881716080734405 }, { "content": " rank: 0,\n\n file: 0\n\n });\n\n black_pieces[0].push(Piece {\n\n kind: PieceType::Pawn,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[3].push(Piece {\n\n kind: PieceType::Rook,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n\n });\n\n black_pieces[3].push(Piece {\n\n kind: PieceType::Rook,\n\n color: Color::Black,\n\n rank: 0,\n\n file: 0\n", "file_path": "src/lib.rs", "rank": 43, "score": 8.813355323517078 }, { "content": " // if char is a number, it represents empty spaces\n\n // thus, any number should increment the 'file' counter\n\n //\n\n // convert the char to an integer and add it to 'file'\n\n file += match ch.to_digit(10) {\n\n None => 0,\n\n Some(x) => x as i32, 
// x is now an integer value\n\n };\n\n } else {\n\n // index positions of pieces in arr of pieces\n\n \n\n file += 1; // all match arms except '/' do this\n\n\n\n match ch.to_ascii_lowercase() {\n\n '/' => {\n\n rank -= 1;\n\n file = 0;\n\n },\n\n 'p' => {\n\n Board::update_piece_coordinates(pieces, counts, 0, white, rank, file);\n", "file_path": "src/lib.rs", "rank": 44, "score": 8.433882227700176 }, { "content": " if y.color == piece.color {\n\n coords.push((piece.rank - x, piece.file));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {d = false;}\n\n else {coords.push((piece.rank - x, piece.file))}\n\n }\n\n }\n\n }\n\n },\n\n PieceType::Queen => {\n\n let mut r = true;\n\n let mut u = true;\n\n let mut l = true;\n\n let mut d = true;\n\n let mut quad_one = true;\n\n let mut quad_two = true;\n", "file_path": "src/lib.rs", "rank": 45, "score": 8.251549761075411 }, { "content": " let team;\n\n if piece.color == Color::White {\n\n team = 1;\n\n } else {\n\n team = -1;\n\n }\n\n\n\n // each var represents a possible movement of the pawn\n\n let one_space = (piece.rank + team, piece.file);\n\n let two_space = (piece.rank + 2 * team, piece.file);\n\n let capture_square_1 = (piece.rank + team, piece.file - 1);\n\n let capture_square_2 = (piece.rank + team, piece.file + 1);\n\n\n\n // for pawn movements, forward squares must be clear, must be on second or seventh rank for double movement, and diagonal squares must be occupied by enemy pieces for a move to be valid\n\n if Board::valid_square(one_space) && self.find_piece_by_coords(one_space.0, one_space.1).is_none() { coords.push(one_space); }\n\n if Board::valid_square(two_space) && piece.rank == (4.5 - 2.5 * (team as f64)) as i32 && self.find_piece_by_coords(one_space.0, one_space.1).is_none() && self.find_piece_by_coords(two_space.0, two_space.1).is_none() { coords.push(two_space); }\n\n if Board::valid_square(capture_square_1) && !self.find_piece_by_coords(capture_square_1.0, 
capture_square_1.1).is_none() && self.find_piece_by_coords(capture_square_1.0, capture_square_1.1).unwrap().color != piece.color { coords.push(capture_square_1); }\n\n if Board::valid_square(capture_square_2) && !self.find_piece_by_coords(capture_square_2.0, capture_square_2.1).is_none() && self.find_piece_by_coords(capture_square_2.0, capture_square_2.1).unwrap().color != piece.color { coords.push(capture_square_2); }\n\n },\n\n PieceType::Knight => {\n", "file_path": "src/lib.rs", "rank": 46, "score": 8.134111032985599 }, { "content": " string.push(char);\n\n string += \"</td>\";\n\n }\n\n \n\n string += \"</tr>\";\n\n }\n\n \n\n string += \"</tbody></table>\";\n\n \n\n return string;\n\n }\n\n}\n\n\n\n#[wasm_bindgen]\n\n#[derive(Clone)]\n\npub struct Board {\n\n fen: String,\n\n\n\n // White pieces\n\n white_pieces: [Vec<Piece>; 6],\n", "file_path": "src/lib.rs", "rank": 47, "score": 7.636737873007775 }, { "content": " char = 'b';\n\n }\n\n PieceType::Queen => {\n\n char = 'q';\n\n },\n\n PieceType::King => {\n\n char = 'k';\n\n },\n\n }\n\n \n\n if x.color == Color::White {\n\n char = char.to_ascii_uppercase();\n\n }\n\n },\n\n None => {\n\n char = '+';\n\n }\n\n }\n\n \n\n string += \"<td>\";\n", "file_path": "src/lib.rs", "rank": 48, "score": 7.5481748482168465 }, { "content": " for x in -2..3 {\n\n for y in -2..3 {\n\n // when x is +/- 1 and y is +/- 2, OR x is +/- 2 and y is +/- 1, and the target square is valid\n\n if (((x == 1 || x == -1) && (y == 2 || y == -2)) || ((x == 2 || x == -2) && (y == 1 || y == -1))) && Board::valid_square((piece.rank + x, piece.file + y)) {\n\n let same_team = match self.find_piece_by_coords(piece.rank + x, piece.file + y) {\n\n Some(x) => x.color == piece.color, // when the target square is not occupied by a same-color piece\n\n None => false,\n\n };\n\n if !same_team {coords.push((piece.rank + x, piece.file + y))} // add to coords\n\n }\n\n }\n\n }\n\n },\n\n PieceType::Bishop => {\n\n let mut quad_one = true;\n\n let mut 
quad_two = true;\n\n let mut quad_three = true;\n\n let mut quad_four = true;\n\n for x in 1..8 {\n\n if quad_one {\n", "file_path": "src/lib.rs", "rank": 49, "score": 7.3581213842423985 }, { "content": " let mut quad_three = true;\n\n let mut quad_four = true;\n\n for x in 1..8 {\n\n if r {\n\n if !Board::valid_square((piece.rank, piece.file + x)) {\n\n r = false;\n\n quad_one = false;\n\n quad_four = false;\n\n }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank, piece.file + x) {\n\n Some(y) => {\n\n if y.color == piece.color {\n\n coords.push((piece.rank, piece.file + x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {r = false;}\n", "file_path": "src/lib.rs", "rank": 50, "score": 7.224207502951861 }, { "content": " if y.color != piece.color {\n\n coords.push((piece.rank + x, piece.file - x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {quad_four = false;}\n\n else {coords.push((piece.rank + x, piece.file - x))}\n\n }\n\n }\n\n }\n\n },\n\n PieceType::Rook => {\n\n let mut r = true;\n\n let mut u = true;\n\n let mut l = true;\n\n let mut d = true;\n\n for x in 1..8 {\n\n if r {\n", "file_path": "src/lib.rs", "rank": 51, "score": 6.7628797573972115 }, { "content": " if !Board::valid_square((piece.rank, piece.file + x)) {\n\n r = false;\n\n }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank, piece.file + x) {\n\n Some(y) => {\n\n if y.color == piece.color {\n\n coords.push((piece.rank, piece.file + x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {r = false;}\n\n else {coords.push((piece.rank, piece.file + x))}\n\n }\n\n }\n\n if u {\n\n if !Board::valid_square((piece.rank, piece.file + x)) {\n\n u = false;\n", "file_path": "src/lib.rs", "rank": 52, "score": 6.367797282397762 }, { "content": " pub fn get_position(&self) -> Coordinates {\n\n Coordinates {\n\n rank: self.rank,\n\n file: self.file\n\n 
}\n\n }\n\n}\n\n\n\n#[wasm_bindgen]\n\n#[derive(PartialEq, Debug)]\n\npub struct Coordinates {\n\n pub rank: i32,\n\n pub file: i32\n\n}\n\n\n\n#[wasm_bindgen]\n\npub struct BoardSingleton {\n\n board: Board\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 53, "score": 6.350263215810725 }, { "content": "\n\n#[wasm_bindgen]\n\n#[derive(Copy, Clone, PartialEq, Debug)]\n\npub enum Color {\n\n White,\n\n Black\n\n}\n\n\n\n#[wasm_bindgen]\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Piece {\n\n kind: PieceType,\n\n color: Color,\n\n rank: i32,\n\n file: i32\n\n}\n\n\n\n#[wasm_bindgen]\n\nimpl Piece {\n\n // gets tuple of (rank, file)\n", "file_path": "src/lib.rs", "rank": 54, "score": 6.262362012646693 }, { "content": " if !Board::valid_square((piece.rank + x, piece.file + x)) {\n\n quad_one = false;\n\n }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank + x, piece.file + x) {\n\n Some(y) => {\n\n if y.color != piece.color {\n\n coords.push((piece.rank + x, piece.file + x));\n\n }\n\n true\n\n },\n\n None => false,\n\n };\n\n if same_team {quad_one = false;}\n\n else {coords.push((piece.rank + x, piece.file + x))}\n\n }\n\n }\n\n if quad_two {\n\n if !Board::valid_square((piece.rank - x, piece.file + x)) {\n\n quad_two = false;\n", "file_path": "src/lib.rs", "rank": 55, "score": 6.1461338992806365 }, { "content": " }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank + x, piece.file) {\n\n Some(y) => {\n\n if y.color == piece.color {\n\n coords.push((piece.rank + x, piece.file));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {u = false;}\n\n else {coords.push((piece.rank + x, piece.file))}\n\n }\n\n }\n\n if l {\n\n if !Board::valid_square((piece.rank + x, piece.file)) {\n\n l = false;\n\n }\n\n else {\n", "file_path": "src/lib.rs", "rank": 56, "score": 5.85074209751744 }, { "content": " let same_team = match self.find_piece_by_coords(piece.rank, piece.file - x) {\n\n Some(y) => 
{\n\n if y.color == piece.color {\n\n coords.push((piece.rank, piece.file - x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {l = false;}\n\n else {coords.push((piece.rank, piece.file - x))}\n\n }\n\n }\n\n if d {\n\n if !Board::valid_square((piece.rank - x, piece.file)) {\n\n d = false;\n\n }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank - x, piece.file) {\n\n Some(y) => {\n", "file_path": "src/lib.rs", "rank": 57, "score": 5.761870309499768 }, { "content": " else {coords.push((piece.rank - x, piece.file - x))}\n\n }\n\n if quad_four {\n\n let same_team = match self.find_piece_by_coords(piece.rank + x, piece.file - x) {\n\n Some(y) => {\n\n if y.color != piece.color {\n\n coords.push((piece.rank + x, piece.file - x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {quad_four = false;}\n\n else {coords.push((piece.rank + x, piece.file - x))}\n\n }\n\n \n\n }\n\n },\n\n PieceType::King => {\n\n let castles = self.get_castle_ability(piece.color);\n", "file_path": "src/lib.rs", "rank": 58, "score": 5.673873207157073 }, { "content": " else {coords.push((piece.rank + x, piece.file))}\n\n }\n\n }\n\n if l {\n\n if !Board::valid_square((piece.rank + x, piece.file)) {\n\n l = false;\n\n quad_two = false;\n\n quad_three = false;\n\n }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank, piece.file - x) {\n\n Some(y) => {\n\n if y.color == piece.color {\n\n coords.push((piece.rank, piece.file - x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {l = false;}\n", "file_path": "src/lib.rs", "rank": 59, "score": 5.631387521171614 }, { "content": " }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank - x, piece.file + x) {\n\n Some(y) => {\n\n if y.color != piece.color {\n\n coords.push((piece.rank - x, piece.file + x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if 
same_team {quad_two = false;}\n\n else {coords.push((piece.rank - x, piece.file + x))}\n\n }\n\n }\n\n if quad_three {\n\n if !Board::valid_square((piece.rank - x, piece.file - x)) {\n\n quad_three = false;\n\n }\n\n else {\n", "file_path": "src/lib.rs", "rank": 60, "score": 5.631387521171614 }, { "content": " else {coords.push((piece.rank, piece.file + x))}\n\n }\n\n }\n\n if u {\n\n if !Board::valid_square((piece.rank, piece.file + x)) {\n\n u = false;\n\n quad_one = false;\n\n quad_two = false;\n\n }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank + x, piece.file) {\n\n Some(y) => {\n\n if y.color == piece.color {\n\n coords.push((piece.rank + x, piece.file));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {u = false;}\n", "file_path": "src/lib.rs", "rank": 61, "score": 5.631387521171614 }, { "content": " else {coords.push((piece.rank, piece.file - x))}\n\n }\n\n }\n\n if d {\n\n if !Board::valid_square((piece.rank - x, piece.file)) {\n\n d = false;\n\n quad_three = false;\n\n quad_four = false;\n\n }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank - x, piece.file) {\n\n Some(y) => {\n\n if y.color == piece.color {\n\n coords.push((piece.rank - x, piece.file));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {d = false;}\n", "file_path": "src/lib.rs", "rank": 62, "score": 5.631387521171614 }, { "content": " let same_team = match self.find_piece_by_coords(piece.rank - x, piece.file - x) {\n\n Some(y) => {\n\n if y.color != piece.color {\n\n coords.push((piece.rank - x, piece.file - x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {quad_three = false;}\n\n else {coords.push((piece.rank - x, piece.file - x))}\n\n }\n\n }\n\n if quad_four {\n\n if !Board::valid_square((piece.rank + x, piece.file - x)) {\n\n quad_four = false;\n\n }\n\n else {\n\n let same_team = match self.find_piece_by_coords(piece.rank + 
x, piece.file - x) {\n\n Some(y) => {\n", "file_path": "src/lib.rs", "rank": 63, "score": 5.583595131410551 }, { "content": " if castles.0 {\n\n if self.find_piece_by_coords(piece.rank, piece.file + 1).is_none() && self.find_piece_by_coords(piece.rank, piece.file + 2).is_none() {\n\n coords.push((piece.rank, piece.file + 2))\n\n }\n\n }\n\n if castles.1 {\n\n if self.find_piece_by_coords(piece.rank, piece.file - 1).is_none() && self.find_piece_by_coords(piece.rank, piece.file - 2).is_none() {\n\n coords.push((piece.rank, piece.file - 2))\n\n }\n\n }\n\n for i in -1..2 {\n\n for j in -1..2 {\n\n if i != 0 || j != 0 {\n\n let same_team = match self.find_piece_by_coords(piece.rank + i, piece.file + j) {\n\n Some(x) => {\n\n if x.color == piece.color {\n\n false\n\n }\n\n else {\n\n true\n", "file_path": "src/lib.rs", "rank": 64, "score": 4.723495804642746 }, { "content": " else {coords.push((piece.rank - x, piece.file))}\n\n }\n\n }\n\n if quad_one {\n\n let same_team = match self.find_piece_by_coords(piece.rank + x, piece.file + x) {\n\n Some(y) => {\n\n if y.color != piece.color {\n\n coords.push((piece.rank + x, piece.file + x));\n\n }\n\n true\n\n },\n\n None => false,\n\n };\n\n if same_team {quad_one = false;}\n\n else {coords.push((piece.rank + x, piece.file + x))}\n\n }\n\n if quad_two {\n\n let same_team = match self.find_piece_by_coords(piece.rank - x, piece.file + x) {\n\n Some(y) => {\n\n if y.color != piece.color {\n", "file_path": "src/lib.rs", "rank": 65, "score": 4.5029457917176625 }, { "content": " coords.push((piece.rank - x, piece.file + x));\n\n }\n\n y.color == piece.color\n\n },\n\n None => false,\n\n };\n\n if same_team {quad_two = false;}\n\n else {coords.push((piece.rank - x, piece.file + x))}\n\n }\n\n if quad_three {\n\n let same_team = match self.find_piece_by_coords(piece.rank - x, piece.file - x) {\n\n Some(y) => {\n\n if y.color != piece.color {\n\n coords.push((piece.rank - x, piece.file - x));\n\n }\n\n y.color == piece.color\n\n 
},\n\n None => false,\n\n };\n\n if same_team {quad_three = false;}\n", "file_path": "src/lib.rs", "rank": 66, "score": 4.346679780115419 }, { "content": " }\n\n },\n\n None => true,\n\n };\n\n if same_team {coords.push((piece.rank + i, piece.file + j))}\n\n }\n\n }\n\n }\n\n },\n\n }\n\n\n\n return coords;\n\n }\n\n\n\n fn valid_square(coord:(i32,i32)) -> bool {\n\n if coord.0 > 8 || coord.0 < 1 { return false }\n\n if coord.1 > 8 || coord.1 < 1 { return false }\n\n\n\n return true;\n\n }\n", "file_path": "src/lib.rs", "rank": 67, "score": 3.188247590944666 }, { "content": "## 🔋 Batteries Included\n\n\n\n- `.gitignore`: ignores `node_modules`\n\n- `LICENSE-APACHE` and `LICENSE-MIT`: most Rust projects are licensed this way, so these are included for you\n\n- `README.md`: the file you are reading now!\n\n- `index.html`: a bare bones html document that includes the webpack bundle\n\n- `index.js`: example js file with a comment showing how to import and use a wasm pkg\n\n- `package.json` and `package-lock.json`:\n\n - pulls in devDependencies for using webpack:\n\n - [`webpack`](https://www.npmjs.com/package/webpack)\n\n - [`webpack-cli`](https://www.npmjs.com/package/webpack-cli)\n\n - [`webpack-dev-server`](https://www.npmjs.com/package/webpack-dev-server)\n\n - defines a `start` script to run `webpack-dev-server`\n\n- `webpack.config.js`: configuration file for bundling your js with webpack\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally\n\nsubmitted for inclusion in the work by you, as defined in the Apache-2.0\n\nlicense, shall be dual licensed as above, without any additional terms or\n\nconditions.\n", "file_path": "www/README.md", 
"rank": 68, "score": 2.6220390936490268 }, { "content": "// A dependency graph that contains any wasm must all be imported\n\n// asynchronously. This `bootstrap.js` file does the single async import, so\n\n// that no one else needs to worry about it again.\n\nimport(\"./index.js\")\n\n .catch(e => console.error(\"Error importing `index.js`:\", e));\n", "file_path": "www/bootstrap.js", "rank": 69, "score": 2.1914181374902384 }, { "content": "<div align=\"center\">\n\n\n\n <h1><code>create-wasm-app</code></h1>\n\n\n\n <strong>An <code>npm init</code> template for kick starting a project that uses NPM packages containing Rust-generated WebAssembly and bundles them with Webpack.</strong>\n\n\n\n <p>\n\n <a href=\"https://travis-ci.org/rustwasm/create-wasm-app\"><img src=\"https://img.shields.io/travis/rustwasm/create-wasm-app.svg?style=flat-square\" alt=\"Build Status\" /></a>\n\n </p>\n\n\n\n <h3>\n\n <a href=\"#usage\">Usage</a>\n\n <span> | </span>\n\n <a href=\"https://discordapp.com/channels/442252698964721669/443151097398296587\">Chat</a>\n\n </h3>\n\n\n\n <sub>Built with 🦀🕸 by <a href=\"https://rustwasm.github.io/\">The Rust and WebAssembly Working Group</a></sub>\n\n</div>\n\n\n\n## About\n\n\n\nThis template is designed for depending on NPM packages that contain\n\nRust-generated WebAssembly and using them to create a Website.\n\n\n\n* Want to create an NPM package with Rust and WebAssembly? [Check out\n\n `wasm-pack-template`.](https://github.com/rustwasm/wasm-pack-template)\n\n* Want to make a monorepo-style Website without publishing to NPM? 
Check out\n\n [`rust-webpack-template`](https://github.com/rustwasm/rust-webpack-template)\n\n and/or\n\n [`rust-parcel-template`](https://github.com/rustwasm/rust-parcel-template).\n\n\n\n## 🚴 Usage\n\n\n\n```\n\nnpm init wasm-app\n\n```\n\n\n", "file_path": "www/README.md", "rank": 70, "score": 2.1448505096423407 }, { "content": "## Important Commands\n\n\n\n### Run Tests (one at a time)\n\n`cargo test -- --test-threads=1`\n\n\n\n<div align=\"center\">\n\n\n\n <h1><code>wasm-pack-template</code></h1>\n\n\n\n <strong>A template for kick starting a Rust and WebAssembly project using <a href=\"https://github.com/rustwasm/wasm-pack\">wasm-pack</a>.</strong>\n\n\n\n <p>\n\n <a href=\"https://travis-ci.org/rustwasm/wasm-pack-template\"><img src=\"https://img.shields.io/travis/rustwasm/wasm-pack-template.svg?style=flat-square\" alt=\"Build Status\" /></a>\n\n </p>\n\n\n\n <h3>\n\n <a href=\"https://rustwasm.github.io/docs/wasm-pack/tutorials/npm-browser-packages/index.html\">Tutorial</a>\n\n <span> | </span>\n\n <a href=\"https://discordapp.com/channels/442252698964721669/443151097398296587\">Chat</a>\n\n </h3>\n\n\n\n <sub>Built with 🦀🕸 by <a href=\"https://rustwasm.github.io/\">The Rust and WebAssembly Working Group</a></sub>\n\n</div>\n\n\n\n## About\n\n\n\n[**📚 Read this template tutorial! 
📚**][template-docs]\n\n\n\nThis template is designed for compiling Rust libraries into WebAssembly and\n\npublishing the resulting package to NPM.\n\n\n\nBe sure to check out [other `wasm-pack` tutorials online][tutorials] for other\n\ntemplates and usages of `wasm-pack`.\n\n\n\n[tutorials]: https://rustwasm.github.io/docs/wasm-pack/tutorials/index.html\n\n[template-docs]: https://rustwasm.github.io/docs/wasm-pack/tutorials/npm-browser-packages/index.html\n\n\n\n## 🚴 Usage\n\n\n\n### 🛠️ Build with `wasm-pack build`\n\n\n\n```\n\nwasm-pack build\n\n```\n\n\n\n### 🔬 Test in Headless Browsers with `wasm-pack test`\n\n\n\n```\n\nwasm-pack test --headless --firefox\n\n```\n\n\n\n### 🎁 Publish to NPM with `wasm-pack publish`\n\n\n\n```\n\nwasm-pack publish\n\n```\n\n\n\n## 🔋 Batteries Included\n\n\n\n* [`wasm-bindgen`](https://github.com/rustwasm/wasm-bindgen) for communicating\n\n between WebAssembly and JavaScript.\n\n* [`console_error_panic_hook`](https://github.com/rustwasm/console_error_panic_hook)\n\n for logging panic messages to the developer console.\n\n* [`wee_alloc`](https://github.com/rustwasm/wee_alloc), an allocator optimized\n\n for small code size.\n", "file_path": "README.md", "rank": 71, "score": 2.1019434110416455 } ]
Rust
src/file_link.rs
vikigenius/dlm
d449e0cc490b999a41c5d73cbf364437593cafae
use crate::dlm_error::DlmError; use crate::dlm_error::DlmError::Other; use std::str; pub struct FileLink { pub url: String, pub file_name_no_extension: String, pub extension: String, pub file_name: String, } const NO_EXT: &str = ".NO_EXT"; impl FileLink { pub fn new(url: String) -> Result<FileLink, DlmError> { let trimmed = url.trim(); if trimmed.is_empty() { Err(Other { message: "FileLink cannot be built from an empty URL".to_string(), }) } else if trimmed.ends_with('/') { let message = format!( "FileLink cannot be built with an invalid extension '{}'", trimmed ); Err(Other { message }) } else { let url_decoded = url_decode(url.as_str())?; let last_segment_rev: String = url_decoded.chars().rev().take_while(|c| c != &'/').collect(); let (extension, file_name_no_extension) = if last_segment_rev.contains('.') { let ext_rev: String = last_segment_rev.chars().take_while(|c| c != &'.').collect(); let ext: String = ext_rev.chars().rev().collect(); let tmp: String = url_decoded .chars() .rev() .skip(ext.len()) .take_while(|c| c != &'/') .collect(); let file_name_no_extension: String = tmp.chars().rev().collect(); (ext, file_name_no_extension) } else { let file_name_no_extension: String = last_segment_rev.chars().rev().collect(); (NO_EXT.to_string(), file_name_no_extension) }; let file_name = format!("{}{}", file_name_no_extension, extension); let file_link = FileLink { url, file_name_no_extension, extension, file_name, }; Ok(file_link) } } pub fn full_path(&self, output_dir: &str) -> String { format!("{}/{}", output_dir, self.file_name) } } fn url_decode(data: &str) -> Result<String, DlmError> { let mut unescaped_bytes: Vec<u8> = Vec::new(); let mut bytes = data.bytes(); while let Some(b) = bytes.next() { match b as char { '%' => { let bytes_to_decode = &[bytes.next().unwrap(), bytes.next().unwrap()]; let hex_str = str::from_utf8(bytes_to_decode).unwrap(); unescaped_bytes.push(u8::from_str_radix(hex_str, 16).unwrap()); } _ => { unescaped_bytes.push(b); } } } 
String::from_utf8(unescaped_bytes).map_err(|e| DlmError::UrlDecodeError { message: e.to_string(), }) } #[cfg(test)] mod file_link_tests { use crate::dlm_error::DlmError; use crate::file_link::*; #[test] fn no_empty_string() { match FileLink::new("".to_string()) { Err(DlmError::Other { message }) => assert_eq!( message, "FileLink cannot be built from an empty URL".to_string() ), _ => assert_eq!(true, false), } } #[test] fn happy_case() { let url = "http://www.google.com/area51.txt".to_string(); match FileLink::new(url.clone()) { Ok(fl) => { assert_eq!(fl.url, url); assert_eq!(fl.file_name, "area51.txt".to_string()); assert_eq!(fl.extension, "txt".to_string()); assert_eq!(fl.file_name_no_extension, "area51.".to_string()); } _ => assert_eq!(true, false), } } #[test] fn full_path() { let url = "http://www.google.com/area51.txt".to_string(); let fl = FileLink::new(url).unwrap(); let full_path = fl.full_path("/secret-folder"); assert_eq!(full_path, "/secret-folder/area51.txt".to_string()) } #[test] fn trailing_slash() { let url = "http://www.google.com/area51/".to_string(); match FileLink::new(url.clone()) { Err(DlmError::Other { message }) => assert_eq!( message, "FileLink cannot be built with an invalid extension 'http://www.google.com/area51/'".to_string() ), _ => assert_eq!(true, false), } } #[test] fn no_extension() { let url = "http://www.google.com/area51".to_string(); let fl = FileLink::new(url).unwrap(); let full_path = fl.full_path("/secret-folder"); assert_eq!(full_path, "/secret-folder/area51.NO_EXT".to_string()) } }
use crate::dlm_error::DlmError; use crate::dlm_error::DlmError::Other; use std::str; pub struct FileLink { pub url: String, pub file_name_no_extension: String, pub extension: String, pub file_name: String, } const NO_EXT: &str = ".NO_EXT"; impl FileLink { pub fn new(url: String) -> Result<FileLink, DlmError> { let trimmed = url.trim(); if trimmed.is_empty() { Err(Other { message: "FileLink cannot be built from an empty URL".to_string(), }) } else if trimmed.ends_with('/') { let message = format!( "FileLink cannot be built with an invalid extension '{}'", trimmed ); Err(Other { message }) } else { let url_decoded = url_decode(url.as_str())?; let last_segment_rev: String = url_decoded.chars().rev().take_while(|c| c != &'/').collect(); let (extension, file_name_no_extension) = if last_segment_rev.contains('.') { let ext_rev: String = last_segment_rev.chars().take_while(|c| c != &'.').collect(); let ext: String = ext_rev.chars().rev().collect(); let tmp: String = url_decoded .chars() .rev() .skip(ext.len()) .take_while(|c| c != &'/') .collect(); let file_name_no_extension: String = tmp.chars().rev().collect(); (ext, file_name_no_extension) } else { let file_name_no_extension: String = last_segment_rev.chars().rev().collect(); (NO_EXT.to_string(), file_name_no_extension) }; l
; assert_eq!(fl.file_name_no_extension, "area51.".to_string()); } _ => assert_eq!(true, false), } } #[test] fn full_path() { let url = "http://www.google.com/area51.txt".to_string(); let fl = FileLink::new(url).unwrap(); let full_path = fl.full_path("/secret-folder"); assert_eq!(full_path, "/secret-folder/area51.txt".to_string()) } #[test] fn trailing_slash() { let url = "http://www.google.com/area51/".to_string(); match FileLink::new(url.clone()) { Err(DlmError::Other { message }) => assert_eq!( message, "FileLink cannot be built with an invalid extension 'http://www.google.com/area51/'".to_string() ), _ => assert_eq!(true, false), } } #[test] fn no_extension() { let url = "http://www.google.com/area51".to_string(); let fl = FileLink::new(url).unwrap(); let full_path = fl.full_path("/secret-folder"); assert_eq!(full_path, "/secret-folder/area51.NO_EXT".to_string()) } }
et file_name = format!("{}{}", file_name_no_extension, extension); let file_link = FileLink { url, file_name_no_extension, extension, file_name, }; Ok(file_link) } } pub fn full_path(&self, output_dir: &str) -> String { format!("{}/{}", output_dir, self.file_name) } } fn url_decode(data: &str) -> Result<String, DlmError> { let mut unescaped_bytes: Vec<u8> = Vec::new(); let mut bytes = data.bytes(); while let Some(b) = bytes.next() { match b as char { '%' => { let bytes_to_decode = &[bytes.next().unwrap(), bytes.next().unwrap()]; let hex_str = str::from_utf8(bytes_to_decode).unwrap(); unescaped_bytes.push(u8::from_str_radix(hex_str, 16).unwrap()); } _ => { unescaped_bytes.push(b); } } } String::from_utf8(unescaped_bytes).map_err(|e| DlmError::UrlDecodeError { message: e.to_string(), }) } #[cfg(test)] mod file_link_tests { use crate::dlm_error::DlmError; use crate::file_link::*; #[test] fn no_empty_string() { match FileLink::new("".to_string()) { Err(DlmError::Other { message }) => assert_eq!( message, "FileLink cannot be built from an empty URL".to_string() ), _ => assert_eq!(true, false), } } #[test] fn happy_case() { let url = "http://www.google.com/area51.txt".to_string(); match FileLink::new(url.clone()) { Ok(fl) => { assert_eq!(fl.url, url); assert_eq!(fl.file_name, "area51.txt".to_string()); assert_eq!(fl.extension, "txt".to_string())
random
[ { "content": "pub fn get_args() -> (String, usize, String) {\n\n let app = app();\n\n let matches = app.get_matches();\n\n\n\n let max_concurrent_downloads = matches.value_of_t(\"maxConcurrentDownloads\")\n\n .expect(\"maxConcurrentDownloads was not an integer\");\n\n if max_concurrent_downloads == 0 {\n\n panic!(\"invalid maxConcurrentDownloads - must be a positive integer\")\n\n }\n\n\n\n let input_file = matches.value_of(\"inputFile\").expect(\"impossible\");\n\n if !Path::new(input_file).is_file() {\n\n panic!(\"inputFile does not exist\")\n\n }\n\n\n\n let output_dir = matches.value_of(\"outputDir\").expect(\"impossible\");\n\n if !Path::new(output_dir).is_dir() {\n\n panic!(\"outputDir does not exist\")\n\n }\n\n\n", "file_path": "src/args.rs", "rank": 1, "score": 79427.58632503815 }, { "content": "fn pretty_file_size(len: u64) -> String {\n\n let float_len = len as f64;\n\n let (unit, value) = if float_len > GIGABYTE {\n\n (\"GiB\", float_len / GIGABYTE)\n\n } else if float_len > MEGABYTE {\n\n (\"MiB\", float_len / MEGABYTE)\n\n } else if float_len > KILOBYTE {\n\n (\"KiB\", float_len / KILOBYTE)\n\n } else {\n\n (\"bytes\", float_len)\n\n };\n\n format!(\"{:.2}{}\", value, unit)\n\n}\n\n\n\nasync fn try_hard_to_extract_headers(\n\n head_headers: &HeaderMap,\n\n url: &str,\n\n client: &Client,\n\n) -> Result<(Option<u64>, Option<String>), DlmError> {\n\n let tuple = match content_length(head_headers) {\n", "file_path": "src/downloader.rs", "rank": 2, "score": 44941.22655539446 }, { "content": "fn retry_handler(e: &DlmError, pbm: &ProgressBarManager, link: &str) -> bool {\n\n let should_retry = is_network_error(e);\n\n if should_retry {\n\n let msg = format!(\"Retrying {} after error {:?}\", link, e);\n\n pbm.log_above_progress_bars(msg)\n\n }\n\n should_retry\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 44353.11702754047 }, { "content": "fn accept_ranges(headers: &HeaderMap) -> Option<String> {\n\n headers\n\n .get(\"accept-ranges\")\n\n 
.and_then(|ct_len| ct_len.to_str().ok())\n\n .map(|v| v.to_string())\n\n}\n\n\n\nasync fn compute_query_range(\n\n pb: &ProgressBar,\n\n content_length: Option<u64>,\n\n accept_ranges: Option<String>,\n\n tmp_name: &str,\n\n) -> Result<Option<String>, DlmError> {\n\n if Path::new(&tmp_name).exists() {\n\n // get existing file size\n\n let tmp_size = tfs::File::open(&tmp_name).await?.metadata().await?.len();\n\n match (accept_ranges, content_length) {\n\n (Some(range), Some(cl)) if range == \"bytes\" => {\n\n pb.set_position(tmp_size);\n\n let range_msg = format!(\"bytes={}-{}\", tmp_size, cl);\n", "file_path": "src/downloader.rs", "rank": 4, "score": 42585.68099339242 }, { "content": "fn is_network_error(e: &DlmError) -> bool {\n\n matches!(e, DlmError::ConnectionClosed\n\n | DlmError::ResponseBodyError\n\n | DlmError::DeadLineElapsedTimeout)\n\n}\n", "file_path": "src/main.rs", "rank": 5, "score": 30667.27801208965 }, { "content": "fn app() -> clap::App<'static> {\n\n App::new(\"dlm\")\n\n .version(\"0.2.0\")\n\n .author(\"Arnaud Gourlay <[email protected]>\")\n\n .about(\"Minimal download manager\")\n\n .arg(\n\n Arg::new(\"maxConcurrentDownloads\")\n\n .help(\"used to limit the number of downloads in flight\")\n\n .long(\"maxConcurrentDownloads\")\n\n .short('M')\n\n .takes_value(true)\n\n .required(true),\n\n )\n\n .arg(\n\n Arg::new(\"inputFile\")\n\n .help(\"input file with links\")\n\n .long(\"inputFile\")\n\n .short('i')\n\n .takes_value(true)\n\n .required(true),\n", "file_path": "src/args.rs", "rank": 6, "score": 21751.20825067421 }, { "content": "fn content_length(headers: &HeaderMap) -> Option<u64> {\n\n headers\n\n .get(\"content-length\")\n\n .and_then(|ct_len| ct_len.to_str().ok())\n\n .and_then(|ct_len| ct_len.parse().ok())\n\n}\n\n\n", "file_path": "src/downloader.rs", "rank": 7, "score": 19258.382215710462 }, { "content": "use tokio::task::JoinError;\n\nuse tokio::time::error::Elapsed;\n\n\n\n#[derive(Debug)]\n\npub enum DlmError {\n\n 
ConnectionClosed,\n\n ConnectionTimeout,\n\n ResponseBodyError,\n\n DeadLineElapsedTimeout,\n\n ResponseStatusNotSuccess { message: String },\n\n UrlDecodeError { message: String },\n\n StdIoError { e: std::io::Error },\n\n TaskError { e: JoinError},\n\n ChannelError { e: async_channel::RecvError },\n\n Other { message: String },\n\n}\n\n\n\nconst CONNECTION_CLOSED: &str = \"connection closed before message completed\";\n\nconst CONNECTION_TIMEOUT: &str = \"error trying to connect: operation timed out\";\n\nconst BODY_ERROR: &str = \"error reading a body from connection\";\n", "file_path": "src/dlm_error.rs", "rank": 12, "score": 9.897803224343075 }, { "content": " }\n\n\n\n pub async fn finish_all(&self) -> Result<(), DlmError> {\n\n for _ in 0..self.file_pb_count {\n\n let pb = self.rx.recv().await?;\n\n pb.finish();\n\n }\n\n self.main_pb.finish();\n\n Ok(())\n\n }\n\n\n\n pub fn increment_global_progress(&self) {\n\n self.main_pb.inc(1)\n\n }\n\n\n\n pub fn message_progress_bar(s: &str) -> String {\n\n let max = 35; // arbitrary limit\n\n let count = s.chars().count();\n\n\n\n match count.cmp(&max) {\n", "file_path": "src/progress_bar_manager.rs", "rank": 14, "score": 8.306408012214582 }, { "content": " Ordering::Greater => s.chars().take(max).collect(),\n\n Ordering::Equal => s.to_string(),\n\n Ordering::Less => format!(\"{}{}\", s, \" \".repeat(max - count)),\n\n }\n\n }\n\n\n\n pub fn log_above_progress_bars(&self, msg: String) {\n\n ProgressBarManager::log_above_progress_bar(&self.main_pb, msg)\n\n }\n\n\n\n pub fn log_above_progress_bar(pb: &ProgressBar, msg: String) {\n\n pb.println(format!(\"[{}] {}\", Local::now().naive_local().format(\"%Y-%m-%d %H:%M:%S\"), msg));\n\n }\n\n\n\n pub fn reset_progress_bar(pb: &ProgressBar) {\n\n pb.reset();\n\n pb.set_message(ProgressBarManager::message_progress_bar(PENDING));\n\n }\n\n}\n", "file_path": "src/progress_bar_manager.rs", "rank": 15, "score": 8.1474711254693 }, { "content": "use hyper::HeaderMap;\n\nuse 
indicatif::ProgressBar;\n\nuse reqwest::Client;\n\nuse std::path::Path;\n\nuse tokio::fs as tfs;\n\nuse tokio::io::AsyncWriteExt;\n\nuse tokio::time::{timeout, Duration};\n\n\n\nuse crate::dlm_error::DlmError;\n\nuse crate::file_link::FileLink;\n\nuse crate::ProgressBarManager;\n\n\n\npub async fn download_link(\n\n raw_link: &str,\n\n client: &Client,\n\n output_dir: &str,\n\n pb: &ProgressBar,\n\n) -> Result<String, DlmError> {\n\n let file_link = FileLink::new(raw_link.to_string())?;\n\n let final_name = &file_link.full_path(output_dir);\n", "file_path": "src/downloader.rs", "rank": 16, "score": 8.013455510972937 }, { "content": "use chrono::Local;\n\nuse indicatif::{MultiProgress, ProgressBar, ProgressStyle};\n\nuse std::cmp::Ordering;\n\nuse async_channel::{Receiver, Sender};\n\nuse tokio::task::JoinHandle;\n\nuse crate::DlmError;\n\n\n\nconst PENDING: &str = \"pending\";\n\n\n\npub struct ProgressBarManager {\n\n main_pb: ProgressBar,\n\n file_pb_count: usize,\n\n pub tx: Sender<ProgressBar>,\n\n pub rx: Receiver<ProgressBar>\n\n}\n\n\n\nimpl ProgressBarManager {\n\n\n\n pub async fn init(max_concurrent_downloads: usize, main_pb_len: u64) -> (JoinHandle<()>, ProgressBarManager) {\n\n let mp = MultiProgress::new();\n", "file_path": "src/progress_bar_manager.rs", "rank": 17, "score": 6.918133449642573 }, { "content": "\n\nimpl std::convert::From<reqwest::Error> for DlmError {\n\n fn from(e: reqwest::Error) -> Self {\n\n //TODO use Reqwest's types instead of guessing from strings https://github.com/seanmonstar/reqwest/issues/757\n\n let e_string = e.to_string();\n\n if e_string.contains(BODY_ERROR) {\n\n DlmError::ResponseBodyError\n\n } else if e_string.contains(CONNECTION_CLOSED) {\n\n DlmError::ConnectionClosed\n\n } else if e_string.contains(CONNECTION_TIMEOUT) {\n\n DlmError::ConnectionTimeout\n\n } else {\n\n DlmError::Other { message: e_string }\n\n }\n\n }\n\n}\n\n\n\nimpl std::convert::From<std::io::Error> for DlmError {\n\n fn from(e: std::io::Error) 
-> Self {\n\n DlmError::StdIoError { e }\n", "file_path": "src/dlm_error.rs", "rank": 18, "score": 6.561613733659582 }, { "content": " if Path::new(final_name).exists() {\n\n let final_file_size = tfs::File::open(&final_name).await?.metadata().await?.len();\n\n let msg = format!(\n\n \"Skipping {} because the file is already completed [{}]\",\n\n file_link.file_name,\n\n pretty_file_size(final_file_size)\n\n );\n\n Ok(msg)\n\n } else {\n\n let url = file_link.url.as_str();\n\n let head_result = client.head(url).send().await?;\n\n if !head_result.status().is_success() {\n\n let message = format!(\"{} {}\", url, head_result.status());\n\n Err(DlmError::ResponseStatusNotSuccess { message })\n\n } else {\n\n let (content_length, accept_ranges) =\n\n try_hard_to_extract_headers(head_result.headers(), url, client).await?;\n\n // setup progress bar for the file\n\n pb.set_message(ProgressBarManager::message_progress_bar(&file_link.file_name));\n\n if let Some(total_size) = content_length {\n", "file_path": "src/downloader.rs", "rank": 19, "score": 5.121320225510766 }, { "content": "mod args;\n\nmod dlm_error;\n\nmod downloader;\n\nmod file_link;\n\nmod progress_bar_manager;\n\n\n\nuse crate::args::get_args;\n\nuse crate::dlm_error::DlmError;\n\nuse crate::downloader::download_link;\n\nuse crate::progress_bar_manager::ProgressBarManager;\n\nuse futures_util::stream::StreamExt;\n\nuse reqwest::Client;\n\nuse std::time::Duration;\n\nuse tokio::fs as tfs;\n\nuse tokio::io::AsyncBufReadExt;\n\nuse tokio_retry::RetryIf;\n\nuse tokio_retry::strategy::{ExponentialBackoff, jitter};\n\nuse tokio_stream::wrappers::LinesStream;\n\n\n\n#[tokio::main]\n", "file_path": "src/main.rs", "rank": 21, "score": 4.38256263433774 }, { "content": " let mut request = client.get(url);\n\n if let Some(range) = query_range {\n\n request = request.header(\"Range\", range)\n\n }\n\n\n\n // initiate file download\n\n let mut dl_response = request.send().await?;\n\n if !dl_response.status().is_success() 
{\n\n let message = format!(\"{} {}\", url, dl_response.status());\n\n Err(DlmError::ResponseStatusNotSuccess { message })\n\n } else {\n\n // incremental save chunk by chunk into part file\n\n let chunk_timeout = Duration::from_secs(60);\n\n while let Some(chunk) = timeout(chunk_timeout, dl_response.chunk()).await?? {\n\n file.write_all(&chunk).await?;\n\n pb.inc(chunk.len() as u64);\n\n }\n\n let final_file_size = file.metadata().await?.len();\n\n // rename part file to final\n\n tfs::rename(&tmp_name, &final_name).await?;\n", "file_path": "src/downloader.rs", "rank": 22, "score": 4.235541297212038 }, { "content": " let msg_count = format!(\"Found {} URLs in input file {}\", nb_of_lines, input_file);\n\n pbm.log_above_progress_bars(msg_count);\n\n\n\n // start streaming lines from file\n\n let file = tfs::File::open(input_file).await?;\n\n let file_reader = tokio::io::BufReader::new(file);\n\n let line_stream = LinesStream::new(file_reader.lines());\n\n line_stream\n\n .for_each_concurrent(max_concurrent_downloads, |link_res| async move {\n\n let message = match link_res {\n\n Err(e) => format!(\"Error with links iterator {}\", e),\n\n Ok(link) if link.trim().is_empty() => \"Skipping empty line\".to_string(),\n\n Ok(link) => {\n\n // claim a progress bar for the upcoming download\n\n let dl_pb = pbm_ref.rx.recv().await.expect(\"claiming progress bar should not fail\");\n\n\n\n // exponential backoff retries for network errors\n\n let retry_strategy = ExponentialBackoff::from_millis(1000)\n\n .map(jitter) // add jitter to delays\n\n .take(10); // limit to 10 retries\n", "file_path": "src/main.rs", "rank": 23, "score": 4.187662318580645 }, { "content": " (\n\n input_file.to_string(),\n\n max_concurrent_downloads,\n\n output_dir.to_string(),\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod args_tests {\n\n use crate::args::app;\n\n\n\n #[test]\n\n fn verify_app() {\n\n app().debug_assert();\n\n }\n\n}", "file_path": "src/args.rs", "rank": 24, "score": 3.9527129104027345 }, { 
"content": "\n\n // main progress bar\n\n let main_style = ProgressStyle::default_bar().template(\"{bar:133} {pos}/{len}\");\n\n let main_pb = mp.add(ProgressBar::new(0));\n\n main_pb.set_style(main_style);\n\n main_pb.set_length(main_pb_len);\n\n\n\n // If you need a multi-producer multi-consumer channel where only one consumer sees each message, you can use the async-channel crate.\n\n // There are also channels for use outside of asynchronous Rust, such as std::sync::mpsc and crossbeam::channel.\n\n // These channels wait for messages by blocking the thread, which is not allowed in asynchronous code.\n\n // ref: https://tokio.rs/tokio/tutorial/channels\n\n let (tx, rx): (Sender<ProgressBar>, Receiver<ProgressBar>) = async_channel::bounded(max_concurrent_downloads);\n\n\n\n let dl_style = ProgressStyle::default_bar()\n\n .template(\"{msg} [{elapsed_precise}] [{bar:40.cyan/blue}] {bytes}/{total_bytes} (speed:{bytes_per_sec}) (eta:{eta})\")\n\n .progress_chars(\"#>-\");\n\n\n\n // `max_concurrent_downloads` progress bars are shared between the threads at anytime\n\n for _ in 0..max_concurrent_downloads {\n\n let file_pb = mp.add(ProgressBar::new(0));\n", "file_path": "src/progress_bar_manager.rs", "rank": 25, "score": 3.338690004828182 }, { "content": "use clap::{App, Arg};\n\nuse std::path::Path;\n\n\n", "file_path": "src/args.rs", "rank": 26, "score": 2.543841799694801 }, { "content": " pb.set_length(total_size);\n\n };\n\n\n\n let tmp_name = format!(\"{}/{}part\", output_dir, file_link.file_name_no_extension);\n\n let query_range =\n\n compute_query_range(pb, content_length, accept_ranges, &tmp_name).await?;\n\n\n\n // create/open file.part\n\n let mut file = match query_range {\n\n Some(_) => {\n\n tfs::OpenOptions::new()\n\n .append(true)\n\n .create(false)\n\n .open(&tmp_name)\n\n .await?\n\n }\n\n None => tfs::File::create(&tmp_name).await?,\n\n };\n\n\n\n // building the request\n", "file_path": "src/downloader.rs", "rank": 27, "score": 2.463185647170052 }, 
{ "content": " }\n\n}\n\n\n\nimpl std::convert::From<Elapsed> for DlmError {\n\n fn from(_: Elapsed) -> Self {\n\n DlmError::DeadLineElapsedTimeout\n\n }\n\n}\n\n\n\nimpl std::convert::From<JoinError> for DlmError {\n\n fn from(e: JoinError) -> Self {\n\n DlmError::TaskError { e }\n\n }\n\n}\n\n\n\nimpl std::convert::From<async_channel::RecvError> for DlmError {\n\n fn from(e: async_channel::RecvError) -> Self {\n\n DlmError::ChannelError { e }\n\n }\n\n}", "file_path": "src/dlm_error.rs", "rank": 28, "score": 2.422212917253645 }, { "content": " Ok(Some(range_msg))\n\n }\n\n _ => {\n\n let log = format!(\n\n \"Found part file {} with size {} but it will be overridden because the server does not support resuming the download (range bytes)\",\n\n tmp_name, tmp_size\n\n );\n\n ProgressBarManager::log_above_progress_bar(pb, log);\n\n Ok(None)\n\n }\n\n }\n\n } else {\n\n if accept_ranges.is_none() {\n\n let log = format!(\n\n \"The download of file {} should not be interrupted because the server does not support resuming the download (range bytes)\",\n\n tmp_name\n\n );\n\n ProgressBarManager::log_above_progress_bar(pb, log);\n\n };\n\n Ok(None)\n", "file_path": "src/downloader.rs", "rank": 29, "score": 2.255366847300648 }, { "content": " file_pb.set_style(dl_style.clone());\n\n file_pb.set_message(ProgressBarManager::message_progress_bar(PENDING));\n\n tx.send(file_pb).await.expect(\"channel should not fail\");\n\n }\n\n\n\n // Render MultiProgress bar async. 
in a dedicated blocking thread\n\n let h = tokio::task::spawn_blocking(move || {\n\n match mp.join_and_clear() {\n\n Ok(_) => (),\n\n Err(e) => println!(\"Error while rendering progress bars {}\", e)\n\n }\n\n });\n\n\n\n let pbm = ProgressBarManager {\n\n main_pb,\n\n file_pb_count: max_concurrent_downloads,\n\n rx,\n\n tx\n\n };\n\n (h, pbm)\n", "file_path": "src/progress_bar_manager.rs", "rank": 30, "score": 1.693223490922053 }, { "content": " Some(0) => {\n\n // if \"content-length\": \"0\" then it is likely the server does not support HEAD, let's try harder with a GET\n\n let get_result = client.get(url).send().await?;\n\n let get_headers = get_result.headers();\n\n (content_length(get_headers), accept_ranges(get_headers))\n\n }\n\n ct_option @ Some(_) => (ct_option, accept_ranges(head_headers)),\n\n _ => (None, None),\n\n };\n\n Ok(tuple)\n\n}\n\n\n", "file_path": "src/downloader.rs", "rank": 31, "score": 1.4956231991631923 }, { "content": " })\n\n .await;\n\n\n\n // cleanup phase\n\n pbm_ref.finish_all().await?;\n\n rendering_handle.await?;\n\n Ok(())\n\n}\n\n\n\nasync fn count_lines(input_file: &str) -> Result<i32, DlmError> {\n\n let file = tfs::File::open(input_file).await?;\n\n let file_reader = tokio::io::BufReader::new(file);\n\n let stream = LinesStream::new(file_reader.lines());\n\n let line_nb = stream.fold(0, |acc, _| async move { acc + 1 }).await;\n\n Ok(line_nb)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 32, "score": 1.3334375658323059 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod downloader_tests {\n\n use crate::downloader::*;\n\n\n\n #[test]\n\n fn pretty_file_size_gb() {\n\n let size: u64 = 1_200_000_000;\n\n assert_eq!(pretty_file_size(size), \"1.12GiB\");\n\n }\n\n\n\n #[test]\n\n fn pretty_file_size_mb() {\n\n let size: u64 = 1_200_000;\n\n assert_eq!(pretty_file_size(size), \"1.14MiB\");\n\n }\n\n\n\n #[test]\n\n fn pretty_file_size_kb() {\n\n let size: u64 = 1_200;\n\n assert_eq!(pretty_file_size(size), \"1.17KiB\");\n\n 
}\n\n}\n", "file_path": "src/downloader.rs", "rank": 33, "score": 1.3054038719052041 }, { "content": "# dlm\n\n[![Build](https://github.com/agourlay/dlm/actions/workflows/ci.yml/badge.svg)](https://github.com/agourlay/dlm/actions/workflows/ci.yml)\n\n\n\nA minimal HTTP download manager that works just fine.\n\n\n\n## features\n\n\n\n- read URLs from a text file (one entry per line)\n\n- control maximum number of concurrent downloads\n\n- resume interrupted downloads if possible (using HTTP range)\n\n- automatically retry re-establishing download in case of timeout or hanging connection\n\n- multi progress bars (made with [indicatif](https://github.com/mitsuhiko/indicatif))\n\n\n\n```\n\n./dlm --help\n\ndlm 0.2.0\n\nArnaud Gourlay <[email protected]>\n\nMinimal download manager\n\n\n\nUSAGE:\n\n dlm --inputFile <inputFile> --maxConcurrentDownloads <maxConcurrentDownloads> --outputDir <outputDir>\n\n\n\nFLAGS:\n\n -h, --help Prints help information\n\n -V, --version Prints version information\n\n\n\nOPTIONS:\n\n -i, --inputFile <inputFile> input file with links\n\n -M, --maxConcurrentDownloads <maxConcurrentDownloads> used to limit the number of downloads in flight\n\n -o, --outputDir <outputDir> output directory for downloads\n\n```\n\n\n\nExample:\n\n\n\n```\n\n./dlm --inputFile ~/dlm/links.txt --outputDir ~/dlm/output --maxConcurrentDownloads 2\n", "file_path": "README.md", "rank": 34, "score": 1.1615865192349766 }, { "content": "\n\n let processed = RetryIf::spawn(\n\n retry_strategy,\n\n || download_link(&link, c_ref, od_ref, &dl_pb),\n\n |e: &DlmError| retry_handler(e, pbm_ref, &link,)\n\n ).await;\n\n\n\n // reset & release progress bar\n\n ProgressBarManager::reset_progress_bar(&dl_pb);\n\n pbm_ref.tx.send(dl_pb).await.expect(\"releasing progress bar should not fail\");\n\n\n\n // extract result\n\n match processed {\n\n Ok(info) => info,\n\n Err(e) => format!(\"Unrecoverable error while processing {}: {:?}\", link, e),\n\n }\n\n }\n\n };\n\n 
pbm_ref.log_above_progress_bars(message);\n\n pbm_ref.increment_global_progress();\n", "file_path": "src/main.rs", "rank": 35, "score": 1.130246885721388 } ]
Rust
services/pool/src/service.rs
tiagolobocastro/Mayastor
c9cf777e7776f529a2433d29b8513d1c601684a6
#![allow(clippy::unit_arg)] use super::*; use common::wrapper::v0::*; #[derive(Clone, Debug, Default)] pub(super) struct PoolSvc { registry: Registry<NodeWrapperPool>, } impl PoolSvc { pub fn new(period: std::time::Duration) -> Self { let obj = Self { registry: Registry::new(period), }; obj.start(); obj } fn start(&self) { self.registry.start(); } async fn get_node_pools( &self, node_id: Option<NodeId>, ) -> Result<Vec<Pool>, SvcError> { Ok(match node_id { None => self.registry.list_pools().await, Some(node_id) => self.registry.list_node_pools(&node_id).await, }) } async fn get_node_replicas( &self, node_id: Option<NodeId>, ) -> Result<Vec<Replica>, SvcError> { Ok(match node_id { None => self.registry.list_replicas().await, Some(node_id) => self.registry.list_node_replicas(&node_id).await, }) } #[tracing::instrument(level = "debug", err)] pub(super) async fn get_pools( &self, request: &GetPools, ) -> Result<Pools, SvcError> { let filter = request.filter.clone(); Ok(Pools(match filter { Filter::None => self.get_node_pools(None).await?, Filter::Node(node_id) => self.get_node_pools(Some(node_id)).await?, Filter::NodePool(node_id, pool_id) => { let pools = self.get_node_pools(Some(node_id)).await?; pools.iter().filter(|&p| p.id == pool_id).cloned().collect() } Filter::Pool(pool_id) => { let pools = self.get_node_pools(None).await?; pools.iter().filter(|&p| p.id == pool_id).cloned().collect() } _ => { return Err(SvcError::InvalidFilter { filter, }) } })) } #[tracing::instrument(level = "debug", err)] pub(super) async fn get_replicas( &self, request: &GetReplicas, ) -> Result<Replicas, SvcError> { let filter = request.filter.clone(); Ok(Replicas(match filter { Filter::None => self.get_node_replicas(None).await?, Filter::Node(node_id) => { self.get_node_replicas(Some(node_id)).await? 
} Filter::NodePool(node_id, pool_id) => { let replicas = self.get_node_replicas(Some(node_id)).await?; replicas .iter() .filter(|&p| p.pool == pool_id) .cloned() .collect() } Filter::Pool(pool_id) => { let replicas = self.get_node_replicas(None).await?; replicas .iter() .filter(|&p| p.pool == pool_id) .cloned() .collect() } Filter::NodePoolReplica(node_id, pool_id, replica_id) => { let replicas = self.get_node_replicas(Some(node_id)).await?; replicas .iter() .filter(|&p| p.pool == pool_id && p.uuid == replica_id) .cloned() .collect() } Filter::NodeReplica(node_id, replica_id) => { let replicas = self.get_node_replicas(Some(node_id)).await?; replicas .iter() .filter(|&p| p.uuid == replica_id) .cloned() .collect() } Filter::PoolReplica(pool_id, replica_id) => { let replicas = self.get_node_replicas(None).await?; replicas .iter() .filter(|&p| p.pool == pool_id && p.uuid == replica_id) .cloned() .collect() } Filter::Replica(replica_id) => { let replicas = self.get_node_replicas(None).await?; replicas .iter() .filter(|&p| p.uuid == replica_id) .cloned() .collect() } _ => { return Err(SvcError::InvalidFilter { filter, }) } })) } #[tracing::instrument(level = "debug", err)] pub(super) async fn create_replica( &self, request: &CreateReplica, ) -> Result<Replica, SvcError> { self.registry.create_replica(&request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn destroy_replica( &self, request: &DestroyReplica, ) -> Result<(), SvcError> { self.registry.destroy_replica(&request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn share_replica( &self, request: &ShareReplica, ) -> Result<String, SvcError> { self.registry.share_replica(&request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn unshare_replica( &self, request: &UnshareReplica, ) -> Result<(), SvcError> { self.registry.unshare_replica(&request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn create_pool( &self, request: 
&CreatePool, ) -> Result<Pool, SvcError> { self.registry.create_pool(request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn destroy_pool( &self, request: &DestroyPool, ) -> Result<(), SvcError> { self.registry.destroy_pool(request).await } }
#![allow(clippy::unit_arg)] use super::*; use common::wrapper::v0::*; #[derive(Clone, Debug, Default)] pub(super) struct PoolSvc { registry: Registry<NodeWrapperPool>, } impl PoolSvc {
fn start(&self) { self.registry.start(); } async fn get_node_pools( &self, node_id: Option<NodeId>, ) -> Result<Vec<Pool>, SvcError> { Ok(match node_id { None => self.registry.list_pools().await, Some(node_id) => self.registry.list_node_pools(&node_id).await, }) } async fn get_node_replicas( &self, node_id: Option<NodeId>, ) -> Result<Vec<Replica>, SvcError> { Ok(match node_id { None => self.registry.list_replicas().await, Some(node_id) => self.registry.list_node_replicas(&node_id).await, }) } #[tracing::instrument(level = "debug", err)] pub(super) async fn get_pools( &self, request: &GetPools, ) -> Result<Pools, SvcError> { let filter = request.filter.clone(); Ok(Pools(match filter { Filter::None => self.get_node_pools(None).await?, Filter::Node(node_id) => self.get_node_pools(Some(node_id)).await?, Filter::NodePool(node_id, pool_id) => { let pools = self.get_node_pools(Some(node_id)).await?; pools.iter().filter(|&p| p.id == pool_id).cloned().collect() } Filter::Pool(pool_id) => { let pools = self.get_node_pools(None).await?; pools.iter().filter(|&p| p.id == pool_id).cloned().collect() } _ => { return Err(SvcError::InvalidFilter { filter, }) } })) } #[tracing::instrument(level = "debug", err)] pub(super) async fn get_replicas( &self, request: &GetReplicas, ) -> Result<Replicas, SvcError> { let filter = request.filter.clone(); Ok(Replicas(match filter { Filter::None => self.get_node_replicas(None).await?, Filter::Node(node_id) => { self.get_node_replicas(Some(node_id)).await? 
} Filter::NodePool(node_id, pool_id) => { let replicas = self.get_node_replicas(Some(node_id)).await?; replicas .iter() .filter(|&p| p.pool == pool_id) .cloned() .collect() } Filter::Pool(pool_id) => { let replicas = self.get_node_replicas(None).await?; replicas .iter() .filter(|&p| p.pool == pool_id) .cloned() .collect() } Filter::NodePoolReplica(node_id, pool_id, replica_id) => { let replicas = self.get_node_replicas(Some(node_id)).await?; replicas .iter() .filter(|&p| p.pool == pool_id && p.uuid == replica_id) .cloned() .collect() } Filter::NodeReplica(node_id, replica_id) => { let replicas = self.get_node_replicas(Some(node_id)).await?; replicas .iter() .filter(|&p| p.uuid == replica_id) .cloned() .collect() } Filter::PoolReplica(pool_id, replica_id) => { let replicas = self.get_node_replicas(None).await?; replicas .iter() .filter(|&p| p.pool == pool_id && p.uuid == replica_id) .cloned() .collect() } Filter::Replica(replica_id) => { let replicas = self.get_node_replicas(None).await?; replicas .iter() .filter(|&p| p.uuid == replica_id) .cloned() .collect() } _ => { return Err(SvcError::InvalidFilter { filter, }) } })) } #[tracing::instrument(level = "debug", err)] pub(super) async fn create_replica( &self, request: &CreateReplica, ) -> Result<Replica, SvcError> { self.registry.create_replica(&request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn destroy_replica( &self, request: &DestroyReplica, ) -> Result<(), SvcError> { self.registry.destroy_replica(&request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn share_replica( &self, request: &ShareReplica, ) -> Result<String, SvcError> { self.registry.share_replica(&request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn unshare_replica( &self, request: &UnshareReplica, ) -> Result<(), SvcError> { self.registry.unshare_replica(&request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn create_pool( &self, request: 
&CreatePool, ) -> Result<Pool, SvcError> { self.registry.create_pool(request).await } #[tracing::instrument(level = "debug", err)] pub(super) async fn destroy_pool( &self, request: &DestroyPool, ) -> Result<(), SvcError> { self.registry.destroy_pool(request).await } }
pub fn new(period: std::time::Duration) -> Self { let obj = Self { registry: Registry::new(period), }; obj.start(); obj }
function_block-full_function
[ { "content": "#[derive(Debug)]\n\nstruct RebuildTask {\n\n buffer: DmaBuf,\n\n sender: mpsc::Sender<TaskResult>,\n\n error: Option<TaskResult>,\n\n}\n\n\n\n/// Pool of rebuild tasks and progress tracking\n\n/// Each task uses a clone of the sender allowing the management task to poll a\n\n/// single receiver\n\n#[derive(Debug)]\n\npub(super) struct RebuildTasks {\n\n tasks: Vec<RebuildTask>,\n\n\n\n channel: (mpsc::Sender<TaskResult>, mpsc::Receiver<TaskResult>),\n\n active: usize,\n\n total: usize,\n\n\n\n segments_done: u64,\n\n}\n\n\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 0, "score": 133214.89828148053 }, { "content": "#[derive(Debug, Clone)]\n\nstruct TaskResult {\n\n /// block that was being rebuilt\n\n blk: u64,\n\n /// id of the task\n\n id: usize,\n\n /// encountered error, if any\n\n error: Option<RebuildError>,\n\n}\n\n\n\n/// Number of concurrent copy tasks per rebuild job\n\nconst SEGMENT_TASKS: usize = 16;\n\n/// Size of each segment used by the copy task\n\npub const SEGMENT_SIZE: u64 = SPDK_BDEV_LARGE_BUF_MAX_SIZE as u64;\n\n\n\n/// Each rebuild task needs a unique buffer to read/write from source to target\n\n/// A mpsc channel is used to communicate with the management task\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 1, "score": 133214.85403884726 }, { "content": "// Custom struct used to format trace events.\n\nstruct CustomFormat {\n\n ansi: bool,\n\n}\n\n\n\n// Format a trace event.\n\nimpl<S, N> FormatEvent<S, N> for CustomFormat\n\nwhere\n\n S: tracing_core::subscriber::Subscriber + for<'s> LookupSpan<'s>,\n\n N: for<'w> FormatFields<'w> + 'static,\n\n{\n\n fn format_event(\n\n &self,\n\n context: &FmtContext<'_, S, N>,\n\n writer: &mut dyn std::fmt::Write,\n\n event: &Event<'_>,\n\n ) -> std::fmt::Result {\n\n let normalized = event.normalized_metadata();\n\n let meta = normalized.as_ref().unwrap_or_else(|| event.metadata());\n\n\n\n write!(\n", "file_path": "mayastor/src/logger.rs", "rank": 2, 
"score": 87450.22743659635 }, { "content": "#[derive(Debug)]\n\nstruct MacroCallback {\n\n macros: Arc<RwLock<HashSet<String>>>,\n\n}\n\n\n\nimpl ParseCallbacks for MacroCallback {\n\n fn will_parse_macro(&self, name: &str) -> MacroParsingBehavior {\n\n self.macros.write().unwrap().insert(name.into());\n\n\n\n if name == \"IPPORT_RESERVED\" {\n\n return MacroParsingBehavior::Ignore;\n\n }\n\n\n\n MacroParsingBehavior::Default\n\n }\n\n}\n\n\n", "file_path": "spdk-sys/build.rs", "rank": 3, "score": 87444.6805742355 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct Preamble {\n\n pub(crate) id: MessageId,\n\n}\n\n\n\n/// Unsolicited (send) messages carry the message identifier, the sender\n\n/// identifier and finally the message payload itself\n", "file_path": "mbus-api/src/lib.rs", "rank": 4, "score": 87444.5927594114 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct EmptyArgs {}\n\n\n\n/// The main test work horse. It runs a setup before the unit test and tear-down\n\n/// after the unit test. The setup involves starting a unix domain socket\n\n/// server. 
It is customizable by providing two closures:\n\n///\n\n/// 1) handler for constructing reply from the server and\n\n/// 2) test callback evaluating a return value from the json-rpc client call\n\n///\n\n/// Beware that rust executes the tests in parallel so whatever is done in this\n\n/// function must preserve independence of the tests on each other.\n\nasync fn run_test<A, R, H, T>(method: &str, arg: A, handler: H, test: T)\n\nwhere\n\n A: serde::ser::Serialize + Send,\n\n R: 'static + serde::de::DeserializeOwned + panic::UnwindSafe + Send,\n\n H: FnOnce(Request) -> Vec<u8> + 'static + Send,\n\n T: FnOnce(Result<R, Error>) + panic::UnwindSafe,\n\n{\n\n let sock = format!(\"{}.{:?}\", SOCK_PATH, std::thread::current().id());\n\n let sock_path = Path::new(&sock);\n", "file_path": "jsonrpc/src/test.rs", "rank": 5, "score": 87444.5927594114 }, { "content": "struct CSIServer {}\n\n\n\nimpl CSIServer {\n\n pub async fn run(csi_socket: &str, node_name: &str) -> Result<(), ()> {\n\n let mut uds_sock = UnixListener::bind(csi_socket).unwrap();\n\n info!(\"CSI plugin bound to {}\", csi_socket);\n\n\n\n if let Err(e) = Server::builder()\n\n .add_service(NodeServer::new(Node {\n\n node_name: node_name.into(),\n\n filesystems: probe_filesystems(),\n\n }))\n\n .add_service(IdentityServer::new(Identity {}))\n\n .serve_with_incoming(uds_sock.incoming().map_ok(UnixStream))\n\n .await\n\n {\n\n error!(\"CSI server failed with error: {}\", e);\n\n return Err(());\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "csi/src/server.rs", "rank": 6, "score": 87438.88565620699 }, { "content": "#[derive(Clone)]\n\nstruct Watchdog {\n\n deadline: std::time::Duration,\n\n pet_chan: tokio::sync::mpsc::Sender<()>,\n\n}\n\n\n\nimpl Watchdog {\n\n /// new empty watchdog with a timeout\n\n pub fn new(deadline: std::time::Duration) -> Self {\n\n Self {\n\n deadline,\n\n pet_chan: tokio::sync::mpsc::channel(1).0,\n\n }\n\n }\n\n\n\n /// arm watchdog with self timeout and execute error callback 
if\n\n /// the deadline is not met\n\n pub fn arm<T>(&mut self, on_timeout: T)\n\n where\n\n T: std::future::Future + Send + 'static,\n\n T::Output: Send + 'static,\n", "file_path": "services/node/src/server.rs", "rank": 7, "score": 87438.88565620699 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n nats: String,\n\n\n\n /// The period at which the registry updates its cache of all\n\n /// resources from all nodes\n\n #[structopt(long, short, default_value = \"20s\")]\n\n period: humantime::Duration,\n\n}\n\n\n\n/// Needed so we can implement the ServiceSubscriber trait for\n\n/// the message types external to the crate\n", "file_path": "services/pool/src/server.rs", "rank": 8, "score": 86136.11710149105 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n nats: String,\n\n\n\n /// The period at which the registry updates its cache of all\n\n /// resources from all nodes\n\n #[structopt(long, short, default_value = \"20s\")]\n\n period: humantime::Duration,\n\n}\n\n\n\n/// Needed so we can implement the ServiceSubscriber trait for\n\n/// the message types external to the crate\n", "file_path": "services/volume/src/server.rs", "rank": 9, "score": 86136.11710149105 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Rest Server hostname to connect to\n\n /// Default: localhost:8080\n\n #[structopt(long, short, default_value = \"localhost:8080\")]\n\n rest: String,\n\n\n\n /// Polling period\n\n #[structopt(long, short, default_value = \"30s\")]\n\n period: humantime::Duration,\n\n\n\n /// Trace rest requests to the Jaeger endpoint 
agent\n\n #[structopt(long, short)]\n\n jaeger: Option<String>,\n\n}\n\n\n\n#[derive(CustomResource, Deserialize, Serialize, Clone, Debug)]\n\n#[kube(\n\n group = \"openebs.io\",\n\n version = \"v1alpha1\",\n\n kind = \"MayastorNode\",\n", "file_path": "operators/node/src/main.rs", "rank": 10, "score": 86136.11710149105 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Rest Server address to bind to\n\n /// Default: 0.0.0.0:8080\n\n #[structopt(long, short, default_value = \"0.0.0.0:8080\")]\n\n rest: String,\n\n /// The Nats Server URL or address to connect to\n\n /// Default: nats://0.0.0.0:4222\n\n #[structopt(long, short, default_value = \"nats://0.0.0.0:4222\")]\n\n nats: String,\n\n\n\n /// Trace rest requests to the Jaeger endpoint agent\n\n #[structopt(long, short)]\n\n jaeger: Option<String>,\n\n}\n\n\n\nuse actix_web_opentelemetry::RequestTracing;\n\nuse opentelemetry::{\n\n global,\n\n sdk::{propagation::TraceContextPropagator, trace::Tracer},\n\n};\n\nuse opentelemetry_jaeger::Uninstall;\n\n\n", "file_path": "rest/service/src/main.rs", "rank": 11, "score": 86136.11710149105 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n url: String,\n\n}\n\n\n\n/// Needed so we can implement the ServiceSubscriber trait for\n\n/// the message types external to the crate\n", "file_path": "services/kiiss/src/server.rs", "rank": 12, "score": 86136.11710149105 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n nats: String,\n\n /// Deadline for the mayastor instance keep alive registration\n\n /// Default: 10s\n\n #[structopt(long, short, 
default_value = \"10s\")]\n\n deadline: humantime::Duration,\n\n}\n\n\n\n/// Needed so we can implement the ServiceSubscriber trait for\n\n/// the message types external to the crate\n", "file_path": "services/node/src/server.rs", "rank": 13, "score": 86136.11710149105 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n url: String,\n\n\n\n /// Act as a Server or a test client\n\n #[structopt(long, short)]\n\n client: bool,\n\n}\n\n\n\n/// Needed so we can implement the ServiceSubscriber trait for\n\n/// the message types external to the crate\n", "file_path": "services/examples/service/main.rs", "rank": 14, "score": 86136.11710149105 }, { "content": "#[derive(Default)]\n\nstruct ConfigState {\n\n state: Mutex<HashMap<SenderId, HashMap<Config, Vec<u8>>>>,\n\n}\n\n\n\nlazy_static! {\n\n static ref CONFIGS: ConfigState = Default::default();\n\n}\n\n\n\n#[async_trait]\n\nimpl ServiceSubscriber for ServiceHandler<ConfigUpdate> {\n\n async fn handler(&self, args: Arguments<'_>) -> Result<(), Error> {\n\n let data: ConfigUpdate = args.request.inner()?;\n\n info!(\"Received: {:?}\", data);\n\n\n\n let msg: ReceivedMessageExt<ConfigUpdate, ()> =\n\n args.request.try_into()?;\n\n let config = msg.inner();\n\n\n\n let mut state = CONFIGS.state.lock().await;\n\n\n", "file_path": "services/kiiss/src/server.rs", "rank": 15, "score": 86130.44267466373 }, { "content": "#[derive(Default, Clone)]\n\nstruct NodeStore {\n\n inner: std::sync::Arc<NodeStoreInner>,\n\n}\n", "file_path": "services/node/src/server.rs", "rank": 16, "score": 86130.39788414698 }, { "content": "#[derive(Debug)]\n\nstruct Io {\n\n /// buffer we read/write from/to\n\n buf: DmaBuf,\n\n /// type of IO we are supposed to issue\n\n iot: IoType,\n\n /// current offset where we are reading or writing\n\n offset: 
u64,\n\n /// pointer to our the job we belong too\n\n job: NonNull<Job>,\n\n}\n\n\n\nimpl Io {\n\n /// start submitting\n\n fn run(&mut self, job: *mut Job) {\n\n self.job = NonNull::new(job).unwrap();\n\n match self.iot {\n\n IoType::READ => self.read(0),\n\n IoType::WRITE => self.write(0),\n\n };\n\n }\n", "file_path": "mayastor/src/core/io_driver.rs", "rank": 17, "score": 86130.37091266457 }, { "content": "#[derive(Debug)]\n\nstruct SubsystemCtx {\n\n rpc: CString,\n\n sender: futures::channel::oneshot::Sender<bool>,\n\n}\n\n\n\nstatic MAYASTOR_DEFAULT_ENV: OnceCell<MayastorEnvironment> = OnceCell::new();\n\nimpl MayastorEnvironment {\n\n pub fn new(args: MayastorCliArgs) -> Self {\n\n Self {\n\n grpc_endpoint: Some(grpc::endpoint(args.grpc_endpoint)),\n\n mbus_endpoint: subsys::mbus_endpoint(args.mbus_endpoint),\n\n node_name: args.node_name.unwrap_or_else(|| \"mayastor-node\".into()),\n\n mayastor_config: args.mayastor_config,\n\n child_status_config: args.child_status_config,\n\n log_component: args.log_components,\n\n mem_size: args.mem_size,\n\n no_pci: args.no_pci,\n\n reactor_mask: args.reactor_mask,\n\n rpc_addr: args.rpc_address,\n\n hugedir: args.hugedir,\n", "file_path": "mayastor/src/core/env.rs", "rank": 18, "score": 86130.37091266457 }, { "content": "struct Factory {}\n\nimpl HttpServiceFactory for Factory {\n\n fn register(self, config: &mut AppService) {\n\n get_nexuses.register(config);\n\n get_nexus.register(config);\n\n get_node_nexuses.register(config);\n\n get_node_nexus.register(config);\n\n put_node_nexus.register(config);\n\n del_node_nexus.register(config);\n\n del_nexus.register(config);\n\n put_node_nexus_share.register(config);\n\n del_node_nexus_share.register(config);\n\n }\n\n}\n\npub(crate) fn factory() -> impl HttpServiceFactory {\n\n Factory {}\n\n}\n\n\n\n#[get(\"/v0/nexuses\")]\n\nasync fn get_nexuses() -> impl Responder {\n", "file_path": "rest/service/src/v0/nexuses.rs", "rank": 19, "score": 86124.57599463605 }, { "content": 
"struct Factory {}\n\nimpl HttpServiceFactory for Factory {\n\n fn register(self, config: &mut AppService) {\n\n get_pools.register(config);\n\n get_pool.register(config);\n\n get_node_pools.register(config);\n\n get_node_pool.register(config);\n\n put_node_pool.register(config);\n\n del_node_pool.register(config);\n\n del_pool.register(config);\n\n }\n\n}\n\npub(crate) fn factory() -> impl HttpServiceFactory {\n\n Factory {}\n\n}\n\n\n\n#[get(\"/v0/pools\")]\n\nasync fn get_pools() -> impl Responder {\n\n RestRespond::result(MessageBus::get_pools(Filter::None).await)\n\n}\n", "file_path": "rest/service/src/v0/pools.rs", "rank": 20, "score": 86124.57599463605 }, { "content": "struct Factory {}\n\nimpl HttpServiceFactory for Factory {\n\n fn register(self, config: &mut AppService) {\n\n get_nexus_children.register(config);\n\n get_nexus_child.register(config);\n\n get_node_nexus_children.register(config);\n\n get_node_nexus_child.register(config);\n\n add_nexus_child.register(config);\n\n add_node_nexus_child.register(config);\n\n delete_nexus_child.register(config);\n\n delete_node_nexus_child.register(config);\n\n }\n\n}\n\npub(crate) fn factory() -> impl HttpServiceFactory {\n\n Factory {}\n\n}\n\n\n\n#[get(\"/v0/nexuses/{nexus_id}/children\")]\n\nasync fn get_nexus_children(\n\n web::Path(nexus_id): web::Path<NexusId>,\n", "file_path": "rest/service/src/v0/children.rs", "rank": 21, "score": 86124.57599463605 }, { "content": "struct Factory {}\n\nimpl HttpServiceFactory for Factory {\n\n fn register(self, config: &mut AppService) {\n\n get_node.register(config);\n\n get_nodes.register(config);\n\n }\n\n}\n\npub(crate) fn factory() -> impl HttpServiceFactory {\n\n Factory {}\n\n}\n\n\n\n#[get(\"/v0/nodes\")]\n\nasync fn get_nodes() -> impl Responder {\n\n RestRespond::result(MessageBus::get_nodes().await)\n\n}\n\n#[get(\"/v0/nodes/{id}\")]\n\nasync fn get_node(web::Path(node_id): web::Path<NodeId>) -> impl Responder {\n\n 
RestRespond::result(MessageBus::get_node(&node_id).await)\n\n}\n", "file_path": "rest/service/src/v0/nodes.rs", "rank": 22, "score": 86124.57599463605 }, { "content": "#[derive(Clone)]\n\nstruct Configuration {\n\n /// Id of the node that mayastor is running on\n\n node: NodeId,\n\n /// gRPC endpoint of the server provided by mayastor\n\n grpc_endpoint: String,\n\n /// heartbeat interval (how often the register message is sent)\n\n hb_interval: Duration,\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Registration {\n\n /// Configuration of the registration\n\n config: Configuration,\n\n /// Receive channel for messages and termination\n\n rcv_chan: smol::channel::Receiver<()>,\n\n /// Termination channel\n\n fini_chan: smol::channel::Sender<()>,\n\n}\n\n\n\nstatic MESSAGE_BUS_REG: OnceCell<Registration> = OnceCell::new();\n", "file_path": "mayastor/src/subsys/mbus/registration.rs", "rank": 23, "score": 86124.57599463605 }, { "content": "struct Factory {}\n\nimpl HttpServiceFactory for Factory {\n\n fn register(self, config: &mut AppService) {\n\n get_replicas.register(config);\n\n get_replica.register(config);\n\n get_replica.register(config);\n\n get_node_replicas.register(config);\n\n get_node_pool_replicas.register(config);\n\n get_node_pool_replica.register(config);\n\n put_node_pool_replica.register(config);\n\n put_pool_replica.register(config);\n\n del_node_pool_replica.register(config);\n\n del_pool_replica.register(config);\n\n put_node_pool_replica_share.register(config);\n\n put_pool_replica_share.register(config);\n\n del_node_pool_replica_share.register(config);\n\n del_pool_replica_share.register(config);\n\n }\n\n}\n\npub(crate) fn factory() -> impl HttpServiceFactory {\n", "file_path": "rest/service/src/v0/replicas.rs", "rank": 24, "score": 86124.57599463605 }, { "content": "struct Factory {}\n\nimpl HttpServiceFactory for Factory {\n\n fn register(self, config: &mut AppService) {\n\n get_volumes.register(config);\n\n get_volume.register(config);\n\n 
get_node_volumes.register(config);\n\n get_node_volume.register(config);\n\n put_volume.register(config);\n\n del_volume.register(config);\n\n }\n\n}\n\npub(crate) fn factory() -> impl HttpServiceFactory {\n\n Factory {}\n\n}\n\n\n\n#[get(\"/v0/volumes\")]\n\nasync fn get_volumes() -> impl Responder {\n\n RestRespond::result(MessageBus::get_volumes(Filter::None).await)\n\n}\n\n\n", "file_path": "rest/service/src/v0/volumes.rs", "rank": 25, "score": 86124.57599463605 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n url: String,\n\n}\n\n\n", "file_path": "services/examples/node-client/main.rs", "rank": 26, "score": 84888.02243966339 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n url: String,\n\n}\n\n\n", "file_path": "services/examples/kiiss-client/main.rs", "rank": 27, "score": 84888.02243966339 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n url: String,\n\n\n\n /// Channel to listen on\n\n #[structopt(long, short, default_value = \"v0/default\")]\n\n channel: Channel,\n\n\n\n /// Receiver version\n\n #[structopt(long, short, default_value = \"1\")]\n\n version: Version,\n\n}\n\n\n", "file_path": "mbus-api/examples/server/main.rs", "rank": 28, "score": 84888.02243966339 }, { "content": "#[derive(Debug, StructOpt)]\n\nstruct CliArgs {\n\n /// The Nats Server URL to connect to\n\n /// (supports the nats schema)\n\n /// Default: 
nats://127.0.0.1:4222\n\n #[structopt(long, short, default_value = \"nats://127.0.0.1:4222\")]\n\n url: String,\n\n\n\n /// Channel to send to\n\n #[structopt(long, short, default_value = \"v0/default\")]\n\n channel: Channel,\n\n\n\n /// With server in this binary\n\n #[structopt(long, short)]\n\n server: bool,\n\n}\n\n\n", "file_path": "mbus-api/examples/client/main.rs", "rank": 29, "score": 84888.02243966339 }, { "content": "#[derive(Serialize, Deserialize, Debug, Default, Clone)]\n\nstruct DummyReply {\n\n name: String,\n\n}\n\n\n\n// note: in this example we use the default message id\n\n// because we're adding the message types outside of the\n\n// library which should not be done so we have to fake\n\n// out the message id as `Default`.\n\nbus_impl_message_all!(DummyRequest, Default, DummyReply, Default);\n\n\n\nasync fn start_server_side() {\n\n let cli_args = CliArgs::from_args();\n\n\n\n let mut sub = bus().subscribe(cli_args.channel).await.unwrap();\n\n\n\n tokio::spawn(async move {\n\n // server side\n\n let mut count = 1;\n\n loop {\n\n let message = &sub.next().await.unwrap();\n", "file_path": "mbus-api/examples/client/main.rs", "rank": 30, "score": 84887.79477254848 }, { "content": "#[derive(Serialize, Deserialize, Debug, Default, Clone)]\n\nstruct GetSvcName {}\n\n\n", "file_path": "services/examples/service/main.rs", "rank": 31, "score": 84887.79477254848 }, { "content": "#[derive(Serialize, Deserialize, Debug, Default, Clone)]\n\nstruct DummyRequest {}\n\n\n", "file_path": "mbus-api/examples/client/main.rs", "rank": 32, "score": 84887.79477254848 }, { "content": "#[derive(Serialize, Deserialize, Debug, Default, Clone)]\n\nstruct DummyRequest {}\n\n\n", "file_path": "mbus-api/examples/server/main.rs", "rank": 33, "score": 84887.79477254848 }, { "content": "#[derive(Serialize, Deserialize, Debug, Default, Clone)]\n\nstruct DummyReply {\n\n name: String,\n\n}\n\n\n\n// note: in this example we use the default message id\n\n// because we're adding the 
message types outside of the\n\n// library which should not be done so we have to fake\n\n// out the message id as `Default`.\n\nbus_impl_message_all!(DummyRequest, Default, DummyReply, Default);\n\n\n\n#[tokio::main]\n\nasync fn main() {\n\n env_logger::init_from_env(\n\n env_logger::Env::default()\n\n .filter_or(env_logger::DEFAULT_FILTER_ENV, \"info\"),\n\n );\n\n let cli_args = CliArgs::from_args();\n\n log::info!(\"Using args: {:?}\", cli_args);\n\n log::info!(\"CH: {}\", Channel::v0(v0::ChannelVs::Default).to_string());\n\n\n", "file_path": "mbus-api/examples/server/main.rs", "rank": 34, "score": 84887.79477254848 }, { "content": "struct NodeStoreInner {\n\n state: Mutex<HashMap<NodeId, (Node, Watchdog)>>,\n\n deadline: std::time::Duration,\n\n}\n\nimpl Default for NodeStoreInner {\n\n fn default() -> Self {\n\n Self {\n\n deadline: CliArgs::from_args().deadline.into(),\n\n state: Default::default(),\n\n }\n\n }\n\n}\n\n\n\nimpl NodeStore {\n\n /// Register a new node through the register information\n\n async fn register(&self, registration: Register) {\n\n let mut state = self.inner.state.lock().await;\n\n\n\n let mut watchdog = Watchdog::new(self.inner.deadline);\n\n let id = registration.id.clone();\n", "file_path": "services/node/src/server.rs", "rank": 35, "score": 84876.48133280838 }, { "content": "#[derive(Clone)]\n\nstruct ShareableContext {\n\n ctx: Rc<RefCell<RangeContext>>,\n\n ch: Rc<RefCell<IoChannel>>,\n\n}\n\n\n\nimpl ShareableContext {\n\n /// Create a new Shareable Context\n\n pub fn new(offset: u64, len: u64) -> ShareableContext {\n\n let nexus = Bdev::open_by_name(NEXUS_NAME, true).unwrap();\n\n Self {\n\n ctx: Rc::new(RefCell::new(RangeContext::new(offset, len))),\n\n ch: Rc::new(RefCell::new(nexus.get_channel().unwrap())),\n\n }\n\n }\n\n\n\n /// Mutably borrow the RangeContext\n\n pub fn borrow_mut_ctx(&self) -> RefMut<RangeContext> {\n\n self.ctx.borrow_mut()\n\n }\n\n\n\n /// Immutably borrow the IoChannel\n\n pub fn borrow_ch(&self) 
-> Ref<IoChannel> {\n\n self.ch.borrow()\n\n }\n\n}\n\n\n", "file_path": "mayastor/tests/lock_lba_range.rs", "rank": 36, "score": 84876.48133280838 }, { "content": "#[derive(Copy, Clone, Debug, Default, Deserialize, PartialEq, Serialize)]\n\nstruct MbrEntry {\n\n /// attributes of this MBR partition we set these all to zero, which\n\n /// includes the boot flag.\n\n attributes: u8,\n\n /// start in CHS format\n\n chs_start: [u8; 3],\n\n /// type of partition, in our case always 0xEE\n\n ent_type: u8,\n\n /// end of the partition\n\n chs_last: [u8; 3],\n\n /// lba start\n\n lba_start: u32,\n\n /// last sector of this partition\n\n num_sectors: u32,\n\n}\n\n\n\nimpl Pmbr {\n\n /// converts a slice into a MBR and validates the signature\n\n pub fn from_slice(slice: &[u8]) -> Result<Pmbr, LabelError> {\n\n let mut reader = Cursor::new(slice);\n", "file_path": "mayastor/src/bdev/nexus/nexus_label.rs", "rank": 37, "score": 83700.78641256334 }, { "content": "struct NvmeCreateContext {\n\n trid: spdk_nvme_transport_id,\n\n hostid: spdk_nvme_host_id,\n\n names: [*const c_char; MAX_NAMESPACES],\n\n prchk_flags: u32,\n\n count: u32,\n\n}\n\n\n\nunsafe impl Send for NvmeCreateContext {}\n\n\n\nimpl NvmeCreateContext {\n\n pub fn new(nvmf: &Nvmf) -> NvmeCreateContext {\n\n let port = format!(\"{}\", nvmf.port);\n\n let protocol = \"TCP\";\n\n\n\n let mut trid = spdk_nvme_transport_id::default();\n\n\n\n unsafe {\n\n copy_nonoverlapping(\n\n protocol.as_ptr() as *const c_void,\n", "file_path": "mayastor/src/bdev/dev/nvmf.rs", "rank": 38, "score": 83689.72074781852 }, { "content": "#[derive(Clone)]\n\nstruct NatsMessageBus {\n\n timeout_options: TimeoutOptions,\n\n connection: Connection,\n\n}\n\nimpl NatsMessageBus {\n\n pub async fn connect(server: &str) -> Connection {\n\n info!(\"Connecting to the nats server {}...\", server);\n\n // We retry in a loop until successful. 
Once connected the nats\n\n // library will handle reconnections for us.\n\n let interval = std::time::Duration::from_millis(500);\n\n let mut log_error = true;\n\n loop {\n\n match BusOptions::new()\n\n .max_reconnects(None)\n\n .connect_async(server)\n\n .await\n\n {\n\n Ok(connection) => {\n\n info!(\n\n \"Successfully connected to the nats server {}\",\n", "file_path": "mbus-api/src/mbus_nats.rs", "rank": 39, "score": 83689.72074781852 }, { "content": "struct NvmeCreateContext {\n\n trid: spdk_nvme_transport_id,\n\n hostid: spdk_nvme_host_id,\n\n names: [*const c_char; MAX_NAMESPACES],\n\n prchk_flags: u32,\n\n count: u32,\n\n}\n\n\n\nunsafe impl Send for NvmeCreateContext {}\n\n\n\nimpl NvmeCreateContext {\n\n pub fn new(nvme: &NVMe) -> NvmeCreateContext {\n\n let mut trid = spdk_nvme_transport_id::default();\n\n unsafe {\n\n copy_nonoverlapping(\n\n nvme.name.as_ptr() as *const c_void,\n\n &mut trid.traddr[0] as *const _ as *mut c_void,\n\n nvme.name.len(),\n\n );\n\n }\n", "file_path": "mayastor/src/bdev/dev/nvme.rs", "rank": 40, "score": 83689.72074781852 }, { "content": "struct LabelData {\n\n offset: u64,\n\n buf: DmaBuf,\n\n}\n\n\n\nimpl Nexus {\n\n /// Partition Type GUID for our \"MayaMeta\" partition.\n\n pub const METADATA_PARTITION_TYPE_ID: &'static str =\n\n \"27663382-e5e6-11e9-81b4-ca5ca5ca5ca5\";\n\n\n\n /// Generate a new nexus label based on the nexus configuration.\n\n /// The meta partition is fixed in size and aligned to a 1MB boundary.\n\n pub(crate) fn generate_label(&mut self) -> NexusLabel {\n\n let block_size: u32 = self.bdev.block_len();\n\n let num_blocks: u64 = self.min_num_blocks();\n\n\n\n //\n\n // (Protective) MBR\n\n let mut pmbr = Pmbr::default();\n\n\n", "file_path": "mayastor/src/bdev/nexus/nexus_label.rs", "rank": 41, "score": 83689.72074781852 }, { "content": "// Custom struct used to format a callsite location (filename and line number)\n\nstruct Location<'a> {\n\n meta: &'a Metadata<'a>,\n\n}\n\n\n\nimpl<'a> Location<'a> 
{\n\n fn new(meta: &'a Metadata<'a>) -> Self {\n\n Self {\n\n meta,\n\n }\n\n }\n\n}\n\n\n\n// Display callsite location (filename and line number) from metadata\n\nimpl std::fmt::Display for Location<'_> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n if let Some(file) = self.meta.file() {\n\n if let Some(line) = self.meta.line() {\n\n write!(f, \":{}:{}\", basename(file), line)?;\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "mayastor/src/logger.rs", "rank": 42, "score": 83523.66693596863 }, { "content": "#[derive(Debug)]\n\nstruct Filter<'a> {\n\n key: &'a str,\n\n value: &'a str,\n\n}\n\n\n", "file_path": "csi/src/findmnt.rs", "rank": 43, "score": 83518.36846929001 }, { "content": "// Custom struct used to format the log/trace LEVEL\n\nstruct FormatLevel<'a> {\n\n level: &'a tracing::Level,\n\n ansi: bool,\n\n}\n\n\n\nimpl<'a> FormatLevel<'a> {\n\n fn new(level: &'a tracing::Level, ansi: bool) -> Self {\n\n Self {\n\n level,\n\n ansi,\n\n }\n\n }\n\n}\n\n\n\n// Display trace LEVEL.\n\nimpl std::fmt::Display for FormatLevel<'_> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n const TRACE: &str = \"TRACE\";\n\n const DEBUG: &str = \"DEBUG\";\n\n const INFO: &str = \" INFO\";\n", "file_path": "mayastor/src/logger.rs", "rank": 44, "score": 82137.89371591713 }, { "content": "struct GpEntryNameVisitor;\n\n\n\nimpl<'de> Deserialize<'de> for GptName {\n\n fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n deserializer.deserialize_tuple_struct(\"GptName\", 36, GpEntryNameVisitor)\n\n }\n\n}\n\n\n\nimpl Serialize for GptName {\n\n fn serialize<S>(\n\n &self,\n\n serializer: S,\n\n ) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n // we can't use serialize_type_struct here as we want exactly 72 bytes\n", "file_path": "mayastor/src/bdev/nexus/nexus_label.rs", "rank": 45, "score": 81482.96435722352 }, { 
"content": "#[derive(Serialize, Deserialize, Debug, Default, Clone)]\n\nstruct SvcName(String);\n\n\n\nbus_impl_message_all!(GetSvcName, Default, SvcName, Default);\n\n\n\n#[async_trait]\n\nimpl ServiceSubscriber for ServiceHandler<GetSvcName> {\n\n async fn handler(&self, args: Arguments<'_>) -> Result<(), Error> {\n\n let msg: ReceivedMessage<GetSvcName> = args.request.try_into()?;\n\n\n\n let reply = SvcName(\"example\".into());\n\n\n\n println!(\"Received {:?} and replying {:?}\", msg.inner(), reply);\n\n\n\n msg.reply(reply).await\n\n }\n\n fn filter(&self) -> Vec<MessageId> {\n\n vec![GetSvcName::default().id()]\n\n }\n\n}\n\n\n", "file_path": "services/examples/service/main.rs", "rank": 46, "score": 79575.54507725342 }, { "content": "#[derive(Clone, Default)]\n\nstruct ServiceHandler<T> {\n\n data: PhantomData<T>,\n\n}\n\n\n", "file_path": "services/kiiss/src/server.rs", "rank": 47, "score": 79570.05352702425 }, { "content": "#[derive(Clone, Default)]\n\nstruct ServiceHandler<T> {\n\n data: PhantomData<T>,\n\n}\n\n\n", "file_path": "services/examples/service/main.rs", "rank": 48, "score": 79570.05352702425 }, { "content": "#[derive(Clone, Default)]\n\nstruct ServiceHandler<T> {\n\n data: PhantomData<T>,\n\n}\n\n\n\n/// Watchdog with which must be pet within the deadline, otherwise\n\n/// it triggers the `on_timeout` future\n", "file_path": "services/node/src/server.rs", "rank": 49, "score": 79570.05352702425 }, { "content": "#[derive(Clone, Default)]\n\nstruct ServiceHandler<T> {\n\n data: PhantomData<T>,\n\n}\n\n\n\nmacro_rules! 
impl_service_handler {\n\n // RequestType is the message bus request type\n\n // ServiceFnName is the name of the service function to route the request\n\n // into\n\n ($RequestType:ident, $ServiceFnName:ident) => {\n\n #[async_trait]\n\n impl ServiceSubscriber for ServiceHandler<$RequestType> {\n\n async fn handler(&self, args: Arguments<'_>) -> Result<(), Error> {\n\n let request: ReceivedMessage<$RequestType> =\n\n args.request.try_into()?;\n\n\n\n let service: &PoolSvc = args.context.get_state()?;\n\n let reply = service\n\n .$ServiceFnName(&request.inner())\n\n .await\n\n .map_err(|error| Error::ServiceError {\n", "file_path": "services/pool/src/server.rs", "rank": 50, "score": 79570.05352702425 }, { "content": "#[derive(Clone, Default)]\n\nstruct ServiceHandler<T> {\n\n data: PhantomData<T>,\n\n}\n\n\n\nmacro_rules! impl_service_handler {\n\n // RequestType is the message bus request type\n\n // ServiceFnName is the name of the service function to route the request\n\n // into\n\n ($RequestType:ident, $ServiceFnName:ident) => {\n\n #[async_trait]\n\n impl ServiceSubscriber for ServiceHandler<$RequestType> {\n\n async fn handler(&self, args: Arguments<'_>) -> Result<(), Error> {\n\n let request: ReceivedMessage<$RequestType> =\n\n args.request.try_into()?;\n\n\n\n let service: &VolumeSvc = args.context.get_state()?;\n\n let reply = service\n\n .$ServiceFnName(&request.inner())\n\n .await\n\n .map_err(|error| Error::ServiceError {\n", "file_path": "services/volume/src/server.rs", "rank": 51, "score": 79570.05352702425 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct SendPayload<T> {\n\n pub(crate) id: MessageId,\n\n pub(crate) sender: SenderId,\n\n pub(crate) data: T,\n\n}\n\n\n\n/// Error type which is returned over the bus\n\n/// for any other operation\n\n#[derive(Serialize, Deserialize, Debug, Snafu, strum_macros::AsRefStr)]\n\n#[allow(missing_docs)]\n\npub enum ReplyError {\n\n #[snafu(display(\"Generic Failure, message={}\", message))]\n\n 
WithMessage { message: String },\n\n #[snafu(display(\"Failed to deserialize the request: '{}'\", message))]\n\n DeserializeReq { message: String },\n\n #[snafu(display(\"Failed to process the request: '{}'\", message))]\n\n Process { message: String },\n\n}\n\n\n\n/// Payload returned to the sender\n", "file_path": "mbus-api/src/lib.rs", "rank": 52, "score": 79564.23163751332 }, { "content": "// Custom struct used to format trace context (span) information\n\nstruct CustomContext<'a, S, N>\n\nwhere\n\n S: tracing_core::subscriber::Subscriber + for<'s> LookupSpan<'s>,\n\n N: for<'w> FormatFields<'w> + 'static,\n\n{\n\n context: &'a FmtContext<'a, S, N>,\n\n span: Option<&'a tracing_core::span::Id>,\n\n ansi: bool,\n\n}\n\n\n\nimpl<'a, S, N> CustomContext<'a, S, N>\n\nwhere\n\n S: tracing_core::subscriber::Subscriber + for<'s> LookupSpan<'s>,\n\n N: for<'w> FormatFields<'w> + 'static,\n\n{\n\n fn new(\n\n context: &'a FmtContext<'a, S, N>,\n\n span: Option<&'a tracing_core::span::Id>,\n\n ansi: bool,\n\n ) -> Self {\n", "file_path": "mayastor/src/logger.rs", "rank": 53, "score": 72538.0500184803 }, { "content": "/// Type specific Message Bus api used to send a message of type `S` over the\n\n/// message bus with an additional type `R` use for request/reply semantics\n\n/// # Example:\n\n/// ```\n\n/// let msg = RequestToSend::<S, R>::new(payload, channel, bus);\n\n/// msg.request().await.unwrap();\n\n/// ```\n\nstruct SendMessage<'a, S, R> {\n\n payload: SendPayload<&'a S>,\n\n bus: DynBus,\n\n channel: Channel,\n\n reply_type: PhantomData<R>,\n\n}\n\n\n\nimpl<'a, S, R> SendMessage<'a, S, R>\n\nwhere\n\n S: Message + Serialize,\n\n for<'de> R: Deserialize<'de> + 'a,\n\n{\n\n /// each client needs a unique identification\n\n /// should this be a creation argument?\n\n fn name() -> SenderId {\n\n match std::env::var(\"NODE_NAME\") {\n\n Ok(val) => val,\n\n _ => \"default\".into(),\n\n }\n\n }\n", "file_path": "mbus-api/src/send.rs", "rank": 54, "score": 
71345.87118709314 }, { "content": "#[derive(Debug)]\n\nstruct UnixStream(tokio::net::UnixStream);\n\n\n\nimpl Connected for UnixStream {}\n\n\n\nimpl AsyncRead for UnixStream {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<std::io::Result<usize>> {\n\n Pin::new(&mut self.0).poll_read(cx, buf)\n\n }\n\n}\n\n\n\nimpl AsyncWrite for UnixStream {\n\n fn poll_write(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &[u8],\n\n ) -> Poll<std::io::Result<usize>> {\n", "file_path": "csi/src/server.rs", "rank": 55, "score": 70216.07016873712 }, { "content": "#[derive(Clone, Debug)]\n\nstruct Pg(*mut spdk_nvmf_poll_group);\n\n\n\n#[repr(C)]\n\n#[derive(Clone, Debug)]\n\npub(crate) struct PollGroup {\n\n pub thread: Mthread,\n\n group: Pg,\n\n}\n\n\n\nimpl PollGroup {\n\n pub fn new(tgt: *mut spdk_nvmf_tgt, mt: Mthread) -> Self {\n\n Self {\n\n thread: mt,\n\n group: Pg(unsafe { spdk_nvmf_poll_group_create(tgt) }),\n\n }\n\n }\n\n\n\n pub fn group_ptr(&self) -> *mut spdk_nvmf_poll_group {\n\n self.group.0\n\n }\n\n}\n", "file_path": "mayastor/src/subsys/nvmf/poll_groups.rs", "rank": 56, "score": 70202.28874915288 }, { "content": "struct Usage<'a>(&'a libc::rusage);\n\n\n\nimpl From<Usage<'_>> for ResourceUsage {\n\n fn from(usage: Usage) -> ResourceUsage {\n\n let rusage = usage.0;\n\n ResourceUsage {\n\n soft_faults: rusage.ru_minflt,\n\n hard_faults: rusage.ru_majflt,\n\n swaps: rusage.ru_nswap,\n\n in_block_ops: rusage.ru_inblock,\n\n out_block_ops: rusage.ru_oublock,\n\n ipc_msg_send: rusage.ru_msgsnd,\n\n ipc_msg_rcv: rusage.ru_msgrcv,\n\n signals: rusage.ru_nsignals,\n\n vol_csw: rusage.ru_nvcsw,\n\n invol_csw: rusage.ru_nivcsw,\n\n }\n\n }\n\n}\n\n\n\n/// Obtain resource usage statistics for the current process.\n\npub async fn get_resource_usage() -> Result<ResourceUsage, Error> {\n\n let rusage = getrusage(libc::RUSAGE_SELF)?;\n\n Ok(Usage(&rusage).into())\n\n}\n", "file_path": 
"mayastor/src/host/resource.rs", "rank": 57, "score": 70076.02868722616 }, { "content": "#[derive(Debug)]\n\nstruct UnixStream(tokio::net::UnixStream);\n\n\n\n#[derive(Debug)]\n\npub struct MayastorSvc;\n\n\n\n#[tonic::async_trait]\n\nimpl mayastor_server::Mayastor for MayastorSvc {\n\n #[instrument(level = \"debug\", err)]\n\n\n\n async fn create_pool(\n\n &self,\n\n request: Request<CreatePoolRequest>,\n\n ) -> GrpcResult<Pool> {\n\n let args = request.into_inner();\n\n\n\n if args.disks.is_empty() {\n\n return Err(Status::invalid_argument(\"Missing devices\"));\n\n }\n\n\n\n sync_config(pool_grpc::create(args)).await\n", "file_path": "mayastor/src/grpc/mayastor_grpc.rs", "rank": 58, "score": 68111.52462808506 }, { "content": "#[async_trait(? Send)]\n\npub trait Share: std::fmt::Debug {\n\n type Error;\n\n type Output: std::fmt::Display + std::fmt::Debug;\n\n async fn share_iscsi(&self) -> Result<Self::Output, Self::Error>;\n\n async fn share_nvmf(&self) -> Result<Self::Output, Self::Error>;\n\n async fn unshare(&self) -> Result<Self::Output, Self::Error>;\n\n fn shared(&self) -> Option<Protocol>;\n\n fn share_uri(&self) -> Option<String>;\n\n fn bdev_uri(&self) -> Option<String>;\n\n}\n", "file_path": "mayastor/src/core/share.rs", "rank": 59, "score": 67972.9911110317 }, { "content": "// Struct representing a property value in a udev::Device struct (and possibly\n\n// elsewhere). 
It is used to provide conversions via various \"From\" trait\n\n// implementations below.\n\nstruct Property<'a>(Option<&'a OsStr>);\n\n\n\nimpl From<Property<'_>> for String {\n\n fn from(property: Property) -> Self {\n\n String::from(property.0.map(|s| s.to_str()).flatten().unwrap_or(\"\"))\n\n }\n\n}\n\n\n\nimpl From<Property<'_>> for Option<String> {\n\n fn from(property: Property) -> Self {\n\n property.0.map(|s| s.to_str()).flatten().map(String::from)\n\n }\n\n}\n\n\n\nimpl From<Property<'_>> for Option<u32> {\n\n fn from(property: Property) -> Self {\n\n Option::<String>::from(property)\n\n .map(|s| s.parse().ok())\n\n .flatten()\n\n }\n", "file_path": "mayastor/src/host/blk_device.rs", "rank": 60, "score": 67770.98738196364 }, { "content": "/// structure holding our function and context\n\nstruct PollCtx<'a>(Box<dyn FnMut() -> i32 + 'a>);\n\n\n\n/// indirection to avoid raw pointers at upper layers\n\n#[inline(always)]\n\nextern \"C\" fn _cb(ctx: *mut c_void) -> i32 {\n\n let poll = unsafe { &mut *(ctx as *mut PollCtx) };\n\n (poll.0)()\n\n}\n\n\n\n/// Poller structure that allows us to pause, stop, resume periodic tasks\n\npub struct Poller<'a> {\n\n inner: NonNull<spdk_poller>,\n\n ctx: NonNull<PollCtx<'a>>,\n\n stopped: bool,\n\n}\n\n\n\nimpl<'a> Poller<'a> {\n\n /// stop the given poller and consumes self\n\n pub fn stop(mut self) {\n\n unsafe {\n", "file_path": "mayastor/src/core/poller.rs", "rank": 61, "score": 61990.42073546187 }, { "content": "class Registry extends EventEmitter {\n\n constructor () {\n\n super();\n\n this.nodes = {}; // node objects indexed by name\n\n // This gives a chance to override Node class used for creating new\n\n // node objects, which is useful for testing of the registry.\n\n this.Node = Node;\n\n }\n\n\n\n // Disconnect all nodes.\n\n close () {\n\n const self = this;\n\n Object.keys(this.nodes).forEach((name) => {\n\n self.removeNode(name);\n\n });\n\n }\n\n\n\n // Add mayastor node to the list of nodes and subscribe 
to events\n\n // emitted by the node to relay them further. It can be called also for\n\n // existing nodes to update their grpc endpoint.\n\n //\n\n // @param {string} name Name of the node.\n\n // @param {string} endpoint Endpoint for gRPC communication.\n\n addNode (name, endpoint) {\n\n let node = this.nodes[name];\n\n if (node) {\n\n // if grpc endpoint has not changed, then this will not do anything\n\n if (node.endpoint !== endpoint) {\n\n node.connect(endpoint);\n\n this.emit('node', {\n\n eventType: 'mod',\n\n object: node\n\n });\n\n }\n\n } else {\n\n node = new this.Node(name);\n\n node.connect(endpoint);\n\n this.emit('node', {\n\n eventType: 'new',\n\n object: node\n\n });\n\n this._registerNode(node);\n\n }\n\n }\n\n\n\n // Register node object in registry and listen to events on it.\n\n //\n\n // NOTE: This would be normally done in addNode() but for testing it's easier\n\n // to have a separate methods because in the tests we like to create our own\n\n // nodes.\n\n //\n\n // @param {object} node Node object to register.\n\n _registerNode (node) {\n\n assert(!this.nodes[node.name]);\n\n this.nodes[node.name] = node;\n\n\n\n log.info(\n\n `mayastor on node \"${node.name}\" and endpoint \"${node.endpoint}\" just joined`\n\n );\n\n\n\n eventObjects.forEach((objType) => {\n\n node.on(objType, (ev) => this.emit(objType, ev));\n\n });\n\n }\n\n\n\n // Remove mayastor node from the list of nodes and unsubscribe events.\n\n //\n\n // @param {string} name Name of the node to remove.\n\n removeNode (name) {\n\n const node = this.nodes[name];\n\n if (!node) return;\n\n delete this.nodes[name];\n\n node.disconnect();\n\n node.unbind();\n\n\n\n log.info(`mayastor on node \"${name}\" left`);\n\n this.emit('node', {\n\n eventType: 'del',\n\n object: node\n\n });\n\n\n\n eventObjects.forEach((objType) => {\n\n node.removeAllListeners(objType);\n\n });\n\n }\n\n\n\n // Get specified mayastor node or list of all mayastor nodes if called\n\n // without argument.\n\n 
//\n\n // @param {string} name Name of the node to return.\n\n // @returns {(object|Array)} Node object or null if not found or list of all objects.\n\n getNode (name) {\n\n if (name) {\n\n return this.nodes[name] || null;\n\n } else {\n\n return Object.values(this.nodes);\n\n }\n\n }\n\n\n\n // Get specified storage pool or list of all storage pools if called\n\n // without argument.\n\n //\n\n // @param {string} [name] Name of the storage pool.\n\n // @returns {(object|object[])} Pool object (null if not found) or list of all objects.\n\n getPool (name) {\n\n const pools = Object.values(this.nodes).reduce(\n\n (acc, node) => acc.concat(node.pools),\n\n []\n\n );\n\n if (name) {\n\n return pools.find((p) => p.name === name) || null;\n\n } else {\n\n return pools;\n\n }\n\n }\n\n\n\n // Get specified nexus object or list of nexus objects if called without\n\n // argument.\n\n //\n\n // @param {string} [uuid] ID of the nexus.\n\n // @returns {(object|object[])} Nexus object (null if not found) or list of all objects.\n\n getNexus (uuid) {\n\n const nexus = Object.values(this.nodes).reduce(\n\n (acc, node) => acc.concat(node.nexus),\n\n []\n\n );\n\n if (uuid) {\n\n return nexus.find((n) => n.uuid === uuid) || null;\n\n } else {\n\n return nexus;\n\n }\n\n }\n\n\n\n // Get replica objects with specified uuid or all replicas if called without\n\n // argument.\n\n //\n\n // @param {string} [uuid] Replica ID.\n\n // @returns {object[]} Array of matching replicas.\n\n getReplicaSet (uuid) {\n\n const replicas = Object.values(this.nodes).reduce(\n\n (acc, node) => acc.concat(node.getReplicas()),\n\n []\n\n );\n\n if (uuid) {\n\n return replicas.filter((r) => r.uuid === uuid);\n\n } else {\n\n return replicas;\n\n }\n\n }\n\n\n\n // Return total capacity of all pools summed together or capacity of pools on\n\n // a single node if node name is specified.\n\n //\n\n // @param {string} [nodeName] Name of the node to get the capacity for.\n\n // @returns {number} Total 
capacity in bytes.\n\n //\n\n getCapacity (nodeName) {\n\n let pools;\n\n\n\n if (nodeName) {\n\n pools = this.getPool().filter((p) => p.node.name === nodeName);\n\n } else {\n\n pools = this.getPool();\n\n }\n\n return pools\n\n .filter((p) => p.isAccessible())\n\n .reduce((acc, p) => acc + (p.capacity - p.used), 0);\n\n }\n\n\n\n // Return ordered list of storage pools suitable for new volume creation\n\n // sorted by preference (only a single pool from each node).\n\n //\n\n // The rules are simple:\n\n // 1) must be online (or degraded if there are no online pools)\n\n // 2) must have sufficient space\n\n // 3) the least busy pools first\n\n //\n\n choosePools (requiredBytes, mustNodes, shouldNodes) {\n\n let pools = this.getPool().filter((p) => {\n\n return (\n\n p.isAccessible() &&\n\n p.capacity - p.used >= requiredBytes &&\n\n (mustNodes.length === 0 || mustNodes.indexOf(p.node.name) >= 0)\n\n );\n\n });\n\n\n\n pools.sort((a, b) => {\n\n // Rule #1: User preference\n\n if (shouldNodes.length > 0) {\n\n if (\n\n shouldNodes.indexOf(a.node.name) >= 0 &&\n\n shouldNodes.indexOf(b.node.name) < 0\n\n ) {\n\n return -1;\n\n } else if (\n\n shouldNodes.indexOf(a.node.name) < 0 &&\n\n shouldNodes.indexOf(b.node.name) >= 0\n\n ) {\n\n return 1;\n\n }\n\n }\n\n\n\n // Rule #2: Avoid degraded pools whenever possible\n\n if (a.state === 'POOL_ONLINE' && b.state !== 'POOL_ONLINE') {\n\n return -1;\n\n } else if (a.state !== 'POOL_ONLINE' && b.state === 'POOL_ONLINE') {\n\n return 1;\n\n }\n\n\n\n // Rule #3: Use the least busy pool (with fewer replicas)\n\n if (a.replicas.length < b.replicas.length) {\n\n return -1;\n\n } else if (a.replicas.length > b.replicas.length) {\n\n return 1;\n\n }\n\n\n\n // Rule #4: Pools with more free space take precedence\n\n const aFree = a.capacity - a.used;\n\n const bFree = b.capacity - b.used;\n\n return bFree - aFree;\n\n });\n\n\n\n // only one pool from each node\n\n const nodes = [];\n\n pools = pools.filter((p) => {\n\n if 
(nodes.indexOf(p.node) < 0) {\n\n nodes.push(p.node);\n\n return true;\n\n } else {\n\n return false;\n\n }\n\n });\n\n\n\n return pools;\n\n }\n", "file_path": "csi/moac/registry.js", "rank": 62, "score": 60567.02342790021 }, { "content": "/// try to read an env variable or returns the default when not found\n\nfn try_from_env<T>(name: &str, default: T) -> T\n\nwhere\n\n T: FromStr + Display + Copy,\n\n <T as FromStr>::Err: Debug + Display,\n\n{\n\n std::env::var(name).map_or_else(\n\n |_| default,\n\n |v| {\n\n match v.parse::<T>() {\n\n Ok(val) => {\n\n info!(\"Overriding {} value to '{}'\", name, val);\n\n val\n\n },\n\n Err(e) => {\n\n error!(\"Invalid value: {} (error {}) specified for {}. Reverting to default ({})\", v, e, name, default);\n\n default\n\n }\n\n }\n\n },\n\n )\n", "file_path": "mayastor/src/subsys/config/opts.rs", "rank": 63, "score": 60267.17815899851 }, { "content": "#[async_trait]\n\n#[clonable]\n\npub trait NodeReplicaTrait: Send + Sync + Debug + Clone {\n\n /// Fetch replicas on all pools via gRPC or MBUS\n\n async fn fetch_replicas(&self) -> Result<Vec<Replica>, SvcError>;\n\n\n\n /// Create a replica on a pool via gRPC or MBUS\n\n async fn create_replica(\n\n &self,\n\n request: &CreateReplica,\n\n ) -> Result<Replica, SvcError>;\n\n\n\n /// Share a replica on a pool via gRPC or MBUS\n\n async fn share_replica(\n\n &self,\n\n request: &ShareReplica,\n\n ) -> Result<String, SvcError>;\n\n\n\n /// Unshare a replica on a pool via gRPC or MBUS\n\n async fn unshare_replica(\n\n &self,\n\n request: &UnshareReplica,\n", "file_path": "services/common/src/wrapper/v0/node_traits.rs", "rank": 64, "score": 60165.52705324485 }, { "content": "#[async_trait]\n\n#[clonable]\n\n#[allow(unused_variables)]\n\npub trait NodeNexusTrait: Send + Sync + Debug + Clone {\n\n /// Get the internal nexuses\n\n fn nexuses(&self) -> Vec<Nexus> {\n\n vec![]\n\n }\n\n\n\n /// Fetch all nexuses via gRPC or MBUS\n\n async fn fetch_nexuses(&self) -> Result<Vec<Nexus>, 
SvcError> {\n\n Err(SvcError::NotImplemented {})\n\n }\n\n\n\n /// Create a nexus on a node via gRPC or MBUS\n\n async fn create_nexus(\n\n &self,\n\n request: &CreateNexus,\n\n ) -> Result<Nexus, SvcError> {\n\n Err(SvcError::NotImplemented {})\n\n }\n\n\n\n /// Destroy a nexus on a node via gRPC or MBUS\n", "file_path": "services/common/src/wrapper/v0/node_traits.rs", "rank": 65, "score": 60165.52705324485 }, { "content": "#[async_trait]\n\n#[clonable]\n\npub trait NodePoolTrait: Send + Sync + Debug + Clone {\n\n /// Fetch all pools via gRPC or MBUS\n\n async fn fetch_pools(&self) -> Result<Vec<Pool>, SvcError>;\n\n\n\n /// Create a pool on a node via gRPC or MBUS\n\n async fn create_pool(&self, request: &CreatePool)\n\n -> Result<Pool, SvcError>;\n\n\n\n /// Destroy a pool on a node via gRPC or MBUS\n\n async fn destroy_pool(&self, request: &DestroyPool)\n\n -> Result<(), SvcError>;\n\n\n\n /// Update internal pool list following a create\n\n async fn on_create_pool(&mut self, pool: &Pool, replicas: &[Replica]);\n\n /// Update internal pool list following a destroy\n\n fn on_destroy_pool(&mut self, pool: &PoolId);\n\n}\n\n\n\n/// Trait for a Node Nexus which can be implemented to interact with mayastor\n\n/// node nexuses either via gRPC or MBUS or with a service via MBUS\n", "file_path": "services/common/src/wrapper/v0/node_traits.rs", "rank": 66, "score": 60165.52705324485 }, { "content": " constructor () {\n\n super();\n\n this.nodes = {}; // node objects indexed by name\n\n // This gives a chance to override Node class used for creating new\n\n // node objects, which is useful for testing of the registry.\n\n this.Node = Node;\n", "file_path": "csi/moac/registry.js", "rank": 67, "score": 59625.25891936448 }, { "content": " close () {\n\n const self = this;\n\n Object.keys(this.nodes).forEach((name) => {\n\n self.removeNode(name);\n\n });\n", "file_path": "csi/moac/registry.js", "rank": 68, "score": 59625.25891936448 }, { "content": 
"#[async_trait]\n\n#[clonable]\n\n#[allow(unused_variables)]\n\npub trait NodeNexusChildTrait: Send + Sync + Debug + Clone {\n\n /// Fetch all children via gRPC or MBUS\n\n async fn fetch_children(&self) -> Result<Vec<Child>, SvcError> {\n\n Err(SvcError::NotImplemented {})\n\n }\n\n\n\n /// Add a child to a nexus via gRPC or MBUS\n\n async fn add_child(\n\n &self,\n\n request: &AddNexusChild,\n\n ) -> Result<Child, SvcError> {\n\n Err(SvcError::NotImplemented {})\n\n }\n\n\n\n /// Remove a child from a nexus via gRPC or MBUS\n\n async fn remove_child(\n\n &self,\n\n request: &RemoveNexusChild,\n\n ) -> Result<(), SvcError> {\n\n Err(SvcError::NotImplemented {})\n\n }\n\n\n\n /// Update internal nexus children following a create\n\n fn on_add_child(&mut self, nexus: &NexusId, child: &Child) {}\n\n /// Update internal nexus children following a remove\n\n fn on_remove_child(&mut self, request: &RemoveNexusChild) {}\n\n}\n\n\n\n/// Trait for a Node which can be implemented to interact with mayastor\n\n/// node replicas either via gRPC or MBUS or with a service via MBUS\n", "file_path": "services/common/src/wrapper/v0/node_traits.rs", "rank": 69, "score": 59338.417500877666 }, { "content": " addNode (name, endpoint) {\n\n let node = this.nodes[name];\n\n if (node) {\n\n // if grpc endpoint has not changed, then this will not do anything\n\n if (node.endpoint !== endpoint) {\n\n node.connect(endpoint);\n\n this.emit('node', {\n\n eventType: 'mod',\n\n object: node\n\n });\n\n }\n\n } else {\n\n node = new this.Node(name);\n\n node.connect(endpoint);\n\n this.emit('node', {\n\n eventType: 'new',\n\n object: node\n\n });\n\n this._registerNode(node);\n\n }\n", "file_path": "csi/moac/registry.js", "rank": 70, "score": 58712.33322945265 }, { "content": " getNode (name) {\n\n if (name) {\n\n return this.nodes[name] || null;\n\n } else {\n\n return Object.values(this.nodes);\n\n }\n", "file_path": "csi/moac/registry.js", "rank": 71, "score": 58712.33322945265 }, { "content": 
" removeNode (name) {\n\n const node = this.nodes[name];\n\n if (!node) return;\n\n delete this.nodes[name];\n\n node.disconnect();\n\n node.unbind();\n\n\n\n log.info(`mayastor on node \"${name}\" left`);\n\n this.emit('node', {\n\n eventType: 'del',\n\n object: node\n\n });\n\n\n\n eventObjects.forEach((objType) => {\n\n node.removeAllListeners(objType);\n\n });\n", "file_path": "csi/moac/registry.js", "rank": 72, "score": 58712.33322945265 }, { "content": " getCapacity (nodeName) {\n\n let pools;\n\n\n\n if (nodeName) {\n\n pools = this.getPool().filter((p) => p.node.name === nodeName);\n\n } else {\n\n pools = this.getPool();\n\n }\n\n return pools\n\n .filter((p) => p.isAccessible())\n\n .reduce((acc, p) => acc + (p.capacity - p.used), 0);\n", "file_path": "csi/moac/registry.js", "rank": 73, "score": 58712.33322945265 }, { "content": " choosePools (requiredBytes, mustNodes, shouldNodes) {\n\n let pools = this.getPool().filter((p) => {\n\n return (\n\n p.isAccessible() &&\n\n p.capacity - p.used >= requiredBytes &&\n\n (mustNodes.length === 0 || mustNodes.indexOf(p.node.name) >= 0)\n\n );\n\n });\n\n\n\n pools.sort((a, b) => {\n\n // Rule #1: User preference\n\n if (shouldNodes.length > 0) {\n\n if (\n\n shouldNodes.indexOf(a.node.name) >= 0 &&\n\n shouldNodes.indexOf(b.node.name) < 0\n\n ) {\n\n return -1;\n\n } else if (\n\n shouldNodes.indexOf(a.node.name) < 0 &&\n\n shouldNodes.indexOf(b.node.name) >= 0\n\n ) {\n\n return 1;\n\n }\n\n }\n\n\n\n // Rule #2: Avoid degraded pools whenever possible\n\n if (a.state === 'POOL_ONLINE' && b.state !== 'POOL_ONLINE') {\n\n return -1;\n\n } else if (a.state !== 'POOL_ONLINE' && b.state === 'POOL_ONLINE') {\n\n return 1;\n\n }\n\n\n\n // Rule #3: Use the least busy pool (with fewer replicas)\n\n if (a.replicas.length < b.replicas.length) {\n\n return -1;\n\n } else if (a.replicas.length > b.replicas.length) {\n\n return 1;\n\n }\n\n\n\n // Rule #4: Pools with more free space take precedence\n\n const aFree = a.capacity 
- a.used;\n\n const bFree = b.capacity - b.used;\n\n return bFree - aFree;\n\n });\n\n\n\n // only one pool from each node\n\n const nodes = [];\n\n pools = pools.filter((p) => {\n\n if (nodes.indexOf(p.node) < 0) {\n\n nodes.push(p.node);\n\n return true;\n\n } else {\n\n return false;\n\n }\n\n });\n\n\n\n return pools;\n", "file_path": "csi/moac/registry.js", "rank": 74, "score": 58712.33322945265 }, { "content": " getNexus (uuid) {\n\n const nexus = Object.values(this.nodes).reduce(\n\n (acc, node) => acc.concat(node.nexus),\n\n []\n\n );\n\n if (uuid) {\n\n return nexus.find((n) => n.uuid === uuid) || null;\n\n } else {\n\n return nexus;\n\n }\n", "file_path": "csi/moac/registry.js", "rank": 75, "score": 58712.33322945265 }, { "content": "const Registry = require('../registry');\n", "file_path": "csi/moac/test/registry_test.js", "rank": 76, "score": 58712.33322945265 }, { "content": " _registerNode (node) {\n\n assert(!this.nodes[node.name]);\n\n this.nodes[node.name] = node;\n\n\n\n log.info(\n\n `mayastor on node \"${node.name}\" and endpoint \"${node.endpoint}\" just joined`\n\n );\n\n\n\n eventObjects.forEach((objType) => {\n\n node.on(objType, (ev) => this.emit(objType, ev));\n\n });\n", "file_path": "csi/moac/registry.js", "rank": 77, "score": 58712.33322945265 }, { "content": " getPool (name) {\n\n const pools = Object.values(this.nodes).reduce(\n\n (acc, node) => acc.concat(node.pools),\n\n []\n\n );\n\n if (name) {\n\n return pools.find((p) => p.name === name) || null;\n\n } else {\n\n return pools;\n\n }\n", "file_path": "csi/moac/registry.js", "rank": 78, "score": 58712.33322945265 }, { "content": "pub trait BdevCreateDestroy: CreateDestroy + GetName + std::fmt::Debug {}\n\n\n\nimpl<T: CreateDestroy + GetName + std::fmt::Debug> BdevCreateDestroy for T {}\n\n\n\n#[async_trait(?Send)]\n", "file_path": "mayastor/src/bdev/mod.rs", "rank": 79, "score": 58492.331095938905 }, { "content": " getReplicaSet (uuid) {\n\n const replicas = 
Object.values(this.nodes).reduce(\n\n (acc, node) => acc.concat(node.getReplicas()),\n\n []\n\n );\n\n if (uuid) {\n\n return replicas.filter((r) => r.uuid === uuid);\n\n } else {\n\n return replicas;\n\n }\n", "file_path": "csi/moac/registry.js", "rank": 80, "score": 57826.941675910435 }, { "content": "\n\nuse super::rebuild_api::*;\n\n\n\n/// Global list of rebuild jobs using a static OnceCell\n\npub(super) struct RebuildInstances {\n\n inner: UnsafeCell<HashMap<String, Box<RebuildJob>>>,\n\n}\n\n\n\nunsafe impl Sync for RebuildInstances {}\n\nunsafe impl Send for RebuildInstances {}\n\n\n\n/// Result returned by each segment task worker\n\n/// used to communicate with the management task indicating that the\n\n/// segment task worker is ready to copy another segment\n\n#[derive(Debug, Clone)]\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 81, "score": 50917.73763053884 }, { "content": " error: job.locked_copy_one(id, blk).await.err(),\n\n };\n\n\n\n let task = &mut job.task_pool.tasks[id];\n\n if let Err(e) = task.sender.start_send(r) {\n\n error!(\"Failed to notify job of segment id: {} blk: {} completion, err: {}\", id, blk, e.verbose());\n\n }\n\n });\n\n\n\n Some(next)\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub(super) struct RebuildStates {\n\n /// Current state of the rebuild job\n\n pub current: RebuildState,\n\n\n\n /// Pending state for the rebuild job\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 82, "score": 50909.428110710935 }, { "content": " pending: Option<RebuildState>,\n\n}\n\n\n\nimpl std::fmt::Display for RebuildStates {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{:?}\", self)\n\n }\n\n}\n\n\n\nimpl Default for RebuildState {\n\n fn default() -> Self {\n\n RebuildState::Init\n\n }\n\n}\n\n\n\nimpl RebuildStates {\n\n /// Set's the next pending state\n\n /// if one is already set then override only if flag is set\n\n pub(self) fn set_pending(\n\n &mut 
self,\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 83, "score": 50907.328001294496 }, { "content": "#![warn(missing_docs)]\n\n#![allow(clippy::unknown_clippy_lints)]\n\n\n\nuse std::{cell::UnsafeCell, collections::HashMap};\n\n\n\nuse crossbeam::channel::unbounded;\n\nuse futures::{\n\n channel::{mpsc, oneshot},\n\n StreamExt,\n\n};\n\nuse once_cell::sync::OnceCell;\n\nuse snafu::ResultExt;\n\n\n\nuse spdk_sys::{spdk_get_thread, SPDK_BDEV_LARGE_BUF_MAX_SIZE};\n\n\n\nuse crate::{\n\n bdev::VerboseError,\n\n core::{Bdev, BdevHandle, DmaBuf, RangeContext, Reactors},\n\n nexus_uri::bdev_get_name,\n\n};\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 84, "score": 50903.49343903282 }, { "content": " })\n\n }\n\n\n\n async fn await_all_tasks(&mut self) {\n\n debug!(\n\n \"Awaiting all active tasks({}) for rebuild {}\",\n\n self.task_pool.active, self.destination\n\n );\n\n while self.task_pool.active > 0 {\n\n if self.await_one_task().await.is_none() {\n\n error!(\"Failed to wait for {} rebuild tasks due mpsc channel failure.\", self.task_pool.active);\n\n self.fail();\n\n return;\n\n }\n\n }\n\n debug!(\n\n \"Finished awaiting all tasks for rebuild {}\",\n\n self.destination\n\n );\n\n }\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 85, "score": 50902.46046325827 }, { "content": " })?,\n\n read_write,\n\n claim,\n\n )\n\n .context(NoBdevHandle {\n\n bdev: uri,\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 86, "score": 50902.26511832927 }, { "content": " );\n\n }\n\n };\n\n\n\n if job.reconcile_to_state(RebuildState::Running) {\n\n job.run().await;\n\n }\n\n });\n\n }\n\n _ => {}\n\n }\n\n }\n\n\n\n /// Get the rebuild job instances container, we ensure that this can only\n\n /// ever be called on a properly allocated thread\n\n pub(super) fn get_instances() -> &'static mut HashMap<String, Box<Self>> {\n\n let thread = unsafe { spdk_get_thread() };\n\n if 
thread.is_null() {\n\n panic!(\"not called from SPDK thread\")\n\n }\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 87, "score": 50901.30109765168 }, { "content": " /// a change to `state` is pending\n\n fn pending_equals(&self, state: RebuildState) -> bool {\n\n self.pending == Some(state)\n\n }\n\n\n\n /// reconcile the pending state into the current state\n\n fn reconcile(&mut self) -> RebuildState {\n\n if let Some(pending) = self.pending {\n\n self.current = pending;\n\n self.pending = None;\n\n }\n\n\n\n self.current\n\n }\n\n}\n\n\n\nimpl RebuildJob {\n\n /// Client operations are now allowed to skip over previous operations\n\n fn exec_client_op(\n\n &mut self,\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 88, "score": 50901.26669884482 }, { "content": " if rebuild_list.contains_key(&self.destination) {\n\n Err(RebuildError::JobAlreadyExists {\n\n job: self.destination,\n\n })\n\n } else {\n\n let _ =\n\n rebuild_list.insert(self.destination.clone(), Box::new(self));\n\n Ok(())\n\n }\n\n }\n\n\n\n /// Returns a new rebuild job based on the parameters\n\n #[allow(clippy::same_item_push)]\n\n pub(super) fn new(\n\n nexus: &str,\n\n source: &str,\n\n destination: &str,\n\n range: std::ops::Range<u64>,\n\n notify_fn: fn(String, String) -> (),\n\n ) -> Result<Self, RebuildError> {\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 89, "score": 50901.052733064105 }, { "content": " states: Default::default(),\n\n complete_chan: Vec::new(),\n\n error: None,\n\n })\n\n }\n\n\n\n // Runs the management async task that kicks off N rebuild copy tasks and\n\n // awaits each completion. 
When any task completes it kicks off another\n\n // until the bdev is fully rebuilt\n\n async fn run(&mut self) {\n\n self.start_all_tasks();\n\n while self.task_pool.active > 0 {\n\n match self.await_one_task().await {\n\n Some(r) => match r.error {\n\n None => {\n\n match self.states.pending {\n\n None | Some(RebuildState::Running) => {\n\n self.start_task_by_id(r.id);\n\n }\n\n _ => {\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 90, "score": 50900.99649053317 }, { "content": " fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{:?}\", self)\n\n }\n\n}\n\n\n\nimpl ClientOperations for RebuildJob {\n\n fn stats(&self) -> RebuildStats {\n\n let blocks_total = self.range.end - self.range.start;\n\n\n\n // segment size may not be aligned to the total size\n\n let blocks_recovered = std::cmp::min(\n\n self.task_pool.segments_done * self.segment_size_blks,\n\n blocks_total,\n\n );\n\n\n\n let progress = (blocks_recovered * 100) / blocks_total;\n\n\n\n info!(\n\n \"State: {}, Src: {}, Dst: {}, range: {:?}, next: {}, \\\n\n block_size: {}, segment_sz: {}, recovered_blks: {}, progress: {}%\",\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 91, "score": 50900.81585527098 }, { "content": " ///\n\n /// The lock and unlock functions internally reference the RangeContext as a\n\n /// raw pointer, so rust cannot correctly manage its lifetime. The\n\n /// RangeContext MUST NOT be dropped until after the lock and unlock have\n\n /// completed.\n\n ///\n\n /// The use of RangeContext here is safe because it is stored on the stack\n\n /// for the duration of the calls to lock and unlock.\n\n async fn locked_copy_one(\n\n &mut self,\n\n id: usize,\n\n blk: u64,\n\n ) -> Result<(), RebuildError> {\n\n let len = self.get_segment_size_blks(blk);\n\n // The nexus children have metadata and data partitions, whereas the\n\n // nexus has a data partition only. 
Because we are locking the range on\n\n // the nexus, we need to calculate the offset from the start of the data\n\n // partition.\n\n let mut ctx = RangeContext::new(blk - self.range.start, len);\n\n let ch = self\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 92, "score": 50900.48105869039 }, { "content": " self.nexus_descriptor\n\n .unlock_lba_range(&mut ctx, &ch)\n\n .await\n\n .context(RangeUnLockError {\n\n blk,\n\n len,\n\n })?;\n\n\n\n result\n\n }\n\n\n\n /// Copies one segment worth of data from source into destination.\n\n async fn copy_one(\n\n &mut self,\n\n id: usize,\n\n blk: u64,\n\n ) -> Result<(), RebuildError> {\n\n let mut copy_buffer: DmaBuf;\n\n let source_hdl = RebuildJob::open_handle(&self.source, false, false)?;\n\n let destination_hdl =\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 93, "score": 50896.806185285 }, { "content": " // await all active tasks as we might still have\n\n // ongoing IO. do we need a timeout?\n\n self.await_all_tasks().await;\n\n break;\n\n }\n\n }\n\n }\n\n Some(e) => {\n\n error!(\"Failed to rebuild segment id {} block {} with error: {}\", r.id, r.blk, e);\n\n self.fail();\n\n self.await_all_tasks().await;\n\n self.error = Some(e);\n\n break;\n\n }\n\n },\n\n None => {\n\n // all senders have disconnected, out of place termination?\n\n error!(\"Out of place termination with potentially {} active tasks\", self.task_pool.active);\n\n let _ = self.terminate();\n\n break;\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 94, "score": 50896.806185285 }, { "content": " let source_hdl = RebuildJob::open_handle(source, false, false)?;\n\n let destination_hdl =\n\n RebuildJob::open_handle(destination, true, false)?;\n\n\n\n if !Self::validate(\n\n &source_hdl.get_bdev(),\n\n &destination_hdl.get_bdev(),\n\n &range,\n\n ) {\n\n return Err(RebuildError::InvalidParameters {});\n\n };\n\n\n\n // validation passed, block size is the same for both\n\n let block_size = 
destination_hdl.get_bdev().block_len() as u64;\n\n let segment_size_blks = (SEGMENT_SIZE / block_size) as u64;\n\n\n\n let mut tasks = RebuildTasks {\n\n tasks: Vec::new(),\n\n // only sending one message per channel at a time so we don't need\n\n // the extra buffer\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 95, "score": 50896.806185285 }, { "content": " }\n\n }\n\n }\n\n self.reconcile();\n\n }\n\n\n\n /// Return the size of the segment to be copied.\n\n fn get_segment_size_blks(&self, blk: u64) -> u64 {\n\n // Adjust the segments size for the last segment\n\n if (blk + self.segment_size_blks) > self.range.end {\n\n return self.range.end - blk;\n\n }\n\n self.segment_size_blks\n\n }\n\n\n\n /// Copies one segment worth of data from source into destination. During\n\n /// this time the LBA range being copied is locked so that there cannot be\n\n /// front end I/O to the same LBA range.\n\n ///\n\n /// # Safety\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 96, "score": 50896.806185285 }, { "content": " nexus.to_string(),\n\n );\n\n\n\n let nexus_descriptor =\n\n Bdev::open_by_name(&nexus, false).context(BdevNotFound {\n\n bdev: nexus.to_string(),\n\n })?;\n\n\n\n Ok(Self {\n\n nexus,\n\n nexus_descriptor,\n\n source,\n\n destination,\n\n next: range.start,\n\n range,\n\n block_size,\n\n segment_size_blks,\n\n task_pool: tasks,\n\n notify_fn,\n\n notify_chan: unbounded::<RebuildState>(),\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 97, "score": 50896.806185285 }, { "content": " .nexus_descriptor\n\n .get_channel()\n\n .expect(\"Failed to get nexus channel\");\n\n\n\n // Wait for LBA range to be locked.\n\n // This prevents other I/Os being issued to this LBA range whilst it is\n\n // being rebuilt.\n\n self.nexus_descriptor\n\n .lock_lba_range(&mut ctx, &ch)\n\n .await\n\n .context(RangeLockError {\n\n blk,\n\n len,\n\n })?;\n\n\n\n // Perform the copy\n\n let result = self.copy_one(id, blk).await;\n\n\n\n 
// Wait for the LBA range to be unlocked.\n\n // This allows others I/Os to be issued to this LBA range once again.\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 98, "score": 50896.806185285 }, { "content": " channel: mpsc::channel(0),\n\n active: 0,\n\n total: SEGMENT_TASKS,\n\n segments_done: 0,\n\n };\n\n\n\n for _ in 0 .. tasks.total {\n\n let copy_buffer = destination_hdl\n\n .dma_malloc(segment_size_blks * block_size)\n\n .context(NoCopyBuffer {})?;\n\n tasks.tasks.push(RebuildTask {\n\n buffer: copy_buffer,\n\n sender: tasks.channel.0.clone(),\n\n error: None,\n\n });\n\n }\n\n\n\n let (source, destination, nexus) = (\n\n source.to_string(),\n\n destination.to_string(),\n", "file_path": "mayastor/src/rebuild/rebuild_impl.rs", "rank": 99, "score": 50896.806185285 } ]
Rust
src/day24.rs
Strackeror/aoc_2021_rust
076582e489dd9ea33b9cf4846626a81f1dcbde4b
use std::{cell::RefCell, collections::HashMap}; use itertools::Itertools; #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Param { Number(i64), Variable(char), } #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Expr { Input(char), Add(char, Param), Mul(char, Param), Div(char, Param), Mod(char, Param), Eql(char, Param), } peg::parser! { grammar instructions_parser() for str { rule identifier() -> char = c:$(['a'..='z']) {c.as_bytes()[0] as char} rule number() -> Param = n:$("-"? ['0'..='9']+) { Param::Number(n.parse().unwrap())} rule variable() -> Param = i:identifier() { Param::Variable(i) } rule param() -> Param = number() / variable() rule input() -> Expr = "inp" " " t:identifier() {Expr::Input(t)} rule add() -> Expr = "add" " " t:identifier() " " p:param() {Expr::Add(t, p)} rule mul() -> Expr = "mul" " " t:identifier() " " p:param() {Expr::Mul(t, p)} rule div() -> Expr = "div" " " t:identifier() " " p:param() {Expr::Div(t, p)} rule modu() -> Expr = "mod" " " t:identifier() " " p:param() {Expr::Mod(t, p)} rule eql() -> Expr = "eql" " " t:identifier() " " p:param() {Expr::Eql(t, p)} rule inst() -> Expr = input() / add() / mul() / div() / modu() / eql() pub rule list() -> Vec<Expr> = l:inst() ** "\n" {l} } } fn cid(c: char) -> usize { c as usize - 'w' as usize } fn val(state: [i64; 4], p: Param) -> i64 { match p { Param::Number(n) => n, Param::Variable(c) => state[cid(c)], } } fn process_expr(expr: Expr, mut state: [i64; 4]) -> [i64; 4] { match expr { Expr::Add(c, b) => { state[cid(c)] = state[cid(c)] + val(state, b); } Expr::Mul(c, b) => { state[cid(c)] = state[cid(c)] * val(state, b); } Expr::Div(c, b) => { state[cid(c)] = state[cid(c)] / val(state, b); } Expr::Mod(c, b) => { state[cid(c)] = state[cid(c)] % val(state, b); } Expr::Eql(c, b) => { state[cid(c)] = (state[cid(c)] == val(state, b)) as _; } _ => unreachable!(), }; state } fn processr(exprs: &[Expr]) -> Option<i64> { fn cid(c: char) -> usize { c as usize - 'w' as usize } fn param(p: Param, 
state: &[i64]) -> i64 { match p { Param::Number(n) => n, Param::Variable(c) => state[cid(c)], } } fn recursive( idx: usize, total: i64, state: [i64; 4], exprs: &[Expr], mem: &mut HashMap<(usize, [i64; 4]), Option<i64>>, ) -> Option<i64> { if idx == exprs.len() { return if state[3] == 0 { dbg!(Some(total)) } else { None }; } if let Some(o) = mem.get(&(idx, state)) { return *o; } let mut nstate = state; let result = match exprs[idx] { Expr::Input(n) => (0..=9) .rev() .filter_map(|d| { nstate[cid(n)] = d; recursive(idx + 1, total * 10 + d, nstate, exprs, mem) }) .next(), Expr::Eql(c, p) => { nstate[cid(c)] = (nstate[cid(c)] == param(p, &nstate)) as i64; recursive(idx + 1, total, nstate, exprs, mem) } Expr::Add(c, p) => { nstate[cid(c)] = nstate[cid(c)] + param(p, &nstate); recursive(idx + 1, total, nstate, exprs, mem) } Expr::Mul(c, p) => { nstate[cid(c)] = nstate[cid(c)] * param(p, &nstate); recursive(idx + 1, total, nstate, exprs, mem) } Expr::Div(c, p) => { nstate[cid(c)] = nstate[cid(c)] / param(p, &nstate); recursive(idx + 1, total, nstate, exprs, mem) } Expr::Mod(c, p) => { nstate[cid(c)] = nstate[cid(c)] % param(p, &nstate); recursive(idx + 1, total, nstate, exprs, mem) } }; mem.insert((idx, state), result); result } recursive(0, 0, [0, 0, 0, 0], exprs, &mut HashMap::new()) } #[test] fn example1() { let parsed = instructions_parser::list(&std::fs::read_to_string("input/day24/example1.txt").unwrap()); dbg!(&parsed); processr(&parsed.unwrap()); } #[test] fn example2() { let parsed = instructions_parser::list(&std::fs::read_to_string("input/day24/example2.txt").unwrap()); dbg!(&parsed); processr(&parsed.unwrap()); } #[test] fn input() { let parsed = instructions_parser::list(&std::fs::read_to_string("input/day24/input.txt").unwrap()); dbg!(&parsed); processr(&parsed.unwrap()); }
use std::{cell::RefCell, collections::HashMap}; use itertools::Itertools; #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Param { Number(i64), Variable(char), } #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Expr { Input(char), Add(char, Param), Mul(char, Param), Div(char, Param), Mod(char, Param), Eql(char, Param), } peg::parser! { grammar instructions_parser() for str { rule identifier() -> char = c:$(['a'..='z']) {c.as_bytes()[0] as char} rule number() -> Param = n:$("-"? ['0'..='9']+) { Param::Number(n.parse().unwrap())} rule variable() -> Param = i:identifier() { Param::Variable(i) } rule param() -> Param = number() / variable() rule input() -> Expr = "inp" " " t:identifier() {Expr::Input(t)} rule add() -> Expr = "add" " " t:identifier() " " p:param() {Expr::Add(t, p)} rule mul() -> Expr = "mul" " " t:identifier() " " p:param() {Expr::Mul(t, p)} rule div() -> Expr = "div" " " t:identifier() " " p:param() {Expr::Div(t, p)} rule modu() -> Expr = "mod" " " t:identifier() " " p:param() {Expr::Mod(t, p)} rule eql() -> Expr = "eql" " " t:identifier() " " p:param() {Expr::Eql(t, p)} rule inst() -> Expr = input() / add() / mul() / div() / modu() / eql() pub rule list() -> Vec<Expr> = l:inst() ** "\n" {l} } } fn cid(c: char) -> usize { c as usize - 'w' as usize }
fn process_expr(expr: Expr, mut state: [i64; 4]) -> [i64; 4] { match expr { Expr::Add(c, b) => { state[cid(c)] = state[cid(c)] + val(state, b); } Expr::Mul(c, b) => { state[cid(c)] = state[cid(c)] * val(state, b); } Expr::Div(c, b) => { state[cid(c)] = state[cid(c)] / val(state, b); } Expr::Mod(c, b) => { state[cid(c)] = state[cid(c)] % val(state, b); } Expr::Eql(c, b) => { state[cid(c)] = (state[cid(c)] == val(state, b)) as _; } _ => unreachable!(), }; state } fn processr(exprs: &[Expr]) -> Option<i64> { fn cid(c: char) -> usize { c as usize - 'w' as usize } fn param(p: Param, state: &[i64]) -> i64 { match p { Param::Number(n) => n, Param::Variable(c) => state[cid(c)], } } fn recursive( idx: usize, total: i64, state: [i64; 4], exprs: &[Expr], mem: &mut HashMap<(usize, [i64; 4]), Option<i64>>, ) -> Option<i64> { if idx == exprs.len() { return if state[3] == 0 { dbg!(Some(total)) } else { None }; } if let Some(o) = mem.get(&(idx, state)) { return *o; } let mut nstate = state; let result = match exprs[idx] { Expr::Input(n) => (0..=9) .rev() .filter_map(|d| { nstate[cid(n)] = d; recursive(idx + 1, total * 10 + d, nstate, exprs, mem) }) .next(), Expr::Eql(c, p) => { nstate[cid(c)] = (nstate[cid(c)] == param(p, &nstate)) as i64; recursive(idx + 1, total, nstate, exprs, mem) } Expr::Add(c, p) => { nstate[cid(c)] = nstate[cid(c)] + param(p, &nstate); recursive(idx + 1, total, nstate, exprs, mem) } Expr::Mul(c, p) => { nstate[cid(c)] = nstate[cid(c)] * param(p, &nstate); recursive(idx + 1, total, nstate, exprs, mem) } Expr::Div(c, p) => { nstate[cid(c)] = nstate[cid(c)] / param(p, &nstate); recursive(idx + 1, total, nstate, exprs, mem) } Expr::Mod(c, p) => { nstate[cid(c)] = nstate[cid(c)] % param(p, &nstate); recursive(idx + 1, total, nstate, exprs, mem) } }; mem.insert((idx, state), result); result } recursive(0, 0, [0, 0, 0, 0], exprs, &mut HashMap::new()) } #[test] fn example1() { let parsed = 
instructions_parser::list(&std::fs::read_to_string("input/day24/example1.txt").unwrap()); dbg!(&parsed); processr(&parsed.unwrap()); } #[test] fn example2() { let parsed = instructions_parser::list(&std::fs::read_to_string("input/day24/example2.txt").unwrap()); dbg!(&parsed); processr(&parsed.unwrap()); } #[test] fn input() { let parsed = instructions_parser::list(&std::fs::read_to_string("input/day24/input.txt").unwrap()); dbg!(&parsed); processr(&parsed.unwrap()); }
fn val(state: [i64; 4], p: Param) -> i64 { match p { Param::Number(n) => n, Param::Variable(c) => state[cid(c)], } }
function_block-full_function
[ { "content": "pub fn run(input: &str) -> Result<()> {\n\n let (a, b) = input.split_once(',').context(\"\")?;\n\n\n\n let p1_state = PlayerState {\n\n position: a.parse::<u64>()? - 1,\n\n score: 0,\n\n rollcount: 0,\n\n };\n\n\n\n let p2_state = PlayerState {\n\n position: b.parse::<u64>()? - 1,\n\n score: 0,\n\n rollcount: 0,\n\n };\n\n\n\n let mut states = HashMap::from([(\n\n GameState {\n\n states: [p1_state, p2_state],\n\n },\n\n 1,\n", "file_path": "src/day21.rs", "rank": 0, "score": 166916.1026131735 }, { "content": "fn id(a: &str, i: usize) -> char {\n\n a.chars().nth(i).unwrap()\n\n}\n\n\n", "file_path": "src/day14.rs", "rank": 1, "score": 147921.13023958364 }, { "content": "pub fn run(path: &str) -> Result<()> {\n\n let content = std::fs::read_to_string(path)?;\n\n let steps: Vec<_> = content.lines().map(parse).try_collect()?;\n\n\n\n let init = steps[0];\n\n let final_volumes = steps[1..].iter().fold(vec![init.1], |acc, &(on, volume)| {\n\n let mut next = acc.into_iter().flat_map(|v| v.cut(volume)).collect_vec();\n\n if on {\n\n next.push(volume);\n\n }\n\n let count = next.iter().fold(0, |acc, v| acc + v.count());\n\n dbg!(count);\n\n next\n\n });\n\n let count = final_volumes.iter().fold(0, |acc, v| acc + v.count());\n\n dbg!(count);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 3, "score": 121771.14616895962 }, { "content": "pub fn day13(path: &str) -> Result<()> {\n\n let file = std::fs::read_to_string(path)?;\n\n let points: HashSet<(i32, i32)> = file\n\n .lines()\n\n .take_while(|l| !l.is_empty())\n\n .filter_map(|l| l.split_once(','))\n\n .map(|l| (l.0.parse().unwrap(), l.1.parse().unwrap()))\n\n .collect();\n\n\n\n let instructions: Vec<(&str, i32)> = file\n\n .lines()\n\n .skip_while(|l| !l.is_empty())\n\n .skip(1)\n\n .filter_map(|l| l.split(' ').nth(2)?.split_once('='))\n\n .map(|(l, i)| (l, i.parse().unwrap()))\n\n .collect_vec();\n\n\n\n let point_count = step(points.clone(), &instructions[0]).len();\n\n 
dbg!(point_count);\n\n let final_points = instructions.iter().fold(points, step);\n\n show(&final_points);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day13.rs", "rank": 4, "score": 121771.14616895962 }, { "content": "pub fn day14(path: &str) -> Result<()> {\n\n let file = std::fs::read_to_string(path)?;\n\n dbg!(&file);\n\n let (template, rules) = file.split_once(\"\\n\\n\").unwrap();\n\n let rules: HashMap<_, _> = rules\n\n .lines()\n\n .map(|line| line.split_once(\" -> \").unwrap())\n\n .map(|(a, b)| (id(a, 0), id(a, 1), id(b, 0)))\n\n .map(|(a, b, c)| ((a, b), ((a, c), (c, b))))\n\n .collect();\n\n dbg!(&rules);\n\n\n\n let mut counts: HashMap<(char, char), usize> = template.chars().tuple_windows().counts();\n\n dbg!(&counts);\n\n\n\n for _ in 0..40 {\n\n let mut ncounts: HashMap<_, _> = HashMap::new();\n\n for (pair, count) in &counts {\n\n ncounts.entry(rules[pair].0).or_insert(0).add_assign(count);\n\n ncounts.entry(rules[pair].1).or_insert(0).add_assign(count);\n", "file_path": "src/day14.rs", "rank": 5, "score": 121771.14616895962 }, { "content": "pub fn run(instructions: &str) -> Result<()> {\n\n let packet = parse_str(instructions)?;\n\n\n\n dbg!(&packet);\n\n dbg!(version_sum(&packet));\n\n dbg!(eval_packet(&packet));\n\n Ok(())\n\n}\n", "file_path": "src/day16.rs", "rank": 6, "score": 121771.14616895962 }, { "content": "pub fn run(path: &str) -> Result<()> {\n\n let content = std::fs::read_to_string(path)?;\n\n let scanner_list: Vec<Vec<Vector>> = content\n\n .split(\"\\n\\n\")\n\n .map(|section| {\n\n section\n\n .lines()\n\n .filter(|line| !line.starts_with(\"---\"))\n\n .map(|coord_string| {\n\n coord_string\n\n .split(',')\n\n .map(|s| str::parse(s).unwrap())\n\n .collect_tuple()\n\n .unwrap()\n\n })\n\n .map(|(a, b, c)| Vector(a, b, c))\n\n .collect_vec()\n\n })\n\n .collect_vec();\n\n\n", "file_path": "src/day19.rs", "rank": 7, "score": 121771.14616895962 }, { "content": "pub fn run(path: &str) -> Result<()> {\n\n let file = 
std::fs::read_to_string(path)?;\n\n let (bitrepo, charmap) = file.split_once(\"\\n\\n\").context(\"initial split\")?;\n\n let bitrepo = bitrepo.as_bytes().iter().map(|&c| c == b'#').collect_vec();\n\n let mut charmap = charmap\n\n .lines()\n\n .enumerate()\n\n .flat_map(|(y, line)| {\n\n line.as_bytes()\n\n .iter()\n\n .enumerate()\n\n .map(move |(x, &c)| ((x as isize, y as isize), c == b'#'))\n\n })\n\n .collect::<HashMap<(isize, isize), _>>();\n\n let mut default = false;\n\n for _ in 0..50 {\n\n charmap = step(&bitrepo, charmap, default);\n\n default = bitrepo[if default { 511 } else { 0 }]\n\n }\n\n\n\n let count = charmap.values().filter(|v| **v).count();\n\n dbg!(count);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day20.rs", "rank": 8, "score": 121771.14616895962 }, { "content": "pub fn run(instruction: &str) -> Result<()> {\n\n let (xstart, xend, ystart, yend) = instruction\n\n .split(',')\n\n .map(|n| dbg!(n))\n\n .map(str::parse)\n\n .map(Result::unwrap)\n\n .next_tuple()\n\n .context(\"parsefailed\")?;\n\n let xrange = xstart..=xend;\n\n let yrange = ystart..=yend;\n\n\n\n let results = (0..=xend * 4)\n\n .cartesian_product(0..=xend * 4)\n\n .filter(|(step, x)| xrange.contains(&xpos(*x, *step)))\n\n .cartesian_product(-10000..10000)\n\n .filter(|((step, _), y)| yrange.contains(&ypos(*y, *step)))\n\n .map(|((step, x), y)| (step, (x, y)))\n\n .unique_by(|(_, coord)| coord.clone())\n\n .sorted_by_key(|(_, coord)| coord.clone())\n\n .collect_vec();\n\n dbg!(results.len());\n\n dbg!(gauss(results.iter().max_by_key(|f| f.1 .1).unwrap().1 .1));\n\n\n\n dbg!(-yend * ((-yend - 1) / 2));\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day17.rs", "rank": 9, "score": 121771.14616895962 }, { "content": "pub fn run(path: &str) -> Result<()> {\n\n let input = std::fs::read_to_string(path)?;\n\n let numbers = input\n\n .lines()\n\n .filter_map(|mut line| dbg!(parse(&mut line)).ok())\n\n .collect_vec();\n\n\n\n let part1 = numbers\n\n .clone()\n\n .into_iter()\n\n .reduce(|acc, 
elem| reduce(Pair::npair(acc, elem)))\n\n .context(\"empty\")?;\n\n dbg!(&part1, magnitude(&part1));\n\n\n\n let part2 = numbers\n\n .into_iter()\n\n .permutations(2)\n\n .map(|perm| reduce(Pair::npair(perm[0].clone(), perm[1].clone())))\n\n .max_by_key(magnitude)\n\n .context(\"part2 not found\")?;\n\n dbg!(&part2, magnitude(&part2));\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day18.rs", "rank": 10, "score": 121771.14616895962 }, { "content": "pub fn day07(path: &str) -> anyhow::Result<()> {\n\n let inputs: Vec<i32> = std::fs::read_to_string(path)?\n\n .trim()\n\n .split(',')\n\n .map(str::parse::<i32>)\n\n .collect::<Result<_, _>>()?;\n\n let max = *inputs.iter().max().context(\"no max\")?;\n\n\n\n // ex1 median\n\n let median = inputs\n\n .iter()\n\n .sorted()\n\n .nth(inputs.len() / 2)\n\n .context(\"no median\")?;\n\n let fuel = inputs.iter().map(|i| (*i - median).abs()).sum::<i32>();\n\n dbg!(fuel);\n\n\n\n let dist = |i| i * (i + 1) / 2;\n\n //ex2 no precalc\n\n let minfuel = (0..max)\n\n .map(|t| inputs.iter().map(|i| dist((*i - t).abs())).sum::<i32>())\n\n .min();\n\n dbg!(minfuel);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/day07.rs", "rank": 11, "score": 116022.91468301316 }, { "content": "pub fn run(path: &str) -> anyhow::Result<()> {\n\n let map = std::fs::read_to_string(path)?;\n\n let map: HashMap<Coords, usize> = map\n\n .lines()\n\n .enumerate()\n\n .flat_map(|(y, line)| {\n\n line.chars()\n\n .enumerate()\n\n .map(move |(x, c)| ((x as _, y as _), c as usize - '0' as usize))\n\n })\n\n .collect();\n\n\n\n let result = lowest(&map, (0, 0), *map.keys().max().unwrap());\n\n dbg!(result);\n\n _show(&map);\n\n\n\n let (xsize, ysize) = *map.keys().max().unwrap();\n\n let xsize = xsize + 1;\n\n let ysize = ysize + 1;\n\n\n", "file_path": "src/day15.rs", "rank": 12, "score": 116022.91468301316 }, { "content": "pub fn day08(path: &str) -> anyhow::Result<()> {\n\n let lines: Vec<(Vec<String>, Vec<String>)> = std::fs::read_to_string(path)?\n\n .lines()\n\n 
.map(|l| {\n\n l.split('|')\n\n .map(|chars| chars.trim().split(' ').map(String::from).collect())\n\n .next_tuple()\n\n .unwrap()\n\n })\n\n .collect();\n\n // 1 -> 2 segments, 7 -> 3 segments, 4 -> 4 segments, 8 -> 7 segments\n\n let count: usize = [2, 3, 4, 7]\n\n .iter()\n\n .map(|segment_count| {\n\n lines\n\n .iter()\n\n .map(|segment_list| {\n\n segment_list\n\n .1\n\n .iter()\n", "file_path": "src/day08.rs", "rank": 13, "score": 116022.91468301316 }, { "content": "pub fn day06(path: &str) -> anyhow::Result<()> {\n\n let input: Vec<usize> = std::fs::read_to_string(path)?\n\n .split(',')\n\n .map(|l| l.parse().unwrap())\n\n .collect();\n\n\n\n let step = |v: Vec<_>, _| vec![v[1], v[2], v[3], v[4], v[5], v[6], v[7] + v[0], v[8], v[0]];\n\n let count: usize = input\n\n .iter()\n\n .map(|f| {\n\n let mut init = vec![0usize; 9];\n\n init[*f] = 1;\n\n (0..256).fold(init, step).iter().sum::<usize>()\n\n })\n\n .sum();\n\n println!(\"count:{}\", count);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day06.rs", "rank": 14, "score": 116022.91468301316 }, { "content": "pub fn _day07_old(path: &str) -> anyhow::Result<()> {\n\n let inputs: Vec<i32> = std::fs::read_to_string(path)?\n\n .trim()\n\n .split(',')\n\n .map(str::parse::<i32>)\n\n .collect::<Result<_, _>>()?;\n\n\n\n // ex1\n\n let max = *inputs.iter().max().context(\"no max\")?;\n\n let minfuel = (0..max)\n\n .map(|t| inputs.iter().map(|i| (*i - t).abs()).sum::<i32>())\n\n .min()\n\n .context(\"no min\")?;\n\n dbg!(max, minfuel);\n\n\n\n // ex2\n\n // precalc triangle numbers\n\n let distances = (0..=max)\n\n .scan(0, |acc, i| {\n\n *acc += i;\n", "file_path": "src/day07.rs", "rank": 15, "score": 113535.59139646453 }, { "content": "#[test]\n\nfn input() {\n\n dbg!(run(\"input/day22/input.txt\"));\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 17, "score": 99151.46928714018 }, { "content": "#[test]\n\nfn input() {\n\n dbg!(run([[C, B], [D, A], [A, D], [B, C]]));\n\n}\n", "file_path": "src/day23.rs", "rank": 18, 
"score": 99151.46928714018 }, { "content": "#[test]\n\nfn input() {\n\n run(\"input/day25/input.txt\");\n\n}\n", "file_path": "src/day25.rs", "rank": 19, "score": 99151.46928714018 }, { "content": "fn read_char(reader: &mut &str) -> u8 {\n\n let (next, nreader) = reader.split_at(1);\n\n *reader = nreader;\n\n next.as_bytes()[0]\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 20, "score": 99015.61460063362 }, { "content": "fn step(set: HashSet<(i32, i32)>, inst: &(&str, i32)) -> HashSet<(i32, i32)> {\n\n set.into_iter()\n\n .map(|(x, y)| {\n\n if inst.0 == \"x\" {\n\n (inst.1 - (inst.1 - x).abs(), y)\n\n } else {\n\n (x, inst.1 - (inst.1 - y).abs())\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 22, "score": 91641.18961720879 }, { "content": "fn parse_str(instructions: &str) -> Result<Packet> {\n\n let mut bits = instructions\n\n .trim()\n\n .chars()\n\n .flat_map(|c| {\n\n let i = i8::from_str_radix(&String::from_iter([c]), 16).unwrap();\n\n (0..4).map(move |bit| (i & (8 >> bit)) != 0)\n\n })\n\n .collect::<VecDeque<bool>>();\n\n parse(&mut bits)\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 25, "score": 84009.09365451365 }, { "content": "fn run(path: &str) -> Result<()> {\n\n let content = std::fs::read_to_string(path)?;\n\n\n\n let mut map = content\n\n .lines()\n\n .enumerate()\n\n .flat_map(|(y, line)| {\n\n line.as_bytes()\n\n .iter()\n\n .enumerate()\n\n .map(move |(x, &b)| ((x as isize, y as isize), b))\n\n })\n\n .collect::<HashMap<_, _>>();\n\n\n\n for i in 0.. 
{\n\n let mut hmap = map.clone();\n\n for (&(x, y), _) in map.iter().filter(|&p| *p.1 == b'>') {\n\n let ncell = if map.get(&(x + 1, y)).is_some() {\n\n (x + 1, y)\n\n } else {\n", "file_path": "src/day25.rs", "rank": 26, "score": 81611.61048822055 }, { "content": "fn parse(line: &str) -> Result<(bool, Volume)> {\n\n let (on, x1, x2, y1, y2, z1, z2) = scan_fmt!(\n\n line,\n\n \"{} x={}..{},y={}..{},z={}..{}\",\n\n String,\n\n i32,\n\n i32,\n\n i32,\n\n i32,\n\n i32,\n\n i32\n\n )?;\n\n let on = match on.as_str() {\n\n \"on\" => true,\n\n \"off\" => false,\n\n _ => return Err(anyhow::anyhow!(\"unexpected initial string {}\", on)),\n\n };\n\n\n\n Ok((\n\n on,\n\n Volume {\n\n x: (x1, x2 + 1),\n\n y: (y1, y2 + 1),\n\n z: (z1, z2 + 1),\n\n },\n\n ))\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 27, "score": 73934.48580824381 }, { "content": "fn parse(reader: &mut &str) -> Result<Pair> {\n\n let next = read_char(reader);\n\n if next.is_ascii_digit() {\n\n Ok(Pair::Number(next - b'0'))\n\n } else if next == b'[' {\n\n let pair1 = parse(reader)?;\n\n if read_char(reader) != b',' {\n\n return Err(anyhow!(\"expected comma\"));\n\n }\n\n let pair2 = parse(reader)?;\n\n if read_char(reader) != b']' {\n\n return Err(anyhow!(\"expected closing bracket\"));\n\n }\n\n Ok(Pair::npair(pair1, pair2))\n\n } else {\n\n Err(anyhow!(\"unexpected char: {}\", next))\n\n }\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 28, "score": 73934.48580824381 }, { "content": "#[derive(Debug, PartialEq, Eq)]\n\nenum Packet {\n\n Literal(u32, u64),\n\n Operator(u32, u32, Vec<Packet>),\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 29, "score": 57033.70003072524 }, { "content": "#[derive(Debug, Clone)]\n\nenum Pair {\n\n Number(u8),\n\n Pair(Box<Pair>, Box<Pair>),\n\n}\n\n\n\nimpl Pair {\n\n fn npair(a: Pair, b: Pair) -> Pair {\n\n Pair::Pair(Box::new(a), Box::new(b))\n\n }\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 30, "score": 57033.151976586785 }, { "content": "fn lowest(map: 
&HashMap<Coords, usize>, (x, y): Coords, target: Coords) -> usize {\n\n let mut queue = VecDeque::from([(x, y)]);\n\n let mut distmap = HashMap::from([((x, y), 0)]);\n\n while !queue.is_empty() {\n\n let (x, y) = queue.pop_front().unwrap();\n\n for (nx, ny) in [(x + 1, y), (x - 1, y), (x, y + 1), (x, y - 1)]\n\n .into_iter()\n\n .filter(|p| map.contains_key(p))\n\n {\n\n let new_val = distmap[&(x, y)] + map[&(nx, ny)];\n\n let valref = distmap.entry((nx, ny)).or_insert(usize::MAX);\n\n if new_val < *valref {\n\n *valref = new_val;\n\n if (nx, ny) != target {\n\n queue.push_back((nx, ny));\n\n }\n\n }\n\n }\n\n }\n\n\n\n distmap[&target]\n\n}\n\n\n", "file_path": "src/day15.rs", "rank": 31, "score": 55609.8465091066 }, { "content": "fn _show(map: &HashMap<(i32, i32), usize>) {\n\n println!();\n\n let max = map.keys().max().unwrap();\n\n for y in 0..=max.1 {\n\n for x in 0..=max.0 {\n\n print!(\"{:2} \", map[&(x, y)])\n\n }\n\n println!();\n\n }\n\n}\n\n\n", "file_path": "src/day15.rs", "rank": 32, "score": 54100.88602837335 }, { "content": "fn add_right(pair: Pair, val: Option<u8>) -> Pair {\n\n match val {\n\n None => pair,\n\n Some(v) => match pair {\n\n Pair::Number(n) => Pair::Number(n + v),\n\n Pair::Pair(a, b) => Pair::npair(*a, add_right(*b, val)),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 33, "score": 52008.79052987076 }, { "content": "fn add_left(pair: Pair, val: Option<u8>) -> Pair {\n\n match val {\n\n None => pair,\n\n Some(v) => match pair {\n\n Pair::Number(n) => Pair::Number(n + v),\n\n Pair::Pair(a, b) => Pair::npair(add_left(*a, val), *b),\n\n },\n\n }\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 34, "score": 52008.79052987076 }, { "content": "fn game_step(mut state: GameState, roll: u64, player: usize) -> GameState {\n\n state.states[player] = step(state.states[player], roll);\n\n state\n\n}\n\n\n", "file_path": "src/day21.rs", "rank": 35, "score": 45651.293400618044 }, { "content": "fn step(\n\n bitrepo: &[bool],\n\n map: 
HashMap<(isize, isize), bool>,\n\n default: bool,\n\n) -> HashMap<(isize, isize), bool> {\n\n let valid_keys: Vec<(isize, isize)> = map\n\n .iter()\n\n .filter(|(_k, &v)| v)\n\n .map(|p| p.0)\n\n .copied()\n\n .collect();\n\n\n\n let (minx, maxx) = valid_keys\n\n .iter()\n\n .map(|k| k.0)\n\n .minmax()\n\n .into_option()\n\n .unwrap();\n\n\n\n let (miny, maxy) = valid_keys\n", "file_path": "src/day20.rs", "rank": 36, "score": 42509.76706478695 }, { "content": "#[test]\n\nfn example() {\n\n dbg!(run([[B, A], [C, D], [B, C], [D, A]]));\n\n}\n\n\n", "file_path": "src/day23.rs", "rank": 37, "score": 42509.76706478695 }, { "content": "fn main() {\n\n let args = std::env::args().collect_vec();\n\n let input = args[1].clone();\n\n\n\n match args.get(2) {\n\n Some(n) => match n.as_str() {\n\n \"day01\" => day01::day01(&input),\n\n \"day02\" => day02::day02(&input),\n\n \"day03\" => day03::day03(&input),\n\n \"day03_bin\" => day03::day03_bin(&input),\n\n \"day04\" => day04::day04(&input),\n\n \"day05\" => day05::day05(&input),\n\n \"day06\" => day06::day06(&input),\n\n \"day07\" => day07::day07(&input),\n\n \"day08\" => day08::day08(&input),\n\n \"day09\" => day09::day09(&input),\n\n \"day10\" => day10::day10(&input),\n\n \"day11\" => day11::day11(&input),\n\n \"day12\" => day12::day12(&input),\n\n \"day13\" => day13::day13(&input),\n", "file_path": "src/main.rs", "rank": 38, "score": 42509.76706478695 }, { "content": "#[test]\n\nfn example() {\n\n run(\"input/day25/example.txt\");\n\n}\n", "file_path": "src/day25.rs", "rank": 39, "score": 42509.76706478695 }, { "content": "#[test]\n\nfn testcut() {\n\n let V1 = Volume {\n\n x: (0, 10),\n\n y: (0, 10),\n\n z: (0, 10),\n\n };\n\n let V2 = Volume {\n\n x: (3, 6),\n\n y: (3, 6),\n\n z: (3, 6),\n\n };\n\n dbg!(V1.cut(V2));\n\n}\n", "file_path": "src/day22.rs", "rank": 40, "score": 42509.76706478695 }, { "content": "#[test]\n\nfn example1() {\n\n dbg!(run(\"input/day22/example1.txt\"));\n\n}\n\n\n", "file_path": "src/day22.rs", 
"rank": 41, "score": 42509.76706478695 }, { "content": "#[test]\n\nfn example2() {\n\n dbg!(run(\"input/day22/example2.txt\"));\n\n}\n\n\n", "file_path": "src/day22.rs", "rank": 42, "score": 42509.76706478695 }, { "content": "fn visit_node(\n\n current: usize,\n\n target: usize,\n\n map: &[Node],\n\n mut visited: Vec<usize>,\n\n small_cave: bool,\n\n) -> Vec<Vec<usize>> {\n\n visited.push(current);\n\n if current == target {\n\n return [visited].into();\n\n }\n\n\n\n if visited.len() > 1 && visited[0] == current {\n\n return vec![];\n\n }\n\n\n\n let is_visited = map[current].small && visited.iter().filter(|n| **n == current).count() > 1;\n\n if is_visited && small_cave {\n\n return vec![];\n\n }\n", "file_path": "src/day12.rs", "rank": 45, "score": 41323.61176106108 }, { "content": "fn step_probabilites(\n\n probabilities: HashMap<GameState, u64>,\n\n player: usize,\n\n) -> HashMap<GameState, u64> {\n\n probabilities\n\n .into_iter()\n\n .flat_map(|(state, count)| (1..=3).map(move |roll| (game_step(state, roll, player), count)))\n\n .into_grouping_map_by(|t| t.0)\n\n .fold(0, |acc, _key, val| acc + val.1)\n\n}\n\n\n", "file_path": "src/day21.rs", "rank": 46, "score": 41323.61176106108 }, { "content": "fn gauss(i: i32) -> i32 {\n\n i * (i + 1) / 2\n\n}\n", "file_path": "src/day17.rs", "rank": 47, "score": 36926.121262089815 }, { "content": "fn magnitude(pair: &Pair) -> u64 {\n\n match pair {\n\n Pair::Number(n) => *n as _,\n\n Pair::Pair(a, b) => 3 * magnitude(a) + 2 * magnitude(b),\n\n }\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 48, "score": 35937.538475568675 }, { "content": "fn version_sum(packet: &Packet) -> u32 {\n\n match packet {\n\n Packet::Literal(version, _) => *version,\n\n Packet::Operator(version, _, sub_packets) => {\n\n *version + sub_packets.iter().map(version_sum).sum::<u32>()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 49, "score": 35029.720739688295 }, { "content": "fn eval_packet(packet: &Packet) -> u64 {\n\n match 
packet {\n\n Packet::Literal(_, val) => *val,\n\n Packet::Operator(_, op_type, sub_packets) => {\n\n let mut sub_values = sub_packets.iter().map(eval_packet);\n\n match op_type {\n\n 0 => sub_values.sum::<u64>(),\n\n 1 => sub_values.product::<u64>(),\n\n 2 => sub_values.min().unwrap(),\n\n 3 => sub_values.max().unwrap(),\n\n 5 => (sub_values.next() > sub_values.next()) as u64,\n\n 6 => (sub_values.next() < sub_values.next()) as u64,\n\n 7 => (sub_values.next() == sub_values.next()) as u64,\n\n _ => unreachable!(),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 50, "score": 35029.720739688295 }, { "content": "fn run(initial: [[i8; 2]; 4]) -> Result<()> {\n\n let rooms = [\n\n [initial[0][0], D, D, initial[0][1]],\n\n [initial[1][0], C, B, initial[1][1]],\n\n [initial[2][0], B, A, initial[2][1]],\n\n [initial[3][0], A, C, initial[3][1]],\n\n ];\n\n let initial = State {\n\n hallway: [-1; 11],\n\n rooms,\n\n };\n\n\n\n let mut result_map = HashMap::new();\n\n dbg!(min_cost_to(initial, &mut result_map));\n\n\n\n initial.show();\n\n let mut next = initial;\n\n loop {\n\n let states = next.possible_states();\n\n if states.is_empty() {\n", "file_path": "src/day23.rs", "rank": 51, "score": 34361.577393998 }, { "content": "fn reduce(mut pair: Pair) -> Pair {\n\n loop {\n\n let (npair, _, _, exploded) = explode(pair, 0);\n\n pair = npair;\n\n if exploded {\n\n continue;\n\n }\n\n\n\n let (npair, has_split) = split(pair);\n\n pair = npair;\n\n if has_split {\n\n continue;\n\n }\n\n break;\n\n }\n\n pair\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 52, "score": 34193.703316068495 }, { "content": "fn split(pair: Pair) -> (Pair, bool) {\n\n match pair {\n\n Pair::Number(n) => {\n\n if n >= 10 {\n\n (\n\n Pair::npair(Pair::Number(n / 2), Pair::Number(n / 2 + n % 2)),\n\n true,\n\n )\n\n } else {\n\n (pair, false)\n\n }\n\n }\n\n\n\n Pair::Pair(a, b) => {\n\n let (new_a, has_split) = split(*a);\n\n if has_split {\n\n (Pair::npair(new_a, *b), true)\n\n } 
else {\n\n let (new_b, has_split) = split(*b);\n\n (Pair::npair(new_a, new_b), has_split)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/day18.rs", "rank": 53, "score": 34193.703316068495 }, { "content": "fn show(set: &HashSet<(i32, i32)>) {\n\n let max = set\n\n .iter()\n\n .copied()\n\n .reduce(|(a, b), (a2, b2)| (a.max(a2), b.max(b2)))\n\n .unwrap();\n\n\n\n println!(\"{} {}\", max.0, max.1);\n\n for y in 0..=max.1 {\n\n for x in 0..=max.0 {\n\n print!(\n\n \"{}\",\n\n match set.contains(&(x, y)) {\n\n true => \"#\",\n\n false => \".\",\n\n }\n\n )\n\n }\n\n println!()\n\n }\n\n}\n\n\n", "file_path": "src/day13.rs", "rank": 54, "score": 33357.14157353129 }, { "content": "fn ypos(velocity: i32, step: i32) -> i32 {\n\n velocity * step - gauss(step - 1)\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 55, "score": 32617.197915580426 }, { "content": "fn xpos(velocity: i32, step: i32) -> i32 {\n\n if velocity >= step {\n\n velocity * step - gauss(step - 1)\n\n } else {\n\n gauss(velocity)\n\n }\n\n}\n\n\n", "file_path": "src/day17.rs", "rank": 56, "score": 32617.197915580426 }, { "content": "fn show(map: &HashMap<(isize, isize), bool>) {\n\n println!();\n\n let max = map.keys().max().unwrap();\n\n let min = map.keys().min().unwrap();\n\n for y in min.1..=max.1 {\n\n for x in min.0..=max.0 {\n\n print!(\"{}\", if map[&(x, y)] { '#' } else { '.' 
})\n\n }\n\n println!();\n\n }\n\n}\n", "file_path": "src/day20.rs", "rank": 57, "score": 31843.81949763802 }, { "content": "fn adjacent((x, y): (isize, isize)) -> Vec<(isize, isize)> {\n\n (y - 1..=y + 1)\n\n .cartesian_product(x - 1..=x + 1)\n\n .map(|(y, x)| (x, y))\n\n .collect()\n\n}\n\n\n", "file_path": "src/day20.rs", "rank": 58, "score": 31366.116024788753 }, { "content": "fn value(bits: impl IntoIterator<Item = bool>) -> u64 {\n\n bits.into_iter()\n\n .fold(0, |acc, bit| acc * 2 + if bit { 1 } else { 0 })\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 59, "score": 31184.288516921093 }, { "content": "fn rotate(Vector(x, y, z): Vector, rot: i32) -> Vector {\n\n match rot {\n\n 0 => Vector(x, y, z),\n\n 1 => Vector(x, z, -y),\n\n 2 => Vector(x, -y, -z),\n\n 3 => Vector(x, -z, y),\n\n 4 => Vector(y, x, -z),\n\n 5 => Vector(y, z, x),\n\n 6 => Vector(y, -x, z),\n\n 7 => Vector(y, -z, -x),\n\n 8 => Vector(z, x, y),\n\n 9 => Vector(z, y, -x),\n\n 10 => Vector(z, -x, -y),\n\n 11 => Vector(z, -y, x),\n\n 12 => Vector(-x, y, -z),\n\n 13 => Vector(-x, z, y),\n\n 14 => Vector(-x, -y, z),\n\n 15 => Vector(-x, -z, -y),\n\n 16 => Vector(-y, x, z),\n\n 17 => Vector(-y, z, -x),\n\n 18 => Vector(-y, -x, -z),\n\n 19 => Vector(-y, -z, x),\n\n 20 => Vector(-z, x, -y),\n\n 21 => Vector(-z, y, x),\n\n 22 => Vector(-z, -x, y),\n\n 23 => Vector(-z, -y, -x),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 60, "score": 30832.54990785706 }, { "content": "fn match_coord(a: &[Vector], b: &[Vector]) -> (Vector, Vec<Vector>) {\n\n for rid in 0..24 {\n\n let b_rotated = b.iter().map(|v| rotate(*v, rid)).collect_vec();\n\n\n\n for (&apos, &bpos) in a.iter().cartesian_product(b_rotated.iter()) {\n\n let diff = bpos - apos;\n\n let unique_count = b_rotated\n\n .iter()\n\n .map(|v| *v - diff)\n\n .chain(a.iter().copied())\n\n .collect::<HashSet<_>>()\n\n .len();\n\n if a.len() + b.len() - unique_count >= 12 {\n\n dbg!(diff);\n\n return (\n\n -diff,\n\n 
b_rotated.iter().copied().map(|v| v - diff).collect_vec(),\n\n );\n\n }\n\n }\n\n }\n\n (Vector(0, 0, 0), vec![])\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 61, "score": 30592.737606846345 }, { "content": "fn parse(bits: &mut VecDeque<bool>) -> Result<Packet> {\n\n let version = value(bits.drain(0..3));\n\n let packet_type = value(bits.drain(0..3));\n\n\n\n match packet_type {\n\n 4 => parse_literal(version as _, bits),\n\n _ => parse_operator(version as _, packet_type as _, bits),\n\n }\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 62, "score": 30467.195796118547 }, { "content": "fn step(mut state: PlayerState, roll: u64) -> PlayerState {\n\n state.position = (state.position + roll) % 10;\n\n state.rollcount += 1;\n\n\n\n if state.rollcount == 3 {\n\n state.score += state.position + 1;\n\n state.rollcount = 0;\n\n }\n\n\n\n state\n\n}\n\n\n", "file_path": "src/day21.rs", "rank": 63, "score": 29800.459897262746 }, { "content": "fn parse_literal(version: u32, bits: &mut VecDeque<bool>) -> Result<Packet> {\n\n let mut result = 0;\n\n loop {\n\n let mut chunk = bits.drain(0..5);\n\n let cont = chunk.next().unwrap();\n\n\n\n result = result * 16 + value(chunk);\n\n if !cont {\n\n break;\n\n }\n\n }\n\n\n\n Ok(Packet::Literal(version, result))\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 64, "score": 27473.116316983476 }, { "content": "fn explode(pair: Pair, depth: u8) -> (Pair, Option<u8>, Option<u8>, bool) {\n\n match pair {\n\n Pair::Number(_) => (pair, None, None, false),\n\n Pair::Pair(a, b) => {\n\n if depth >= 4 {\n\n match (*a, *b) {\n\n (Pair::Number(a), Pair::Number(b)) => {\n\n return (Pair::Number(0), Some(a), Some(b), true)\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n let (sub_pair_left, left_ret, left_add, exploded) = explode(*a, depth + 1);\n\n if exploded {\n\n (\n\n Pair::npair(sub_pair_left, add_left(*b, left_add)),\n\n left_ret,\n\n None,\n\n true,\n\n )\n", "file_path": "src/day18.rs", "rank": 65, "score": 26548.880250791037 }, { 
"content": "fn min_cost_to(state: State, mem: &mut HashMap<State, Option<i64>>) -> Option<i64> {\n\n if state.rooms == [[A, A, A, A], [B, B, B, B], [C, C, C, C], [D, D, D, D]] {\n\n return Some(0);\n\n }\n\n\n\n if mem.contains_key(&state) {\n\n return mem[&state];\n\n }\n\n\n\n let states = state.possible_states();\n\n // if (states.is_empty()) {\n\n // state.show();\n\n // }\n\n let result = states\n\n .into_iter()\n\n .filter_map(|(state, cost)| min_cost_to(state, mem).map(|f| f + cost as i64))\n\n .min();\n\n mem.insert(state, result);\n\n result\n\n}\n\n\n", "file_path": "src/day23.rs", "rank": 66, "score": 25494.709059022534 }, { "content": "fn parse_operator(version: u32, packet_type: u32, bits: &mut VecDeque<bool>) -> Result<Packet> {\n\n let len_type = bits.drain(0..1).next().unwrap();\n\n let mut sub_packets = Vec::new();\n\n if !len_type {\n\n let len = value(bits.drain(0..15)) as usize;\n\n let mut sub_slice = bits.drain(0..len).collect::<VecDeque<_>>();\n\n while !sub_slice.is_empty() {\n\n sub_packets.push(parse(&mut sub_slice)?);\n\n }\n\n } else {\n\n let count = value(bits.drain(0..11)) as usize;\n\n for _ in 0..count {\n\n sub_packets.push(parse(bits)?);\n\n }\n\n }\n\n Ok(Packet::Operator(version, packet_type, sub_packets))\n\n}\n\n\n", "file_path": "src/day16.rs", "rank": 67, "score": 25014.852706658537 }, { "content": "use std::collections::HashMap;\n\n\n\nuse anyhow::{Context, Result};\n\nuse itertools::Itertools;\n\n\n\n#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy)]\n", "file_path": "src/day21.rs", "rank": 71, "score": 10.10931921036884 }, { "content": "use std::{\n\n collections::HashMap,\n\n fmt::{Debug, Display},\n\n};\n\n\n\nuse anyhow::Result;\n\n\n\n#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]\n", "file_path": "src/day23.rs", "rank": 72, "score": 9.90292193134174 }, { "content": "use itertools::Itertools;\n\nuse std::collections::VecDeque;\n\n\n\nuse anyhow::Result;\n\n\n\npub(crate) fn day09(path: &str) -> Result<()> {\n\n 
let map: Vec<Vec<usize>> = std::fs::read_to_string(path)?\n\n .lines()\n\n .map(|s| {\n\n s.chars()\n\n .map(|c| (c as usize - '0' as usize) as usize)\n\n .collect()\n\n })\n\n .collect();\n\n dbg!(&map[0]);\n\n\n\n let get = |map: Vec<Vec<usize>>, x, y| -> Option<usize> {\n\n Some(*map.get(y as usize)?.get(x as usize)?)\n\n };\n\n let adjacent = |map: Vec<Vec<usize>>, x, y| {\n", "file_path": "src/day09.rs", "rank": 73, "score": 9.72815433917998 }, { "content": "use std::{\n\n collections::{HashSet, VecDeque},\n\n ops::{Neg, Not, Sub},\n\n};\n\n\n\nuse anyhow::Result;\n\nuse itertools::Itertools;\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n", "file_path": "src/day19.rs", "rank": 74, "score": 9.584882392796255 }, { "content": "use std::{\n\n collections::{HashMap, HashSet},\n\n vec,\n\n};\n\n\n\nuse anyhow::Result;\n\nuse itertools::Itertools;\n\nuse scan_fmt::scan_fmt;\n\n\n\n#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default)]\n", "file_path": "src/day22.rs", "rank": 75, "score": 9.144831269533237 }, { "content": "#[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)]\n\nstruct State {\n\n hallway: [i8; 11],\n\n rooms: [[i8; 4]; 4],\n\n}\n\n\n\nconst A: i8 = 0;\n\nconst B: i8 = 1;\n\nconst C: i8 = 2;\n\nconst D: i8 = 3;\n\n\n\nimpl State {\n\n fn show(&self) {\n\n let charmap = HashMap::from([(-1, '.'), (0, 'A'), (1, 'B'), (2, 'C'), (3, 'D')]);\n\n println!(\"#############\");\n\n print!(\"#\");\n\n for i in self.hallway {\n\n print!(\"{}\", charmap[&i])\n\n }\n\n println!(\"#\");\n\n\n", "file_path": "src/day23.rs", "rank": 76, "score": 8.673700909406557 }, { "content": "#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy)]\n\nstruct GameState {\n\n states: [PlayerState; 2],\n\n}\n\n\n", "file_path": "src/day21.rs", "rank": 77, "score": 8.673700909406557 }, { "content": "#[derive(Hash, PartialEq, Eq, Debug, Clone, Copy)]\n\nstruct PlayerState {\n\n position: u64,\n\n score: u64,\n\n rollcount: u64,\n\n}\n\n\n", "file_path": 
"src/day21.rs", "rank": 78, "score": 8.673700909406557 }, { "content": "#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\nstruct Vector(i32, i32, i32);\n\n\n\nimpl Sub for Vector {\n\n type Output = Vector;\n\n\n\n fn sub(self, rhs: Self) -> Self::Output {\n\n self + -rhs\n\n }\n\n}\n\nimpl Neg for Vector {\n\n type Output = Vector;\n\n\n\n fn neg(self) -> Self::Output {\n\n Vector(-self.0, -self.1, -self.2)\n\n }\n\n}\n\n\n\nimpl std::ops::Add for Vector {\n\n type Output = Vector;\n\n\n\n fn add(self, rhs: Self) -> Self::Output {\n\n Vector(self.0 + rhs.0, self.1 + rhs.1, self.2 + rhs.2)\n\n }\n\n}\n\n\n", "file_path": "src/day19.rs", "rank": 79, "score": 8.673700909406557 }, { "content": "use anyhow::Result;\n\nuse itertools::Itertools;\n\n\n\npub(crate) fn day01(path: &str) -> Result<()> {\n\n let file = std::fs::read_to_string(path)?;\n\n let ints = file.split('\\n').filter_map(|s| str::parse::<u32>(s).ok());\n\n\n\n let ex1: usize = ints.clone().tuple_windows().filter(|(a, b)| b > a).count();\n\n println!(\"ex01: {}\", ex1);\n\n\n\n let ex2: usize = ints\n\n .tuple_windows()\n\n .map(|(a, b, c)| [a, b, c].iter().sum::<u32>())\n\n .tuple_windows()\n\n .filter(|(a, b)| b > a)\n\n .count();\n\n\n\n println!(\"ex02: {}\", ex2);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day01.rs", "rank": 80, "score": 8.449421899268906 }, { "content": "#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default)]\n\nstruct Volume {\n\n x: (i32, i32),\n\n y: (i32, i32),\n\n z: (i32, i32),\n\n}\n\n\n\nimpl Volume {\n\n fn get(self, axis: usize) -> Option<(i32, i32)> {\n\n Some(match axis {\n\n 0 => self.x,\n\n 1 => self.y,\n\n 2 => self.z,\n\n _ => return None,\n\n })\n\n }\n\n\n\n fn cut_axis(self, axis: usize, at: i32) -> (Volume, Volume) {\n\n let mut before = [self.x, self.y, self.z];\n\n let mut after = [self.x, self.y, self.z];\n\n\n", "file_path": "src/day22.rs", "rank": 81, "score": 8.189080911683336 }, { "content": "use anyhow::Result;\n\nuse 
itertools::Itertools;\n\n\n\npub(crate) fn day10(path: &str) -> Result<()> {\n\n let lines = std::fs::read_to_string(path)?\n\n .lines()\n\n .map(String::from)\n\n .collect_vec();\n\n\n\n let pairs = [\n\n ('(', ')', 3, 1),\n\n ('[', ']', 57, 2),\n\n ('{', '}', 1197, 3),\n\n ('<', '>', 25137, 4),\n\n ];\n\n\n\n let illegals = lines\n\n .iter()\n\n .map(|l| {\n\n l.chars()\n", "file_path": "src/day10.rs", "rank": 82, "score": 7.969496963278316 }, { "content": "use anyhow::Result;\n\nuse itertools::Itertools;\n\n\n\npub(crate) type Grid = Vec<Vec<Option<i32>>>;\n\n\n\npub(crate) fn day04(path: &str) -> Result<()> {\n\n let file = std::fs::read_to_string(path)?;\n\n\n\n let numbers = file\n\n .lines()\n\n .next()\n\n .unwrap()\n\n .split(',')\n\n .map(|s| s.parse::<i32>().unwrap())\n\n .collect_vec();\n\n\n\n let grids = file\n\n .lines()\n\n .skip(1)\n\n .chunks(6)\n", "file_path": "src/day04.rs", "rank": 83, "score": 7.485535833496073 }, { "content": "use anyhow::Result;\n\nuse itertools::Itertools;\n\n\n\npub(crate) fn day02(path: &str) -> Result<()> {\n\n let file = std::fs::read_to_string(path)?;\n\n let instructions = file\n\n .lines()\n\n .map(|l| l.split(' ').collect_vec())\n\n .map(|vec| (vec[0], vec[1].parse::<i64>().unwrap()));\n\n\n\n let (x, y, aim) = instructions.fold((0, 0, 0), |(x, y, aim), inst| match inst {\n\n (\"forward\", n) => (x + n, y + n * aim, aim),\n\n (\"down\", n) => (x, y, aim + n),\n\n (\"up\", n) => (x, y, aim - n),\n\n inst => panic!(\"invalid line {:?}\", inst),\n\n });\n\n\n\n println!(\"ex1 position:{:?} ex1 result:{:?}\", (x, aim), x * aim); // aim in ex2 is just depth in ex1\n\n println!(\"ex2 position:{:?} ex2 result:{:?}\", (x, y), x * y);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/day02.rs", "rank": 84, "score": 6.625658282921354 }, { "content": "use std::clone::Clone;\n\n\n\nuse anyhow::Result;\n\n\n\nuse itertools::Itertools;\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Default, Clone, Debug)]\n", "file_path": 
"src/day12.rs", "rank": 85, "score": 6.450784576263356 }, { "content": "use std::collections::VecDeque;\n\n\n\nuse anyhow::Result;\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n", "file_path": "src/day16.rs", "rank": 86, "score": 6.121334898981794 }, { "content": "use std::collections::HashMap;\n\n\n\nuse anyhow::Result;\n\nuse itertools::Itertools;\n\n\n\npub(crate) type Coords = (i32, i32);\n\n\n\npub(crate) fn day05(path: &str) -> Result<()> {\n\n let file = std::fs::read_to_string(path)?;\n\n let coords: Vec<(Coords, Coords)> = file\n\n .lines()\n\n .map(|line| {\n\n line.replace(\"->\", \"\")\n\n .replace(\",\", \" \")\n\n .split_ascii_whitespace()\n\n .map(|n| n.parse().unwrap())\n\n .collect_vec()\n\n })\n\n .map(|v| ((v[0], v[1]), (v[2], v[3])))\n\n .collect();\n", "file_path": "src/day05.rs", "rank": 87, "score": 5.944210399384005 }, { "content": "use anyhow::Result;\n\nuse itertools::Itertools;\n\n\n\npub(crate) fn day03(path: &str) -> Result<()> {\n\n let file = std::fs::read_to_string(path)?;\n\n let bits = file\n\n .lines()\n\n .map(|s| s.chars().map(|c| c.to_digit(2).unwrap() != 0).collect_vec())\n\n .collect_vec();\n\n\n\n fn common_bits(vec: &[Vec<bool>]) -> Vec<bool> {\n\n let len = vec.len();\n\n vec.iter()\n\n .fold(vec![0; vec[0].len()], |acc, v| {\n\n acc.iter()\n\n .zip(v.iter())\n\n .map(|(a, b)| a + if *b { 1 } else { 0 })\n\n .collect_vec()\n\n })\n\n .iter()\n", "file_path": "src/day03.rs", "rank": 88, "score": 5.831697738374286 }, { "content": "use std::{collections::HashMap, ops::AddAssign};\n\n\n\nuse anyhow::Result;\n\nuse itertools::Itertools;\n\n\n", "file_path": "src/day14.rs", "rank": 89, "score": 5.708748265121206 }, { "content": "use anyhow::{anyhow, Context, Result};\n\nuse itertools::Itertools;\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "src/day18.rs", "rank": 90, "score": 5.5152122303296505 }, { "content": "use itertools::Itertools;\n\n\n\nmod day01;\n\nmod day02;\n\nmod day03;\n\nmod day04;\n\nmod day05;\n\nmod day06;\n\nmod 
day07;\n\nmod day08;\n\nmod day09;\n\nmod day10;\n\nmod day11;\n\nmod day12;\n\nmod day13;\n\nmod day14;\n\nmod day15;\n\nmod day16;\n\nmod day17;\n\nmod day18;\n\nmod day19;\n\nmod day20;\n\nmod day21;\n\nmod day22;\n\nmod day23;\n\nmod day24;\n\nmod day25;\n\n\n", "file_path": "src/main.rs", "rank": 91, "score": 5.358152447277998 }, { "content": "\n\n map[current]\n\n .links\n\n .iter()\n\n .flat_map(|id| visit_node(*id, target, map, visited.clone(), small_cave || is_visited))\n\n .collect()\n\n}\n\n\n\npub(crate) fn day12(path: &str) -> Result<()> {\n\n let links: Vec<(_, _)> = std::fs::read_to_string(path)?\n\n .lines()\n\n .map(|l| l.split('-').next_tuple::<(_, _)>().unwrap())\n\n .map(|(a, b)| (String::from(a), String::from(b)))\n\n .collect();\n\n let inverted_links = links.iter().map(|(a, b)| (b.clone(), a.clone()));\n\n\n\n let link_map: HashMap<_, _> = links\n\n .iter()\n\n .map(Clone::clone)\n\n .chain(inverted_links)\n", "file_path": "src/day12.rs", "rank": 92, "score": 5.240189374923661 }, { "content": " let mut not_visited = (1..scanner_list.len()).collect::<HashSet<_>>();\n\n let mut queue = VecDeque::from([scanner_list[0].clone()]);\n\n let mut beacons = vec![scanner_list[0].clone()];\n\n let mut scanners = vec![Vector(0, 0, 0)];\n\n while queue.is_empty().not() {\n\n let next = queue.pop_front().unwrap();\n\n dbg!(\"pop\");\n\n\n\n for &i in not_visited.clone().iter() {\n\n dbg!((\"check\", i));\n\n let (pos, matched) = match_coord(&next, &scanner_list[i]);\n\n dbg!(matched.len());\n\n if !matched.is_empty() {\n\n dbg!((\"push\", i));\n\n beacons.push(matched.clone());\n\n queue.push_back(matched);\n\n not_visited.remove(&i);\n\n scanners.push(pos)\n\n }\n\n }\n", "file_path": "src/day19.rs", "rank": 93, "score": 5.128647655318166 }, { "content": " [(0, 1), (1, 0), (0, -1), (-1, 0)]\n\n .iter()\n\n .filter_map(|(yoffs, xoffs)| {\n\n (\n\n (x + xoffs) as usize,\n\n (y + yoffs) as usize,\n\n get(map.clone(), x + xoffs, y + yoffs)?,\n\n )\n\n 
.into()\n\n })\n\n .collect_vec()\n\n };\n\n\n\n let low_points: Vec<(usize, usize, usize)> = map\n\n .iter()\n\n .enumerate()\n\n .flat_map(|(y, line)| {\n\n line.iter()\n\n .enumerate()\n\n .map(|(x, height)| (x, y, *height))\n", "file_path": "src/day09.rs", "rank": 95, "score": 4.898931447795709 }, { "content": "use anyhow::Result;\n\nuse itertools::Itertools;\n\n\n\nuse std::collections::HashMap;\n\n\n\npub(crate) fn _show(map: &HashMap<(i32, i32), i32>) {\n\n println!();\n\n for x in 0..10 {\n\n for y in 0..10 {\n\n print!(\"{:2} \", map[&(x, y)])\n\n }\n\n println!();\n\n }\n\n}\n\n\n\npub(crate) fn inc_octopus(map: &mut HashMap<(i32, i32), i32>, x: i32, y: i32) {\n\n let target_octopus = match map.get_mut(&(x, y)) {\n\n Some(n) => n,\n\n None => return,\n\n };\n", "file_path": "src/day11.rs", "rank": 96, "score": 4.738354857106661 }, { "content": "\n\n *target_octopus += 1;\n\n if *target_octopus != 10 {\n\n return;\n\n }\n\n (x - 1..=x + 1)\n\n .cartesian_product(y - 1..=y + 1)\n\n .filter(|p| *p != (x, y))\n\n .for_each(|(x, y)| inc_octopus(map, x, y));\n\n}\n\n\n\npub(crate) fn day11(path: &str) -> Result<()> {\n\n let mut map: HashMap<(i32, i32), i32> = std::fs::read_to_string(path)?\n\n .lines()\n\n .enumerate()\n\n .flat_map(|(y, line)| {\n\n line.chars()\n\n .enumerate()\n\n .map(move |(x, c)| ((x as _, y as _), c as i32 - '0' as i32))\n\n })\n", "file_path": "src/day11.rs", "rank": 97, "score": 4.698564166160848 }, { "content": " let oxygen = bits_to_int(filtered_bits[0].clone());\n\n\n\n let mut filtered_bits = bits;\n\n let mut filter = vec![false; 0];\n\n while filtered_bits.len() > 1 {\n\n filter.push(!common_bits(&filtered_bits)[filter.len()]);\n\n filtered_bits.retain(|v| v.starts_with(filter.as_slice()))\n\n }\n\n let co2 = bits_to_int(filtered_bits[0].clone());\n\n\n\n dbg!((oxygen, co2, oxygen * co2));\n\n Ok(())\n\n}\n\n\n\npub(crate) fn day03_bin(path: &str) -> Result<()> {\n\n let file = std::fs::read_to_string(path)?;\n\n let lines = 
file.lines();\n\n\n\n let bit_count = lines.clone().next().unwrap().len();\n\n let nums = lines\n", "file_path": "src/day03.rs", "rank": 98, "score": 4.425918864822945 }, { "content": " (0, y)\n\n };\n\n\n\n if map.get(&ncell).copied() == Some(b'.') {\n\n hmap.insert((x, y), b'.');\n\n hmap.insert(ncell, b'>');\n\n }\n\n }\n\n\n\n let mut vmap = hmap.clone();\n\n for (&(x, y), _) in hmap.iter().filter(|&p| *p.1 == b'v') {\n\n let ncell = if map.get(&(x, y + 1)).is_some() {\n\n (x, y + 1)\n\n } else {\n\n (x, 0)\n\n };\n\n if hmap.get(&ncell).copied() == Some(b'.') {\n\n vmap.insert((x, y), b'.');\n\n vmap.insert(ncell, b'v');\n\n }\n", "file_path": "src/day25.rs", "rank": 99, "score": 4.370717345514041 } ]
Rust
src/ir/mod.rs
playXE/rcc
7f95c3d5b14f6912f04e9ce2baeb2bc3f29990ed
//! Code generation: lowers the checked AST to Cranelift IR and collects it
//! into a `cranelift_module::Module`.

mod expr;
mod static_init;
mod stmt;

use std::collections::{HashMap, VecDeque};
use std::convert::TryFrom;

use crate::data::{prelude::*, types::FunctionType, Initializer, Scope, StorageClass};
use crate::utils;
use cranelift::codegen::{
    self,
    ir::{
        entities::StackSlot,
        function::Function,
        stackslot::{StackSlotData, StackSlotKind},
        ExternalName, InstBuilder, MemFlags,
    },
    settings,
};
use cranelift::frontend::Switch;
use cranelift::prelude::{Block, FunctionBuilder, FunctionBuilderContext, Signature};
use cranelift_module::{self, Backend, DataId, FuncId, Linkage, Module};

/// What a C identifier resolves to during codegen: a declared function,
/// a global data object, or a stack slot for a local variable.
enum Id {
    Function(FuncId),
    Global(DataId),
    Local(StackSlot),
}

/// Per-translation-unit codegen state threaded through every lowering step.
struct Compiler<T: Backend> {
    // The Cranelift module everything is declared/defined into.
    module: Module<T>,
    // Maps C identifiers to their codegen representation (see `Id`).
    scope: Scope<InternedStr, Id>,
    // When true, pretty-prints each function's IR before verification.
    debug: bool,
    // Used by statement lowering (see stmt module); tracks loop/switch context.
    last_saw_loop: bool,
    // Deduplicates string literals: literal bytes -> data object.
    strings: HashMap<Vec<u8>, DataId>,
    // Stack of (continue-target, break-target) blocks for enclosing loops.
    loops: Vec<(Block, Block)>,
    // Stack of in-progress switches: (jump table, default block, break target).
    switches: Vec<(Switch, Option<Block>, Block)>,
    // goto labels seen so far, by name.
    labels: HashMap<InternedStr, Block>,
    // Accumulates warnings (and errors) emitted during codegen.
    error_handler: ErrorHandler,
}

/// Compile a whole translation unit into `module`.
///
/// Dispatches each top-level declaration: function prototypes are declared,
/// function definitions are fully compiled, and everything else is stored as
/// static data. Stops at the first error; warnings are returned either way.
pub(crate) fn compile<B: Backend>(
    module: Module<B>,
    program: Vec<Locatable<Declaration>>,
    debug: bool,
) -> (Result<Module<B>, CompileError>, VecDeque<CompileWarning>) {
    let mut err = None;
    let mut compiler = Compiler::<B>::new(module, debug);
    for decl in program {
        let current = match (decl.data.symbol.ctype.clone(), decl.data.init) {
            // Prototype only: declare it so calls can reference it.
            (Type::Function(func_type), None) => compiler
                .declare_func(
                    decl.data.symbol.id,
                    &func_type.signature(compiler.module.isa()),
                    decl.data.symbol.storage_class,
                    false,
                )
                .map(|_| ()),
            (Type::Void, _) => unreachable!("parser let an incomplete type through"),
            // Full definition: lower the body to Cranelift IR.
            (Type::Function(func_type), Some(Initializer::FunctionBody(stmts))) => compiler
                .compile_func(
                    decl.data.symbol.id,
                    func_type,
                    decl.data.symbol.storage_class,
                    stmts,
                    decl.location,
                ),
            (_, Some(Initializer::FunctionBody(_))) => {
                unreachable!("only functions should have a function body")
            }
            // Global variable (with or without initializer).
            (_, init) => compiler.store_static(decl.data.symbol, init, decl.location),
        };
        if let Err(e) = current {
            // First error aborts codegen for the rest of the unit.
            err = Some(e);
            break;
        }
    }
    let warns = compiler.error_handler.warnings;
    if let Some(err) = err {
        (Err(err), warns)
    } else {
        (Ok(compiler.module), warns)
    }
}

impl<B: Backend> Compiler<B> {
    /// Create a compiler with empty scope/loop/switch/label state.
    fn new(module: Module<B>, debug: bool) -> Compiler<B> {
        Compiler {
            module,
            scope: Scope::new(),
            loops: Vec::new(),
            switches: Vec::new(),
            labels: HashMap::new(),
            last_saw_loop: true,
            strings: Default::default(),
            error_handler: Default::default(),
            debug,
        }
    }
    /// Declare `id` in the Cranelift module and record it in scope.
    ///
    /// For a redeclaration (not a definition) of an already-known function,
    /// the cached `FuncId` is returned without re-declaring. Storage class
    /// maps to linkage: extern/auto -> Export (if defining) or Import,
    /// static -> Local.
    fn declare_func(
        &mut self,
        id: InternedStr,
        signature: &Signature,
        sc: StorageClass,
        is_definition: bool,
    ) -> CompileResult<FuncId> {
        use crate::get_str;
        if !is_definition {
            // Reuse an earlier declaration of the same function.
            if let Some(Id::Function(func_id)) = self.scope.get(&id) {
                return Ok(*func_id);
            }
        }
        let linkage = match sc {
            StorageClass::Auto | StorageClass::Extern if is_definition => Linkage::Export,
            StorageClass::Auto | StorageClass::Extern => Linkage::Import,
            StorageClass::Static => Linkage::Local,
            // These classes are rejected before codegen.
            StorageClass::Register | StorageClass::Typedef => unreachable!(),
        };
        let func_id = self
            .module
            .declare_function(get_str!(id), linkage, &signature)
            // A module-level declaration failure is not recoverable here.
            .unwrap_or_else(|err| utils::fatal(err, 6));
        self.scope.insert(id, Id::Function(func_id));
        Ok(func_id)
    }
    /// Reserve storage for a block-scope declaration inside the current
    /// function and run its initializer, if any.
    ///
    /// Local function declarations are forwarded to `declare_func`; anything
    /// else gets an explicit stack slot sized from its C type. Errors if the
    /// type's size is unknown or exceeds `u32::MAX` bytes (Cranelift stack
    /// slots are 32-bit sized).
    fn declare_stack(
        &mut self,
        decl: Declaration,
        location: Location,
        builder: &mut FunctionBuilder,
    ) -> CompileResult<()> {
        if let Type::Function(ftype) = decl.symbol.ctype {
            // `int f(void);` at block scope: a declaration, not a local.
            self.declare_func(
                decl.symbol.id,
                &ftype.signature(self.module.isa()),
                decl.symbol.storage_class,
                false,
            )?;
            return Ok(());
        }
        let u64_size = match decl.symbol.ctype.sizeof() {
            Ok(size) => size,
            Err(err) => {
                return Err(CompileError::semantic(Locatable {
                    data: err.into(),
                    location,
                }))
            }
        };
        let kind = StackSlotKind::ExplicitSlot;
        // Cranelift stack slot sizes are u32; reject anything larger.
        let size = match u32::try_from(u64_size) {
            Ok(size) => size,
            Err(_) => return Err(CompileError::semantic(Locatable {
                data: "cannot store items on the stack that are more than 4 GB, it will overflow the stack".into(),
                location,
            }))
        };
        let data = StackSlotData {
            kind,
            size,
            offset: None,
        };
        let stack_slot = builder.create_stack_slot(data);
        self.scope.insert(decl.symbol.id, Id::Local(stack_slot));
        if let Some(init) = decl.init {
            self.store_stack(init, stack_slot, builder)?;
        }
        Ok(())
    }
    /// Evaluate `init` and store the result into `stack_slot`.
    ///
    /// Only scalar initializers are implemented; aggregate initializer lists
    /// at block scope are not yet supported.
    fn store_stack(
        &mut self,
        init: Initializer,
        stack_slot: StackSlot,
        builder: &mut FunctionBuilder,
    ) -> CompileResult<()> {
        match init {
            Initializer::Scalar(expr) => {
                let val = self.compile_expr(*expr, builder)?;
                // Take the slot's address and store the computed value there.
                let addr = builder.ins().stack_addr(Type::ptr_type(), stack_slot, 0);
                builder.ins().store(MemFlags::new(), val.ir_val, addr, 0);
            }
            Initializer::InitializerList(_) => unimplemented!("aggregate dynamic initialization"),
            Initializer::FunctionBody(_) => unreachable!("functions can't be stored on the stack"),
        }
        Ok(())
    }
    /// Spill each incoming parameter to its own stack slot and bind it in
    /// scope, so parameters can be addressed/assigned like ordinary locals.
    ///
    /// Appends one block parameter to `func_start` per C parameter, then
    /// stores each value into a freshly created slot.
    fn store_stack_params(
        &mut self,
        params: Vec<Symbol>,
        func_start: Block,
        location: &Location,
        builder: &mut FunctionBuilder,
    ) -> CompileResult<()> {
        // Materialize the ABI parameters as block params of the entry block.
        let ir_vals: Vec<_> = params
            .iter()
            .map(|param| {
                let ir_type = param.ctype.as_ir_type();
                Ok(builder.append_block_param(func_start, ir_type))
            })
            .collect::<CompileResult<_>>()?;
        for (param, ir_val) in params.into_iter().zip(ir_vals) {
            let u64_size = match param.ctype.sizeof() {
                Err(data) => semantic_err!(data.into(), *location),
                Ok(size) => size,
            };
            // Same u32 limit as `declare_stack`: slot sizes are 32-bit.
            let u32_size = match u32::try_from(u64_size) {
                Err(_) => semantic_err!(
                    format!(
                        "size {} is too large for stack (can only handle 32-bit values)",
                        u64_size
                    ),
                    *location
                ),
                Ok(size) => size,
            };
            let stack_data = StackSlotData {
                kind: StackSlotKind::ExplicitSlot,
                size: u32_size,
                offset: None,
            };
            let slot = builder.create_stack_slot(stack_data);
            let addr = builder.ins().stack_addr(Type::ptr_type(), slot, 0);
            builder.ins().store(MemFlags::new(), ir_val, addr, 0);
            self.scope.insert(param.id, Id::Local(slot));
        }
        Ok(())
    }
    /// Compile a full function definition: declare it, build its IR from
    /// `stmts`, verify it, and define it in the module.
    ///
    /// If control can fall off the end of the body: `main` gets an implicit
    /// `return 0`, a void function gets an empty return, and any other
    /// function returning a value is a semantic error.
    fn compile_func(
        &mut self,
        id: InternedStr,
        func_type: FunctionType,
        sc: StorageClass,
        stmts: Vec<Stmt>,
        location: Location,
    ) -> CompileResult<()> {
        let signature = func_type.signature(self.module.isa());
        let func_id = self.declare_func(id.clone(), &signature, sc, true)?;
        // New lexical scope for parameters and body locals.
        self.scope.enter();
        let mut func = Function::with_name_signature(ExternalName::user(0, 0), signature);
        let mut ctx = FunctionBuilderContext::new();
        let mut builder = FunctionBuilder::new(&mut func, &mut ctx);
        let func_start = builder.create_block();
        builder.switch_to_block(func_start);
        let should_ret = func_type.should_return();
        if func_type.has_params() {
            self.store_stack_params(func_type.params, func_start, &location, &mut builder)?;
        }
        self.compile_all(stmts, &mut builder)?;
        // Body ended without an explicit return on every path.
        if !builder.is_filled() {
            if id == InternedStr::get_or_intern("main") {
                // C allows `main` to omit the return; synthesize `return 0`.
                let ir_int = func_type.return_type.as_ir_type();
                let zero = [builder.ins().iconst(ir_int, 0)];
                builder.ins().return_(&zero);
            } else if should_ret {
                semantic_err!(
                    format!(
                        "expected a return statement before end of function '{}' returning {}",
                        id, func_type.return_type
                    ),
                    location
                );
            } else {
                // void function: plain return.
                builder.ins().return_(&[]);
            }
        }
        self.scope.exit();
        builder.seal_all_blocks();
        builder.finalize();
        let flags = settings::Flags::new(settings::builder());
        if self.debug {
            // Dump the generated IR when debugging is on.
            println!("{}", func);
        }
        // Verifier or definition failures indicate a codegen bug; print the
        // offending IR and abort with a distinct exit code.
        if let Err(err) = codegen::verify_function(&func, &flags) {
            println!("{}", func);
            utils::fatal(err, 3);
        }
        let mut ctx = codegen::Context::for_function(func);
        if let Err(err) = self.module.define_function(func_id, &mut ctx) {
            println!("{}", ctx.func);
            utils::fatal(err, 4);
        }
        Ok(())
    }
}

impl FunctionType {
    /// True unless the parameter list is the C `(void)` form, i.e. exactly
    /// one parameter whose type is `void`.
    fn has_params(&self) -> bool {
        !(self.params.len() == 1 && self.params[0].ctype == Type::Void)
    }
}
mod expr; mod static_init; mod stmt; use std::collections::{HashMap, VecDeque}; use std::convert::TryFrom; use crate::data::{prelude::*, types::FunctionType, Initializer, Scope, StorageClass}; use crate::utils; use cranelift::codegen::{ self, ir::{ entities::StackSlot, function::Function, stackslot::{StackSlotData, StackSlotKind}, ExternalName, InstBuilder, MemFlags, }, settings, }; use cranelift::frontend::Switch; use cranelift::prelude::{Block, FunctionBuilder, FunctionBuilderContext, Signature}; use cranelift_module::{self, Backend, DataId, FuncId, Linkage, Module}; enum Id { Function(FuncId), Global(DataId), Local(StackSlot), } struct Compiler<T: Backend> { module: Module<T>, scope: Scope<InternedStr, Id>, debug: bool, last_saw_loop: bool, strings: HashMap<Vec<u8>, DataId>, loops: Vec<(Block, Block)>, switches: Vec<(Switch, Option<Block>, Block)>, labels: HashMap<InternedStr, Block>, error_handler: ErrorHandler, } pub(crate) fn compile<B: Backend>( module: Module<B>, program: Vec<Locatable<Declaration>>, debug: bool, ) -> (Result<Module<B>, CompileError>, VecDeque<CompileWarning>) { let mut err = None; let mut compiler = Compiler::<B>::new(module, debug); for decl in program { let current = match (decl.data.symbol.ctype.clone(), decl.data.init) { (Type::Function(func_type), None) => compiler .declare_func( decl.data.symbol.id, &func_type.signature(compiler.module.isa()), decl.data.symbol.storage_class, false, ) .map(|_| ()), (Type::Void, _) => unreachable!("parser let an incomplete type through"), (Type::Function(func_type), Some(Initializer::FunctionBody(stmts))) => compiler .compile_func( decl.data.symbol.id, func_type, decl.data.symbol.storage_class, stmts, decl.location, ), (_, Some(Initializer::FunctionBody(_))) => { unreachable!("only functions should have a function body") } (_, init) => compiler.store_static(decl.data.symbol, init, decl.location), }; if let Err(e) = current { err = Some(e); break; } } let warns = compiler.error_handler.warnings; if let 
Some(err) = err { (Err(err), warns) } else { (Ok(compiler.module), warns) } } impl<B: Backend> Compiler<B> { fn new(module: Module<B>, debug: bool) -> Compiler<B> { Compiler { module, scope: Scope::new(), loops: Vec::new(), switches: Vec::new(), labels: HashMap::new(), last_saw_loop: true, strings: Default::default(), error_handler: Default::default(), debug, } }
fn declare_stack( &mut self, decl: Declaration, location: Location, builder: &mut FunctionBuilder, ) -> CompileResult<()> { if let Type::Function(ftype) = decl.symbol.ctype { self.declare_func( decl.symbol.id, &ftype.signature(self.module.isa()), decl.symbol.storage_class, false, )?; return Ok(()); } let u64_size = match decl.symbol.ctype.sizeof() { Ok(size) => size, Err(err) => { return Err(CompileError::semantic(Locatable { data: err.into(), location, })) } }; let kind = StackSlotKind::ExplicitSlot; let size = match u32::try_from(u64_size) { Ok(size) => size, Err(_) => return Err(CompileError::semantic(Locatable { data: "cannot store items on the stack that are more than 4 GB, it will overflow the stack".into(), location, })) }; let data = StackSlotData { kind, size, offset: None, }; let stack_slot = builder.create_stack_slot(data); self.scope.insert(decl.symbol.id, Id::Local(stack_slot)); if let Some(init) = decl.init { self.store_stack(init, stack_slot, builder)?; } Ok(()) } fn store_stack( &mut self, init: Initializer, stack_slot: StackSlot, builder: &mut FunctionBuilder, ) -> CompileResult<()> { match init { Initializer::Scalar(expr) => { let val = self.compile_expr(*expr, builder)?; let addr = builder.ins().stack_addr(Type::ptr_type(), stack_slot, 0); builder.ins().store(MemFlags::new(), val.ir_val, addr, 0); } Initializer::InitializerList(_) => unimplemented!("aggregate dynamic initialization"), Initializer::FunctionBody(_) => unreachable!("functions can't be stored on the stack"), } Ok(()) } fn store_stack_params( &mut self, params: Vec<Symbol>, func_start: Block, location: &Location, builder: &mut FunctionBuilder, ) -> CompileResult<()> { let ir_vals: Vec<_> = params .iter() .map(|param| { let ir_type = param.ctype.as_ir_type(); Ok(builder.append_block_param(func_start, ir_type)) }) .collect::<CompileResult<_>>()?; for (param, ir_val) in params.into_iter().zip(ir_vals) { let u64_size = match param.ctype.sizeof() { Err(data) => semantic_err!(data.into(), 
*location), Ok(size) => size, }; let u32_size = match u32::try_from(u64_size) { Err(_) => semantic_err!( format!( "size {} is too large for stack (can only handle 32-bit values)", u64_size ), *location ), Ok(size) => size, }; let stack_data = StackSlotData { kind: StackSlotKind::ExplicitSlot, size: u32_size, offset: None, }; let slot = builder.create_stack_slot(stack_data); let addr = builder.ins().stack_addr(Type::ptr_type(), slot, 0); builder.ins().store(MemFlags::new(), ir_val, addr, 0); self.scope.insert(param.id, Id::Local(slot)); } Ok(()) } fn compile_func( &mut self, id: InternedStr, func_type: FunctionType, sc: StorageClass, stmts: Vec<Stmt>, location: Location, ) -> CompileResult<()> { let signature = func_type.signature(self.module.isa()); let func_id = self.declare_func(id.clone(), &signature, sc, true)?; self.scope.enter(); let mut func = Function::with_name_signature(ExternalName::user(0, 0), signature); let mut ctx = FunctionBuilderContext::new(); let mut builder = FunctionBuilder::new(&mut func, &mut ctx); let func_start = builder.create_block(); builder.switch_to_block(func_start); let should_ret = func_type.should_return(); if func_type.has_params() { self.store_stack_params(func_type.params, func_start, &location, &mut builder)?; } self.compile_all(stmts, &mut builder)?; if !builder.is_filled() { if id == InternedStr::get_or_intern("main") { let ir_int = func_type.return_type.as_ir_type(); let zero = [builder.ins().iconst(ir_int, 0)]; builder.ins().return_(&zero); } else if should_ret { semantic_err!( format!( "expected a return statement before end of function '{}' returning {}", id, func_type.return_type ), location ); } else { builder.ins().return_(&[]); } } self.scope.exit(); builder.seal_all_blocks(); builder.finalize(); let flags = settings::Flags::new(settings::builder()); if self.debug { println!("{}", func); } if let Err(err) = codegen::verify_function(&func, &flags) { println!("{}", func); utils::fatal(err, 3); } let mut ctx = 
codegen::Context::for_function(func); if let Err(err) = self.module.define_function(func_id, &mut ctx) { println!("{}", ctx.func); utils::fatal(err, 4); } Ok(()) } } impl FunctionType { fn has_params(&self) -> bool { !(self.params.len() == 1 && self.params[0].ctype == Type::Void) } }
fn declare_func( &mut self, id: InternedStr, signature: &Signature, sc: StorageClass, is_definition: bool, ) -> CompileResult<FuncId> { use crate::get_str; if !is_definition { if let Some(Id::Function(func_id)) = self.scope.get(&id) { return Ok(*func_id); } } let linkage = match sc { StorageClass::Auto | StorageClass::Extern if is_definition => Linkage::Export, StorageClass::Auto | StorageClass::Extern => Linkage::Import, StorageClass::Static => Linkage::Local, StorageClass::Register | StorageClass::Typedef => unreachable!(), }; let func_id = self .module .declare_function(get_str!(id), linkage, &signature) .unwrap_or_else(|err| utils::fatal(err, 6)); self.scope.insert(id, Id::Function(func_id)); Ok(func_id) }
function_block-full_function
[]
Rust
src/lib.rs
Ujang360/py-udp-loop
73bd587a64d262bc98a4d0784eb77d95a1141282
use crossbeam_queue::ArrayQueue; use pyo3::prelude::*; use pyo3::types::PyByteArray; use std::net::{SocketAddr, UdpSocket}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex}; use std::thread::{sleep, spawn as spawn_thread, JoinHandle}; use std::time::Duration; pub const LOOP_GRACE_DURATION_MS: u64 = 1; pub const MAX_PENDING_TX: usize = 128; pub const MAX_PENDING_RX: usize = 128; pub const MAX_PACKET_SIZE: usize = 1460; #[pyclass(freelist = 1024)] #[derive(Clone, Debug)] pub struct UdpPacket { pub peer: SocketAddr, pub data: Vec<u8>, } #[pymethods] impl UdpPacket { #[new] pub fn new(peer_ip_address: &str, peer_port: u16) -> Self { Self { peer: SocketAddr::new(peer_ip_address.parse().unwrap(), peer_port), data: Vec::new(), } } #[getter] pub fn get_data<'a>(&self, py: Python<'a>) -> PyResult<&'a PyByteArray> { Ok(PyByteArray::new(py, &self.data[..])) } #[setter] pub fn set_data(&mut self, raw_bytes: &PyByteArray) -> PyResult<()> { self.data = raw_bytes.to_vec(); Ok(()) } } #[pyclass] pub struct UdpLoop { stop_flag: Arc<AtomicBool>, pending_tx: Arc<ArrayQueue<UdpPacket>>, pending_rx: Arc<ArrayQueue<UdpPacket>>, loop_handle: Mutex<Option<JoinHandle<()>>>, } impl Default for UdpLoop { fn default() -> Self { Self { stop_flag: Arc::new(AtomicBool::new(false)), pending_tx: Arc::new(ArrayQueue::new(MAX_PENDING_TX)), pending_rx: Arc::new(ArrayQueue::new(MAX_PENDING_RX)), loop_handle: Mutex::new(None), } } } #[pymethods] impl UdpLoop { #[new] pub fn new() -> Self { Default::default() } pub fn try_receive(&self) -> PyResult<Option<UdpPacket>> { match self.pending_rx.pop() { Err(_) => Ok(None), Ok(packet) => Ok(Some(packet)), } } pub fn transmit(&self, packet: UdpPacket) -> PyResult<bool> { if let Err(_) = self.pending_tx.push(packet) { return Ok(false); } Ok(true) } pub fn start(&mut self, listen_address: &str, listen_port: u16) -> PyResult<bool> { let stop_flag = self.stop_flag.clone(); let pending_tx = self.pending_tx.clone(); let pending_rx = 
self.pending_rx.clone(); let listen_address = listen_address.to_string(); let mut loop_handle = self.loop_handle.lock().unwrap(); if loop_handle.is_some() { return Ok(false); } (*loop_handle) = Some(spawn_thread(move || { let loop_grace_duration = Duration::from_millis(LOOP_GRACE_DURATION_MS); let mut buffer_rx = [0u8; MAX_PACKET_SIZE]; let binding_socket = SocketAddr::new(listen_address.parse().unwrap(), listen_port); let udp_socket = UdpSocket::bind(binding_socket).unwrap(); udp_socket.set_nonblocking(true).unwrap(); let pending_tx = pending_tx; let pending_rx = pending_rx; while !stop_flag.load(Ordering::Relaxed) { if let Ok(new_tx_packet) = pending_tx.pop() { let _ = udp_socket.send_to(&new_tx_packet.data, new_tx_packet.peer); } match udp_socket.recv_from(&mut buffer_rx) { Err(_) => { if pending_tx.is_empty() { sleep(loop_grace_duration); } } Ok((rx_length, peer)) => { if rx_length > buffer_rx.len() { eprintln!("Received packet beyond maximum size of {}", MAX_PACKET_SIZE); } else { let new_rx_packet = UdpPacket { peer, data: (&buffer_rx[0..rx_length]).to_vec(), }; let _ = pending_rx.push(new_rx_packet); } } } } })); Ok(true) } pub fn stop(&mut self) -> PyResult<bool> { match self.loop_handle.lock().unwrap().take() { None => Ok(false), Some(loop_handle) => { self.stop_flag.store(true, Ordering::Relaxed); let _ = loop_handle.join(); Ok(true) } } } } #[pymodule] fn py_udp_loop(_: Python, m: &PyModule) -> PyResult<()> { m.add_class::<UdpPacket>()?; m.add_class::<UdpLoop>()?; Ok(()) }
use crossbeam_queue::ArrayQueue; use pyo3::prelude::*; use pyo3::types::PyByteArray; use std::net::{SocketAddr, UdpSocket}; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::{Arc, Mutex}; use std::thread::{sleep, spawn as spawn_thread, JoinHandle}; use std::time::Duration; pub const LOOP_GRACE_DURATION_MS: u64 = 1; pub const MAX_PENDING_TX: usize = 128; pub const MAX_PENDING_RX: usize = 128; pub const MAX_PACKET_SIZE: usize = 1460; #[pyclass(freelist = 1024)] #[derive(Clone, Debug)] pub struct UdpPacket { pub peer: SocketAddr, pub data: Vec<u8>, } #[pymethods] impl UdpPacket { #[new] pub fn new(peer_ip_address: &str, peer_port: u16) -> Self { Self { peer: SocketAddr::new(peer_ip_address.parse().unwrap(), peer_port), data: Vec::new(), } } #[getter] pub fn get_data<'a>(&self, py: Python<'a>) -> PyResult<&'a PyByteArray> { Ok(PyByteArray::new(py, &self.data[..])) } #[setter] pub fn set_data(&mut self, raw_bytes: &PyByteArray) -> PyResult<()> { self.data = raw_bytes.to_vec(); Ok(()) } } #[pyclass] pub struct UdpLoop { stop_flag: Arc<AtomicBool>, pending_tx: Arc<ArrayQueue<UdpPacket>>, pending_rx: Arc<ArrayQueue<UdpPacket>>, loop_handle: Mutex<Option<JoinHandle<()>>>, } impl Default for UdpLoop { fn default() -> Self { Self { stop_flag: Arc::new(AtomicBool::new(false)), pending_tx: Arc::new(ArrayQueue::new(MAX_PENDING_TX)), pending_rx: Arc::new(ArrayQueue::new(MAX_PENDING_RX)), loop_handle: Mutex::new(None), } } } #[pymethods] impl UdpLoop { #[new] pub fn new() -> Self { Default::default() } pub fn try_receive(&self) -> PyResult<Option<UdpPacket>> { match self.pending_rx.pop() { Err(_) => Ok(None), Ok(packet) => Ok(Some(packet)), } } pub fn transmit(&self, packet: UdpPacket) -> PyResult<bool> { if let Err(_) = self.pending_tx.push(packet) { return Ok(false); } Ok(true) } pub fn start(&mut self, listen_address: &str, listen_port: u16) -> PyResult<bool> { let stop_flag = self.stop_flag.clone(); let pending_tx = self.pending_tx.clone(); let pending_rx = 
self.pending_rx.clone(); let listen_address = listen_address.to_string(); let mut loop_handle = self.loop_handle.lock().unwrap(); if loop_handle.is_some() { return Ok(false); } (*loop_handle) =
; Ok(true) } pub fn stop(&mut self) -> PyResult<bool> { match self.loop_handle.lock().unwrap().take() { None => Ok(false), Some(loop_handle) => { self.stop_flag.store(true, Ordering::Relaxed); let _ = loop_handle.join(); Ok(true) } } } } #[pymodule] fn py_udp_loop(_: Python, m: &PyModule) -> PyResult<()> { m.add_class::<UdpPacket>()?; m.add_class::<UdpLoop>()?; Ok(()) }
Some(spawn_thread(move || { let loop_grace_duration = Duration::from_millis(LOOP_GRACE_DURATION_MS); let mut buffer_rx = [0u8; MAX_PACKET_SIZE]; let binding_socket = SocketAddr::new(listen_address.parse().unwrap(), listen_port); let udp_socket = UdpSocket::bind(binding_socket).unwrap(); udp_socket.set_nonblocking(true).unwrap(); let pending_tx = pending_tx; let pending_rx = pending_rx; while !stop_flag.load(Ordering::Relaxed) { if let Ok(new_tx_packet) = pending_tx.pop() { let _ = udp_socket.send_to(&new_tx_packet.data, new_tx_packet.peer); } match udp_socket.recv_from(&mut buffer_rx) { Err(_) => { if pending_tx.is_empty() { sleep(loop_grace_duration); } } Ok((rx_length, peer)) => { if rx_length > buffer_rx.len() { eprintln!("Received packet beyond maximum size of {}", MAX_PACKET_SIZE); } else { let new_rx_packet = UdpPacket { peer, data: (&buffer_rx[0..rx_length]).to_vec(), }; let _ = pending_rx.push(new_rx_packet); } } } } }))
call_expression
[]
Rust
machine_manager/src/config/machine_config.rs
ican2002/stratorvirt
b8e9b675ae0fac9e359a2096cc818f92c4f11c30
extern crate serde; extern crate serde_json; use std::str::FromStr; use serde::{Deserialize, Serialize}; use super::errors::{ErrorKind, Result, ResultExt}; use crate::config::{CmdParser, ConfigCheck, ExBool, VmConfig}; const DEFAULT_CPUS: u8 = 1; const DEFAULT_MEMSIZE: u64 = 256; const MAX_NR_CPUS: u64 = 254; const MIN_NR_CPUS: u64 = 1; const MAX_MEMSIZE: u64 = 549_755_813_888; const MIN_MEMSIZE: u64 = 268_435_456; const M: u64 = 1024 * 1024; const G: u64 = 1024 * 1024 * 1024; #[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)] pub enum MachineType { None, MicroVm, StandardVm, } impl FromStr for MachineType { type Err = (); fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { match s.to_lowercase().as_str() { "none" => Ok(MachineType::None), "microvm" => Ok(MachineType::MicroVm), #[cfg(target_arch = "x86_64")] "q35" => Ok(MachineType::StandardVm), #[cfg(target_arch = "aarch64")] "virt" => Ok(MachineType::StandardVm), _ => Err(()), } } } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct MachineMemConfig { pub mem_size: u64, pub mem_path: Option<String>, pub dump_guest_core: bool, pub mem_share: bool, } impl Default for MachineMemConfig { fn default() -> Self { MachineMemConfig { mem_size: DEFAULT_MEMSIZE * M, mem_path: None, dump_guest_core: true, mem_share: false, } } } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct MachineConfig { pub mach_type: MachineType, pub nr_cpus: u8, pub mem_config: MachineMemConfig, } impl Default for MachineConfig { fn default() -> Self { MachineConfig { mach_type: MachineType::MicroVm, nr_cpus: DEFAULT_CPUS, mem_config: MachineMemConfig::default(), } } } impl ConfigCheck for MachineConfig { fn check(&self) -> Result<()> { if self.mem_config.mem_size < MIN_MEMSIZE || self.mem_config.mem_size > MAX_MEMSIZE { bail!("Memory size must >= 256MiB and <= 512GiB, default unit: MiB, current memory size: {:?} bytes", &self.mem_config.mem_size); } Ok(()) } } impl VmConfig { pub fn add_machine(&mut 
self, mach_config: &str) -> Result<()> { let mut cmd_parser = CmdParser::new("machine"); cmd_parser .push("") .push("type") .push("accel") .push("usb") .push("dump-guest-core") .push("mem-share"); #[cfg(target_arch = "aarch64")] cmd_parser.push("gic-version"); cmd_parser.parse(mach_config)?; #[cfg(target_arch = "aarch64")] if let Some(gic_version) = cmd_parser.get_value::<u8>("gic-version")? { if gic_version != 3 { bail!("Unsupported gic version, only gicv3 is supported"); } } if let Some(accel) = cmd_parser.get_value::<String>("accel")? { if accel.ne("kvm:tcg") && accel.ne("tcg") && accel.ne("kvm") { bail!("Only \'kvm\', \'kvm:tcg\' and \'tcg\' are supported for \'accel\' of \'machine\'"); } } if let Some(usb) = cmd_parser.get_value::<ExBool>("usb")? { if usb.into() { bail!("Argument \'usb\' should be set to \'off\'"); } } if let Some(mach_type) = cmd_parser .get_value::<MachineType>("") .chain_err(|| "Unrecognized machine type")? { self.machine_config.mach_type = mach_type; } if let Some(mach_type) = cmd_parser .get_value::<MachineType>("type") .chain_err(|| "Unrecognized machine type")? { self.machine_config.mach_type = mach_type; } if let Some(dump_guest) = cmd_parser.get_value::<ExBool>("dump-guest-core")? { self.machine_config.mem_config.dump_guest_core = dump_guest.into(); } if let Some(mem_share) = cmd_parser.get_value::<ExBool>("mem-share")? { self.machine_config.mem_config.mem_share = mem_share.into(); } Ok(()) } pub fn add_memory(&mut self, mem_config: &str) -> Result<()> { let mut cmd_parser = CmdParser::new("m"); cmd_parser.push("").push("size"); cmd_parser.parse(mem_config)?; let mem = if let Some(mem_size) = cmd_parser.get_value::<String>("")? { memory_unit_conversion(&mem_size)? } else if let Some(mem_size) = cmd_parser.get_value::<String>("size")? { memory_unit_conversion(&mem_size)? 
} else { return Err(ErrorKind::FieldIsMissing("size", "memory").into()); }; self.machine_config.mem_config.mem_size = mem; Ok(()) } pub fn add_cpu(&mut self, cpu_config: &str) -> Result<()> { let mut cmd_parser = CmdParser::new("smp"); cmd_parser .push("") .push("sockets") .push("cores") .push("threads") .push("cpus"); cmd_parser.parse(cpu_config)?; let cpu = if let Some(cpu) = cmd_parser.get_value::<u64>("")? { cpu } else if let Some(cpu) = cmd_parser.get_value::<u64>("cpus")? { cpu } else { return Err(ErrorKind::FieldIsMissing("cpus", "smp").into()); }; if let Some(sockets) = cmd_parser.get_value::<u64>("sockets")? { if sockets.ne(&cpu) { bail!("Invalid \'sockets\' arguments for \'smp\', it should equal to the number of cpus"); } } if let Some(cores) = cmd_parser.get_value::<u64>("cores")? { if cores.ne(&1) { bail!("Invalid \'cores\' arguments for \'smp\', it should be \'1\'"); } } if let Some(threads) = cmd_parser.get_value::<u64>("threads")? { if threads.ne(&1) { bail!("Invalid \'threads\' arguments for \'smp\', it should be \'1\'"); } } if !(MIN_NR_CPUS..=MAX_NR_CPUS).contains(&cpu) { return Err(ErrorKind::IllegalValue( "CPU number".to_string(), MIN_NR_CPUS, true, MAX_NR_CPUS, true, ) .into()); } self.machine_config.nr_cpus = cpu as u8; Ok(()) } pub fn add_mem_path(&mut self, mem_path: &str) -> Result<()> { self.machine_config.mem_config.mem_path = Some(mem_path.replace("\"", "")); Ok(()) } } fn memory_unit_conversion(origin_value: &str) -> Result<u64> { if (origin_value.ends_with('M') | origin_value.ends_with('m')) && (origin_value.contains('M') ^ origin_value.contains('m')) { let value = origin_value.replacen("M", "", 1); let value = value.replacen("m", "", 1); get_inner( value .parse::<u64>() .map_err(|_| { ErrorKind::ConvertValueFailed(origin_value.to_string(), String::from("u64")) })? 
.checked_mul(M), ) } else if (origin_value.ends_with('G') | origin_value.ends_with('g')) && (origin_value.contains('G') ^ origin_value.contains('g')) { let value = origin_value.replacen("G", "", 1); let value = value.replacen("g", "", 1); get_inner( value .parse::<u64>() .map_err(|_| { ErrorKind::ConvertValueFailed(origin_value.to_string(), String::from("u64")) })? .checked_mul(G), ) } else { let size = origin_value.parse::<u64>().map_err(|_| { ErrorKind::ConvertValueFailed(origin_value.to_string(), String::from("u64")) })?; let memory_size = size.checked_mul(M); get_inner(memory_size) } } fn get_inner<T>(outer: Option<T>) -> Result<T> { if let Some(x) = outer { Ok(x) } else { Err(ErrorKind::IntegerOverflow("-m".to_string()).into()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_health_check() { let memory_config = MachineMemConfig { mem_size: MIN_MEMSIZE, mem_path: None, mem_share: false, dump_guest_core: false, }; let mut machine_config = MachineConfig { mach_type: MachineType::MicroVm, nr_cpus: MIN_NR_CPUS as u8, mem_config: memory_config, }; assert!(machine_config.check().is_ok()); machine_config.nr_cpus = MAX_NR_CPUS as u8; machine_config.mem_config.mem_size = MAX_MEMSIZE; assert!(machine_config.check().is_ok()); machine_config.nr_cpus = MIN_NR_CPUS as u8; machine_config.mem_config.mem_size = MIN_MEMSIZE - 1; assert!(!machine_config.check().is_ok()); machine_config.mem_config.mem_size = MAX_MEMSIZE + 1; assert!(!machine_config.check().is_ok()); machine_config.mem_config.mem_size = MIN_MEMSIZE; assert!(machine_config.check().is_ok()); } }
extern crate serde; extern crate serde_json; use std::str::FromStr; use serde::{Deserialize, Serialize}; use super::errors::{ErrorKind, Result, ResultExt}; use crate::config::{CmdParser, ConfigCheck, ExBool, VmConfig}; const DEFAULT_CPUS: u8 = 1; const DEFAULT_MEMSIZE: u64 = 256; const MAX_NR_CPUS: u64 = 254; const MIN_NR_CPUS: u64 = 1; const MAX_MEMSIZE: u64 = 549_755_813_888; const MIN_MEMSIZE: u64 = 268_435_456; const M: u64 = 1024 * 1024; const G: u64 = 1024 * 1024 * 1024; #[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)] pub enum MachineType { None, MicroVm, StandardVm, } impl FromStr for MachineType { type Err = (); fn from_str(s: &str) -> std::result::Result<Self, Self::Err> { match s.to_lowercase().as_str() { "none" => Ok(MachineType::None), "microvm" => Ok(MachineType::MicroVm), #[cfg(target_arch = "x86_64")] "q35" => Ok(MachineType::StandardVm), #[cfg(target_arch = "aarch64")] "virt" => Ok(MachineType::StandardVm), _ => Err(()), } } } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct MachineMemConfig { pub mem_size: u64, pub mem_path: Option<String>, pub dump_guest_core: bool, pub mem_share: bool, } impl Default for MachineMemConfig { fn default() -> Self { MachineMemConfig { mem_size: DEFAULT_MEMSIZE * M, mem_path: None, dump_guest_core: true, mem_share: false, } } } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct MachineConfig { pub mach_type: MachineType, pub nr_cpus: u8, pub mem_config: MachineMemConfig, } impl Default for MachineConfig { fn default() -> Self { MachineConfig { mach_type: MachineType::MicroVm, nr_cpus: DEFAULT_CPUS, mem_config: MachineMemConfig::default(), } } } impl ConfigCheck for MachineConfig { fn check(&self) -> Result<()> { if self.mem_config.mem_size < MIN_MEMSIZE || self.mem_config.mem_size > MAX_MEMSIZE { bail!("Memory size must >= 256MiB and <= 512GiB, default unit: MiB, current memory size: {:?} bytes", &self.mem_config.mem_size); } Ok(()) } } impl VmConfig { pub fn add_machine(&mut 
self, mach_config: &str) -> Result<()> { let mut cmd_parser = CmdParser::new("machine"); cmd_parser .push("") .push("type") .push("accel") .push("usb") .push("dump-guest-core") .push("mem-share"); #[cfg(target_arch = "aarch64")] cmd_parser.push("gic-version"); cmd_parser.parse(mach_config)?; #[cfg(target_arch = "aarch64")] if let Some(gic_version) = cmd_parser.get_value::<u8>("gic-version")? { if gic_version != 3 { bail!("Unsupported gic version, only gicv3 is supported"); } } if let Some(accel) = cmd_parser.get_value::<String>("accel")? { if accel.ne("kvm:tcg") && accel.ne("tcg") && accel.ne("kvm") { bail!("Only \'kvm\', \'kvm:tcg\' and \'tcg\' are supported for \'accel\' of \'machine\'"); } } if let Some(usb) = cmd_parser.get_value::<ExBool>("usb")? { if usb.into() { bail!("Argument \'usb\' should be set to \'off\'"); } } if let Some(mach_type) = cmd_parser .get_value::<MachineType>("") .chain_err(|| "Unrecognized machine type")? { self.machine_config.mach_type = mach_type; } if let Some(mach_type) = cmd_parser .get_value::<MachineType>("type") .chain_err(|| "Unrecognized machine type")? { self.machine_config.mach_type = mach_type; } if let Some(dump_guest) = cmd_parser.get_value::<ExBool>("dump-guest-core")? { self.machine_config.mem_config.dump_guest_core = dump_guest.into(); } if let Some(mem_share) = cmd_parser.get_value::<ExBool>("mem-share")? { self.machine_config.mem_config.mem_share = mem_share.into(); } Ok(()) } pub fn add_memory(&mut self, mem_config: &str) -> Result<()> { let mut cmd_parser = CmdParser::new("m"); cmd_parser.push("").push("size"); cmd_parser.parse(mem_config)?; let mem = if let Some(mem_size) = cmd_parser.get_value::<String>("")? { memory_unit_conversion(&mem_size)? } else if let Some(mem_size) = cmd_parser.get_value::<String>("size")? { memory_unit_conversion(&mem_size)? 
} else { return Err(ErrorKind::FieldIsMissing("size", "memory").into()); }; self.machine_config.mem_config.mem_size = mem; Ok(()) } pub fn add_cpu(&mut self, cpu_config: &str) -> Result<()> { let mut cmd_parser = CmdParser::new("smp"); cmd_parser .push("") .push("sockets") .push("cores") .push("threads") .push("cpus"); cmd_parser.parse(cpu_config)?; let cpu = if let Some(cpu) = cmd_parser.get_value::<u64>("")? { cpu } else if let Some(cpu) = cmd_parser.get_value::<u64>("cpus")? { cpu } else { return Err(ErrorKind::FieldIsMissing("cpus", "smp").into()); }; if let Some(sockets) = cmd_parser.get_value::<u64>("sockets")? { if sockets.ne(&cpu) { bail!("Invalid \'sockets\' arguments for \'smp\', it should equal to the number of cpus"); } } if let Some(cores) = cmd_parser.get_value::<u64>("cores")? { if cores.ne(&1) { bail!("Invalid \'cores\' arguments for \'smp\', it should be \'1\'"); } } if let Some(threads) = cmd_parser.get_value::<u64>("threads")? { if threads.ne(&1) { bail!("Invalid \'threads\' arguments for \'smp\', it should be \'1\'"); } } if !(MIN_NR_CPUS..=MAX_NR_CPUS).contains(&cpu) { return Err(ErrorKind::IllegalValue( "CPU number".to_string(), MIN_NR_CPUS, true, MAX_NR_CPUS, true, ) .into()); } self.machine_config.nr_cpus = cpu as u8; Ok(()) } pub fn add_mem_path(&mut self, mem_path: &str) -> Result<()> { self.machine_config.mem_config.mem_path = Some(mem_path.replace("\"", "")); Ok(()) } } fn memory_unit_conversion(origin_value: &str) -> Result<u64> { if (origin_value.ends_with('M') | origin_value.ends_with('m')) && (origin_value.contains('M') ^ origin_value.contains('m')) { let value = origin_value.replacen("M", "", 1); let value = value.replacen("m", "", 1);
} else if (origin_value.ends_with('G') | origin_value.ends_with('g')) && (origin_value.contains('G') ^ origin_value.contains('g')) { let value = origin_value.replacen("G", "", 1); let value = value.replacen("g", "", 1); get_inner( value .parse::<u64>() .map_err(|_| { ErrorKind::ConvertValueFailed(origin_value.to_string(), String::from("u64")) })? .checked_mul(G), ) } else { let size = origin_value.parse::<u64>().map_err(|_| { ErrorKind::ConvertValueFailed(origin_value.to_string(), String::from("u64")) })?; let memory_size = size.checked_mul(M); get_inner(memory_size) } } fn get_inner<T>(outer: Option<T>) -> Result<T> { if let Some(x) = outer { Ok(x) } else { Err(ErrorKind::IntegerOverflow("-m".to_string()).into()) } } #[cfg(test)] mod tests { use super::*; #[test] fn test_health_check() { let memory_config = MachineMemConfig { mem_size: MIN_MEMSIZE, mem_path: None, mem_share: false, dump_guest_core: false, }; let mut machine_config = MachineConfig { mach_type: MachineType::MicroVm, nr_cpus: MIN_NR_CPUS as u8, mem_config: memory_config, }; assert!(machine_config.check().is_ok()); machine_config.nr_cpus = MAX_NR_CPUS as u8; machine_config.mem_config.mem_size = MAX_MEMSIZE; assert!(machine_config.check().is_ok()); machine_config.nr_cpus = MIN_NR_CPUS as u8; machine_config.mem_config.mem_size = MIN_MEMSIZE - 1; assert!(!machine_config.check().is_ok()); machine_config.mem_config.mem_size = MAX_MEMSIZE + 1; assert!(!machine_config.check().is_ok()); machine_config.mem_config.mem_size = MIN_MEMSIZE; assert!(machine_config.check().is_ok()); } }
get_inner( value .parse::<u64>() .map_err(|_| { ErrorKind::ConvertValueFailed(origin_value.to_string(), String::from("u64")) })? .checked_mul(M), )
call_expression
[ { "content": "/// Sets the value of one register for this vCPU.\n\n///\n\n/// The id of the register is encoded as specified in the kernel documentation\n\n/// for `KVM_SET_ONE_REG`.\n\n///\n\n/// Max register size is 256 Bytes.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `reg_id` - ID of the register for which we are setting the value.\n\n/// * `data` - value for the specified register.\n\npub fn set_one_reg_vec(vcpu_fd: &VcpuFd, reg_id: u64, data: &[u8]) -> Result<()> {\n\n let reg_size = 1u64 << ((reg_id & KVM_REG_SIZE_MASK) >> KVM_REG_SIZE_SHIFT);\n\n if reg_size > KVM_REG_MAX_SIZE || reg_size as usize > data.len() {\n\n return Err(errno::Error::new(libc::EINVAL));\n\n };\n\n let data_ref = data.as_ptr() as *const u8;\n\n let onereg = kvm_one_reg {\n\n id: reg_id,\n\n addr: data_ref as u64,\n\n };\n\n\n\n // This is safe because we allocated the struct and we know the kernel will read\n\n // exactly the size of the struct.\n\n let ret = unsafe { ioctl_with_ref(vcpu_fd, KVM_SET_ONE_REG(), &onereg) };\n\n if ret < 0 {\n\n return Err(errno::Error::last());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "cpu/src/aarch64/core_regs.rs", "rank": 1, "score": 360453.9665538241 }, { "content": "pub fn parse_virtio_serial(vm_config: &mut VmConfig, serial_config: &str) -> Result<()> {\n\n let mut cmd_parser = CmdParser::new(\"virtio-serial\");\n\n cmd_parser\n\n .push(\"\")\n\n .push(\"id\")\n\n .push(\"bus\")\n\n .push(\"addr\")\n\n .push(\"multifunction\");\n\n cmd_parser.parse(serial_config)?;\n\n pci_args_check(&cmd_parser)?;\n\n\n\n if vm_config.virtio_serial.is_none() {\n\n let id = if let Some(id) = cmd_parser.get_value::<String>(\"id\")? {\n\n id\n\n } else {\n\n \"\".to_string()\n\n };\n\n let multifunction = if let Some(switch) = cmd_parser.get_value::<ExBool>(\"multifunction\")? 
{\n\n switch.into()\n\n } else {\n", "file_path": "machine_manager/src/config/chardev.rs", "rank": 2, "score": 353158.9361438936 }, { "content": "/// Sets the vcpu's current \"core_register\"\n\n///\n\n/// The register state is gotten from `KVM_SET_ONE_REG` api in KVM.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu_fd` - the VcpuFd in KVM mod.\n\n/// * `core_regs` - kvm_regs state to be written.\n\npub fn set_core_regs(vcpu_fd: &VcpuFd, core_regs: kvm_regs) -> Result<()> {\n\n vcpu_fd.set_one_reg(Arm64CoreRegs::UserPTRegSp.into(), core_regs.regs.sp)?;\n\n vcpu_fd.set_one_reg(Arm64CoreRegs::KvmSpEl1.into(), core_regs.sp_el1)?;\n\n vcpu_fd.set_one_reg(Arm64CoreRegs::UserPTRegPState.into(), core_regs.regs.pstate)?;\n\n vcpu_fd.set_one_reg(Arm64CoreRegs::UserPTRegPc.into(), core_regs.regs.pc)?;\n\n vcpu_fd.set_one_reg(Arm64CoreRegs::KvmElrEl1.into(), core_regs.elr_el1)?;\n\n\n\n for i in 0..KVM_NR_REGS as usize {\n\n vcpu_fd.set_one_reg(\n\n Arm64CoreRegs::UserPTRegRegs(i).into(),\n\n core_regs.regs.regs[i] as u64,\n\n )?;\n\n }\n\n\n\n for i in 0..KVM_NR_SPSR as usize {\n\n vcpu_fd.set_one_reg(Arm64CoreRegs::KvmSpsr(i).into(), core_regs.spsr[i])?;\n\n }\n\n\n\n for i in 0..KVM_NR_FP_REGS as usize {\n\n let mut data: Vec<u8> = Vec::new();\n", "file_path": "cpu/src/aarch64/core_regs.rs", "rank": 3, "score": 332246.0865874947 }, { "content": "/// Returns the 128 bits value of the specified vCPU register.\n\n///\n\n/// The id of the register is encoded as specified in the kernel documentation\n\n/// for `KVM_GET_ONE_REG`.\n\n///\n\n/// Max register size is 256 Bytes.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu_fd` - The file descriptor of kvm_based vcpu.\n\n/// * `reg_id` - ID of register.\n\npub fn get_one_reg_vec(vcpu_fd: &VcpuFd, reg_id: u64) -> Result<Vec<u8>> {\n\n let reg_size = 1_u64 << ((reg_id & KVM_REG_SIZE_MASK) >> KVM_REG_SIZE_SHIFT);\n\n if reg_size > KVM_REG_MAX_SIZE {\n\n return Err(errno::Error::new(libc::EINVAL));\n\n }\n\n let mut reg_value: Vec<u8> 
= vec![0; reg_size as usize];\n\n reg_value.resize(reg_size as usize, 0);\n\n let mut onereg = kvm_one_reg {\n\n id: reg_id,\n\n addr: reg_value.as_mut_ptr() as *mut u8 as u64,\n\n };\n\n\n\n // This is safe because we allocated the struct and we know the kernel will read\n\n // exactly the size of the struct.\n\n let ret = unsafe { ioctl_with_mut_ref(vcpu_fd, KVM_GET_ONE_REG(), &mut onereg) };\n\n if ret < 0 {\n\n return Err(errno::Error::last());\n\n }\n\n\n\n Ok(reg_value)\n\n}\n\n\n", "file_path": "cpu/src/aarch64/core_regs.rs", "rank": 4, "score": 331293.9152946706 }, { "content": "pub fn get_multi_function(pci_cfg: &str) -> Result<bool> {\n\n let mut cmd_parser = CmdParser::new(\"multifunction\");\n\n cmd_parser.push(\"\").push(\"multifunction\");\n\n cmd_parser.get_parameters(pci_cfg)?;\n\n\n\n if let Some(multi_func) = cmd_parser\n\n .get_value::<ExBool>(\"multifunction\")\n\n .chain_err(|| \"Failed to get multifunction parameter, please set on or off (default).\")?\n\n {\n\n return Ok(multi_func.inner);\n\n }\n\n\n\n Ok(false)\n\n}\n\n\n", "file_path": "machine_manager/src/config/pci.rs", "rank": 5, "score": 323060.5037653779 }, { "content": "pub fn has_cap(cap: u8) -> Result<bool> {\n\n let mut hdr = CapUserHeader {\n\n version: CAPS_V3,\n\n pid: 0,\n\n };\n\n let mut data: CapUserData = Default::default();\n\n syscall::capget(&mut hdr, &mut data)?;\n\n let caps: u64 = (u64::from(data.permitted_s1) << 32) + u64::from(data.permitted_s0);\n\n let has_cap = (caps & (1u64 << cap)) != 0;\n\n Ok(has_cap)\n\n}\n\n\n", "file_path": "ozone/src/capability.rs", "rank": 6, "score": 318131.1456701233 }, { "content": "/// Returns the vcpu's current `core_register`.\n\n///\n\n/// The register state is gotten from `KVM_GET_ONE_REG` api in KVM.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `vcpu_fd` - the VcpuFd in KVM mod.\n\npub fn get_core_regs(vcpu_fd: &VcpuFd) -> Result<kvm_regs> {\n\n let mut core_regs = kvm_regs::default();\n\n\n\n core_regs.regs.sp = 
vcpu_fd.get_one_reg(Arm64CoreRegs::UserPTRegSp.into())?;\n\n core_regs.sp_el1 = vcpu_fd.get_one_reg(Arm64CoreRegs::KvmSpEl1.into())?;\n\n core_regs.regs.pstate = vcpu_fd.get_one_reg(Arm64CoreRegs::UserPTRegPState.into())?;\n\n core_regs.regs.pc = vcpu_fd.get_one_reg(Arm64CoreRegs::UserPTRegPc.into())?;\n\n core_regs.elr_el1 = vcpu_fd.get_one_reg(Arm64CoreRegs::KvmElrEl1.into())?;\n\n\n\n for i in 0..KVM_NR_REGS as usize {\n\n core_regs.regs.regs[i] = vcpu_fd.get_one_reg(Arm64CoreRegs::UserPTRegRegs(i).into())?;\n\n }\n\n\n\n for i in 0..KVM_NR_SPSR as usize {\n\n core_regs.spsr[i] = vcpu_fd.get_one_reg(Arm64CoreRegs::KvmSpsr(i).into())?;\n\n }\n\n\n\n for i in 0..KVM_NR_FP_REGS as usize {\n\n let register_value_vec =\n\n get_one_reg_vec(&vcpu_fd, Arm64CoreRegs::UserFPSIMDStateVregs(i).into())?;\n", "file_path": "cpu/src/aarch64/core_regs.rs", "rank": 7, "score": 310023.48276495485 }, { "content": "/// Create `VmConfig` from `ArgMatches`'s arg.\n\n///\n\n/// When accepted cmdline arguments, `StratoVirt` will parse useful arguments and\n\n/// transform them to VM's configuration structure -- `VmConfig`.\n\n///\n\n/// # Arguments\n\n///\n\n/// - * `args` - The structure accepted input cmdline arguments.\n\n///\n\n/// # Errors\n\n///\n\n/// Input arguments is illegal for `VmConfig` or `VmConfig`'s health check\n\n/// failed -- with this unhealthy `VmConfig`, VM will not boot successfully.\n\npub fn create_vmconfig(args: &ArgMatches) -> Result<VmConfig> {\n\n // Parse config-file json.\n\n // VmConfig can be transformed by json file which described VmConfig\n\n // directly.\n\n let mut vm_cfg = VmConfig::default();\n\n\n\n // Parse cmdline args which need to set in VmConfig\n\n add_args_to_config!((args.value_of(\"name\")), vm_cfg, add_name);\n\n add_args_to_config!((args.value_of(\"machine\")), vm_cfg, add_machine);\n\n add_args_to_config!((args.value_of(\"memory\")), vm_cfg, add_memory);\n\n add_args_to_config!((args.value_of(\"mem-path\")), vm_cfg, add_mem_path);\n\n 
add_args_to_config!((args.value_of(\"smp\")), vm_cfg, add_cpu);\n\n add_args_to_config!((args.value_of(\"kernel\")), vm_cfg, add_kernel);\n\n add_args_to_config!((args.value_of(\"initrd-file\")), vm_cfg, add_initrd);\n\n add_args_to_config!(\n\n (args.values_of(\"kernel-cmdline\")),\n\n vm_cfg,\n\n add_kernel_cmdline,\n\n vec\n\n );\n", "file_path": "machine_manager/src/cmdline.rs", "rank": 8, "score": 304678.8309819363 }, { "content": "pub fn parse_balloon(vm_config: &mut VmConfig, balloon_config: &str) -> Result<BalloonConfig> {\n\n if vm_config.dev_name.get(\"balloon\").is_some() {\n\n bail!(\"Only one balloon device is supported for each vm.\");\n\n }\n\n let mut cmd_parser = CmdParser::new(\"virtio-balloon\");\n\n cmd_parser\n\n .push(\"\")\n\n .push(\"bus\")\n\n .push(\"addr\")\n\n .push(\"multifunction\")\n\n .push(\"id\")\n\n .push(\"deflate-on-oom\");\n\n cmd_parser.parse(balloon_config)?;\n\n\n\n pci_args_check(&cmd_parser)?;\n\n let mut balloon: BalloonConfig = Default::default();\n\n if let Some(default) = cmd_parser.get_value::<ExBool>(\"deflate-on-oom\")? {\n\n balloon.deflate_on_oom = default.into();\n\n }\n\n if let Some(id) = cmd_parser.get_value::<String>(\"id\")? {\n", "file_path": "machine_manager/src/config/balloon.rs", "rank": 9, "score": 296734.71637331444 }, { "content": "pub fn parse_virtconsole(vm_config: &mut VmConfig, config_args: &str) -> Result<VirtioConsole> {\n\n let mut cmd_parser = CmdParser::new(\"virtconsole\");\n\n cmd_parser.push(\"\").push(\"id\").push(\"chardev\");\n\n cmd_parser.parse(config_args)?;\n\n\n\n let chardev_name = if let Some(chardev) = cmd_parser.get_value::<String>(\"chardev\")? {\n\n chardev\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"chardev\", \"virtconsole\").into());\n\n };\n\n\n\n let id = if let Some(chardev_id) = cmd_parser.get_value::<String>(\"id\")? 
{\n\n chardev_id\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"id\", \"virtconsole\").into());\n\n };\n\n\n\n if let Some(char_dev) = vm_config.chardev.remove(&chardev_name) {\n\n return Ok(VirtioConsole {\n\n id,\n", "file_path": "machine_manager/src/config/chardev.rs", "rank": 10, "score": 296734.71637331444 }, { "content": "/// Set namespace for uts.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `hostname` - Host name.\n\npub fn set_uts_namespace(hostname: &str) -> Result<()> {\n\n syscall::unshare(libc::CLONE_NEWUTS).chain_err(|| \"Failed to unshare into a new namespace\")?;\n\n syscall::set_host_name(hostname).chain_err(|| \"Failed to set new hostname\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ozone/src/namespace.rs", "rank": 11, "score": 294437.37363462243 }, { "content": "/// Set namespace for network.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `path` - Path of network namespace.\n\npub fn set_network_namespace(path: &str) -> Result<()> {\n\n let network_ns_fd = File::open(path)\n\n .chain_err(|| format!(\"Failed to open netns path: {}\", path))?\n\n .into_raw_fd();\n\n syscall::setns(network_ns_fd, libc::CLONE_NEWNET)\n\n .chain_err(|| \"Failed to set network namespace\")?;\n\n syscall::close(network_ns_fd)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ozone/src/namespace.rs", "rank": 12, "score": 294437.25689920585 }, { "content": "// set_capability_for_ozone , you can use -capability cap_* to obtain a capability\n\npub fn set_capability_for_ozone(capability: &str) -> Result<()> {\n\n let cap_str = capability.to_uppercase();\n\n let cap_add_arr: Vec<&str> = cap_str.split(',').collect();\n\n let all_caps = init_cap();\n\n\n\n for cap in cap_add_arr.iter() {\n\n if let Some(val) = all_caps.get(cap) {\n\n if !val.1 {\n\n let warning = format!(\"Alert! 
Adding dangerous capability {:?} to ozone , it might cause risk of escape!\\n\", cap);\n\n std::io::stdout()\n\n .write(warning.as_bytes())\n\n .chain_err(|| \"Failed to write warnings\")?;\n\n std::io::stdout()\n\n .flush()\n\n .chain_err(|| \"Failed to flush stdout\")?;\n\n }\n\n } else {\n\n bail!(\"Invalid capability argument: {:?}\", cap);\n\n }\n\n }\n", "file_path": "ozone/src/capability.rs", "rank": 13, "score": 294437.08202459617 }, { "content": "pub fn parse_blk(vm_config: &mut VmConfig, drive_config: &str) -> Result<BlkDevConfig> {\n\n let mut cmd_parser = CmdParser::new(\"virtio-blk\");\n\n cmd_parser\n\n .push(\"\")\n\n .push(\"id\")\n\n .push(\"bus\")\n\n .push(\"addr\")\n\n .push(\"multifunction\")\n\n .push(\"drive\")\n\n .push(\"bootindex\")\n\n .push(\"serial\")\n\n .push(\"iothread\");\n\n\n\n cmd_parser.parse(drive_config)?;\n\n\n\n pci_args_check(&cmd_parser)?;\n\n\n\n if let Err(ref e) = cmd_parser.get_value::<u8>(\"bootindex\") {\n\n bail!(\"Failed to parse \\'bootindex\\': {:?}\", &e);\n\n }\n", "file_path": "machine_manager/src/config/drive.rs", "rank": 14, "score": 292440.115428301 }, { "content": "pub fn parse_net(vm_config: &mut VmConfig, net_config: &str) -> Result<NetworkInterfaceConfig> {\n\n let mut cmd_parser = CmdParser::new(\"virtio-net\");\n\n cmd_parser\n\n .push(\"\")\n\n .push(\"id\")\n\n .push(\"netdev\")\n\n .push(\"bus\")\n\n .push(\"addr\")\n\n .push(\"multifunction\")\n\n .push(\"mac\")\n\n .push(\"iothread\");\n\n\n\n cmd_parser.parse(net_config)?;\n\n pci_args_check(&cmd_parser)?;\n\n let mut netdevinterfacecfg = NetworkInterfaceConfig::default();\n\n\n\n let netdev = if let Some(devname) = cmd_parser.get_value::<String>(\"netdev\")? 
{\n\n devname\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"netdev\", \"net\").into());\n", "file_path": "machine_manager/src/config/network.rs", "rank": 15, "score": 292440.11542830104 }, { "content": "pub fn parse_rng_dev(vm_config: &mut VmConfig, rng_config: &str) -> Result<RngConfig> {\n\n let mut cmd_parser = CmdParser::new(\"rng\");\n\n cmd_parser\n\n .push(\"\")\n\n .push(\"id\")\n\n .push(\"bus\")\n\n .push(\"addr\")\n\n .push(\"multifunction\")\n\n .push(\"max-bytes\")\n\n .push(\"period\")\n\n .push(\"rng\");\n\n\n\n cmd_parser.parse(rng_config)?;\n\n pci_args_check(&cmd_parser)?;\n\n let mut rng_cfg = RngConfig::default();\n\n let rng = if let Some(rng_id) = cmd_parser.get_value::<String>(\"rng\")? {\n\n rng_id\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"rng\", \"rng\").into());\n\n };\n", "file_path": "machine_manager/src/config/rng.rs", "rank": 16, "score": 292440.115428301 }, { "content": "/// Set hostname\n\n///\n\n/// # Arguments\n\n///\n\n/// * `Hostname` - The host name.\n\npub fn set_host_name(host_name: &str) -> Result<()> {\n\n let len = host_name.len() as libc::size_t;\n\n let name = into_cstring(host_name)?;\n\n SyscallResult {\n\n ret: unsafe { libc::sethostname(name.as_ptr(), len) },\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 17, "score": 288528.93675114174 }, { "content": "/// Set namespace for mount directory.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `mount_dir` - Path of mount directory .\n\npub fn set_mount_namespace(mount_dir: &str) -> Result<()> {\n\n syscall::unshare(libc::CLONE_NEWNS).chain_err(|| \"Failed to unshare into a new namespace\")?;\n\n syscall::mount(None, ROOT_DIR_NAME, libc::MS_SLAVE | libc::MS_REC)\n\n .chain_err(|| \"Failed to mount root path as slave and rec\")?;\n\n\n\n syscall::mount(Some(mount_dir), mount_dir, libc::MS_BIND | libc::MS_REC)\n\n .chain_err(|| \"Failed to mount target path as bind and rec\")?;\n\n\n\n std::env::set_current_dir(mount_dir)\n\n 
.chain_err(|| \"Failed to change current dir to mount dir path\")?;\n\n\n\n syscall::mkdir(OLD_ROOT_DIR_NAME).chain_err(|| \"Failed to create old root dir\")?;\n\n\n\n syscall::pivot_root(CURRENT_DIR_NAME, OLD_ROOT_DIR_NAME)\n\n .chain_err(|| \"Failed to call pivot_root\")?;\n\n\n\n syscall::chdir(ROOT_DIR_NAME).chain_err(|| \"Failed to call chdir to change dir\")?;\n\n\n\n syscall::umount(OLD_ROOT_DIR_NAME).chain_err(|| \"Failed to umount old root path dir\")?;\n\n\n\n std::fs::remove_dir(OLD_ROOT_DIR_NAME).chain_err(|| \"Failed to remove old root path dir\")?;\n\n Ok(())\n\n}\n", "file_path": "ozone/src/namespace.rs", "rank": 18, "score": 288528.47446621186 }, { "content": "fn write_buf_mem(buf: &[u8], hva: u64) -> Result<()> {\n\n let mut slice = unsafe { std::slice::from_raw_parts_mut(hva as *mut u8, buf.len()) };\n\n (&mut slice)\n\n .write(buf)\n\n .chain_err(|| format!(\"Failed to write buf(hva:{})\", hva))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "virtio/src/block.rs", "rank": 19, "score": 282291.0844781575 }, { "content": "pub fn set_numa_node(node: &str, exec_file: &str, name: &str) -> Result<()> {\n\n let write_path = get_base_location(\"cpuset\", &exec_file, &name)?;\n\n write_cgroup_value(&write_path, \"cpuset.mems\", &node)\n\n .chain_err(|| ErrorKind::WriteError(\"cpuset.mems\".to_string(), node.to_string()))?;\n\n\n\n let mut upper_path = write_path.clone();\n\n upper_path.pop();\n\n upper_path.push(\"cpuset.cpus\");\n\n inherit_config(&write_path, \"cpuset.cpus\").chain_err(|| {\n\n format!(\n\n \"Failed to inherit configuration for path: {:?}\",\n\n &write_path\n\n )\n\n })?;\n\n let value = read_file_value(upper_path.clone());\n\n if let Ok(val) = value {\n\n write_cgroup_value(&write_path, \"cpuset.cpus\", &val)\n\n .chain_err(|| ErrorKind::WriteError(\"cpuset.cpus\".to_string(), val.to_string()))?;\n\n } else {\n\n bail!(\"Can not read value from: {:?}\", &upper_path);\n\n }\n\n let pid = process::id();\n\n write_cgroup_value(&write_path, 
\"tasks\", &pid.to_string())\n\n .chain_err(|| \"Failed to attach pid\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ozone/src/cgroup.rs", "rank": 20, "score": 282128.0952649451 }, { "content": "pub fn parse_cgroup(cgroup: &mut CgroupCfg, config: &str) -> Result<()> {\n\n let split: Vec<&str> = config.split('=').collect();\n\n if split.len() != 2 {\n\n bail!(\"Invalid parameter: {:?}\", &config);\n\n }\n\n if cgroup.contains_key(split[0]) {\n\n if cgroup.get(split[0]).unwrap().is_some() {\n\n bail!(\"{} has been set more than once\", &split[0]);\n\n }\n\n cgroup.insert(split[0].to_string(), Some(split[1].to_string()));\n\n } else {\n\n bail!(\"Unknown argument: {:?}\", &split[0]);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "ozone/src/cgroup.rs", "rank": 21, "score": 279684.78237933264 }, { "content": "/// Check current version and compat version.\n\n///\n\n/// # Check rules\n\n///\n\n/// 1. If version in `Cargo.toml` exists, current_version should equal to it.\n\n/// 2. Compat_version can't greater than current_version.\n\n/// 3. If current_version not set, it will be equal to compat_version.\n\n/// 4. 
Compat_version should be given with attribute.\n\npub fn validate_version(current_version: &mut u32, compat_version: &mut u32) {\n\n if *compat_version == 0 {\n\n panic!(\"compat_version should be given.\");\n\n }\n\n\n\n if let Some(version_str) = VERSION {\n\n let version = version_to_u32(version_str);\n\n if *current_version == 0 {\n\n *current_version = version;\n\n }\n\n } else if *current_version == 0 {\n\n *current_version = *compat_version;\n\n }\n\n\n\n if *current_version < *compat_version {\n\n panic!(\"version check error, compat version can't greater than current_version.\")\n\n }\n\n}\n\n\n", "file_path": "migration_derive/src/attr_parser.rs", "rank": 22, "score": 269683.196962244 }, { "content": "/// This function is to parse qmp socket path and type.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `args` - The structure accepted input cmdline arguments.\n\n///\n\n/// # Errors\n\n///\n\n/// The value of `qmp` is illegel.\n\npub fn check_api_channel(args: &ArgMatches, vm_config: &mut VmConfig) -> Result<Vec<UnixListener>> {\n\n let mut sock_paths = Vec::new();\n\n if let Some(qmp_config) = args.value_of(\"qmp\") {\n\n let mut cmd_parser = CmdParser::new(\"qmp\");\n\n cmd_parser.push(\"\").push(\"server\").push(\"nowait\");\n\n\n\n cmd_parser.parse(&qmp_config)?;\n\n if let Some(uri) = cmd_parser.get_value::<String>(\"\")? 
{\n\n let (_api_type, api_path) =\n\n parse_uri(&uri).chain_err(|| \"Failed to parse qmp socket path\")?;\n\n sock_paths.push(api_path);\n\n } else {\n\n bail!(\"No uri found for qmp\");\n\n }\n\n if cmd_parser.get_value::<String>(\"server\")?.is_none() {\n\n bail!(\"Argument \\'server\\' is needed for qmp\");\n\n }\n\n if cmd_parser.get_value::<String>(\"nowait\")?.is_none() {\n\n bail!(\"Argument \\'nowait\\' is needed for qmp\");\n\n }\n", "file_path": "machine_manager/src/cmdline.rs", "rank": 23, "score": 266925.35098082235 }, { "content": "/// Create folder using a relative path.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `path` - The relative path of filder.\n\npub fn mkdir(path: &str) -> Result<()> {\n\n let path_ptr = into_cstring(path)?;\n\n SyscallResult {\n\n ret: unsafe { libc::mkdir(path_ptr.as_ptr(), libc::S_IRUSR | libc::S_IWUSR) },\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 24, "score": 266408.84616477013 }, { "content": "pub fn parse_vsock(vsock_config: &str) -> Result<VsockConfig> {\n\n let mut cmd_parser = CmdParser::new(\"vhost-vsock\");\n\n cmd_parser\n\n .push(\"\")\n\n .push(\"id\")\n\n .push(\"bus\")\n\n .push(\"addr\")\n\n .push(\"multifunction\")\n\n .push(\"guest-cid\")\n\n .push(\"vhostfd\");\n\n cmd_parser.parse(vsock_config)?;\n\n pci_args_check(&cmd_parser)?;\n\n let id = if let Some(vsock_id) = cmd_parser.get_value::<String>(\"id\")? {\n\n vsock_id\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"id\", \"vsock\").into());\n\n };\n\n\n\n let guest_cid = if let Some(cid) = cmd_parser.get_value::<u64>(\"guest-cid\")? 
{\n\n cid\n", "file_path": "machine_manager/src/config/chardev.rs", "rank": 25, "score": 264337.83677772334 }, { "content": "/// Set Mac address configured into the virtio configuration, and return features mask with\n\n/// VIRTIO_NET_F_MAC set.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `device_config` - Virtio net configurations.\n\n/// * `mac` - Mac address configured by user.\n\npub fn build_device_config_space(device_config: &mut VirtioNetConfig, mac: &str) -> u64 {\n\n let mut config_features = 0_u64;\n\n let mut bytes = [0_u8; 6];\n\n for (i, s) in mac.split(':').collect::<Vec<&str>>().iter().enumerate() {\n\n bytes[i] = if let Ok(v) = u8::from_str_radix(s, 16) {\n\n v\n\n } else {\n\n return config_features;\n\n };\n\n }\n\n device_config.mac.copy_from_slice(&bytes);\n\n config_features |= 1 << VIRTIO_NET_F_MAC;\n\n\n\n config_features\n\n}\n\n\n", "file_path": "virtio/src/net.rs", "rank": 26, "score": 261974.29608572167 }, { "content": "pub fn qmp_balloon(target: u64) -> bool {\n\n // Safe, because there is no confliction when writing global variable BALLOON_DEV, in other words,\n\n // this function will not be called simultaneously.\n\n if let Some(dev) = unsafe { &BALLOON_DEV } {\n\n match dev.lock().unwrap().set_guest_memory_size(target) {\n\n Ok(()) => {\n\n return true;\n\n }\n\n Err(ref e) => {\n\n error!(\n\n \"Failed to set balloon memory size: {}, :{}\",\n\n target,\n\n error_chain::ChainedError::display_chain(e)\n\n );\n\n return false;\n\n }\n\n }\n\n }\n\n error!(\"Balloon device not configured\");\n\n false\n\n}\n\n\n", "file_path": "virtio/src/balloon.rs", "rank": 27, "score": 261905.0451817848 }, { "content": "/// This function used to remove group and others permission using libc::chmod.\n\npub fn limit_permission(path: &str) -> Result<()> {\n\n let file_path = path.as_bytes().to_vec();\n\n let cstr_file_path = std::ffi::CString::new(file_path).unwrap();\n\n let ret = unsafe { libc::chmod(cstr_file_path.as_ptr(), 0o600) };\n\n\n\n if ret == 
0 {\n\n Ok(())\n\n } else {\n\n Err(ErrorKind::ChmodFailed(ret).into())\n\n }\n\n}\n\n\n", "file_path": "util/src/unix.rs", "rank": 28, "score": 261028.77438253083 }, { "content": "/// Umount destination directory.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `dst_path` - Path of destination directory.\n\npub fn umount(dst_path: &str) -> Result<()> {\n\n let target = into_cstring(dst_path)?;\n\n\n\n SyscallResult {\n\n ret: unsafe { libc::umount2(target.as_ptr(), libc::MNT_DETACH) },\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 29, "score": 261027.86492931837 }, { "content": "/// Change working directory.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `new_path` - The new path of working directory.\n\npub fn chdir(new_path: &str) -> Result<()> {\n\n let path = into_cstring(new_path)?;\n\n\n\n SyscallResult {\n\n ret: unsafe { libc::chdir(path.as_ptr()) },\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 30, "score": 261027.8087497644 }, { "content": "pub fn get_pci_bdf(pci_cfg: &str) -> Result<PciBdf> {\n\n let mut cmd_parser = CmdParser::new(\"bdf\");\n\n cmd_parser.push(\"\").push(\"bus\").push(\"addr\");\n\n cmd_parser.get_parameters(pci_cfg)?;\n\n\n\n let mut pci_bdf = PciBdf::default();\n\n if let Some(bus) = cmd_parser.get_value::<String>(\"bus\")? {\n\n pci_bdf.bus = bus;\n\n } else {\n\n bail!(\"Bus not specified for pci device\");\n\n }\n\n if let Some(addr) = cmd_parser.get_value::<String>(\"addr\")? 
{\n\n let addr_vec: Vec<&str> = addr.split('.').collect();\n\n if addr_vec.len() > 2 {\n\n bail!(\n\n \"The number of args for addr is supported to be no more than two, find :{}\",\n\n addr_vec.len()\n\n );\n\n }\n\n let slot = addr_vec.get(0).unwrap();\n", "file_path": "machine_manager/src/config/pci.rs", "rank": 31, "score": 259700.2617031038 }, { "content": "pub fn parse_vfio(_vm_config: &VmConfig, vfio_config: &str) -> Result<VfioConfig> {\n\n let mut cmd_parser = CmdParser::new(\"vfio-pci\");\n\n cmd_parser\n\n .push(\"\")\n\n .push(\"host\")\n\n .push(\"id\")\n\n .push(\"bus\")\n\n .push(\"addr\")\n\n .push(\"multifunction\");\n\n cmd_parser.parse(vfio_config)?;\n\n\n\n let mut vfio: VfioConfig = VfioConfig::default();\n\n if let Some(host) = cmd_parser.get_value::<String>(\"host\")? {\n\n vfio.host = host;\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"host\", \"vfio\").into());\n\n }\n\n if let Some(id) = cmd_parser.get_value::<String>(\"id\")? {\n\n vfio.id = id;\n\n }\n", "file_path": "machine_manager/src/config/vfio.rs", "rank": 32, "score": 256674.20694251766 }, { "content": "pub fn drop_bounding_caps(cap: u8) -> Result<()> {\n\n SyscallResult {\n\n ret: unsafe { libc::prctl(PR_CAPBSET_DROP, libc::c_uint::from(cap), 0, 0) },\n\n }\n\n .into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n pub use super::*;\n\n\n\n #[test]\n\n fn test_into_cstring() {\n\n let str = into_cstring(\"stratovirt\");\n\n assert!(str.is_ok());\n\n let str = str.unwrap();\n\n let cstr = CString::new(\"stratovirt\").unwrap();\n\n assert_eq!(cstr, str);\n\n }\n\n}\n", "file_path": "ozone/src/syscall.rs", "rank": 33, "score": 256213.9114468274 }, { "content": "pub fn parse_root_port(rootport_cfg: &str) -> Result<RootPortConfig> {\n\n let mut cmd_parser = CmdParser::new(\"pcie-root-port\");\n\n cmd_parser\n\n .push(\"\")\n\n .push(\"bus\")\n\n .push(\"addr\")\n\n .push(\"port\")\n\n .push(\"chassis\")\n\n .push(\"multifunction\")\n\n .push(\"id\");\n\n 
cmd_parser.parse(rootport_cfg)?;\n\n\n\n let mut root_port = RootPortConfig::default();\n\n if let Some(port) = cmd_parser.get_value::<String>(\"port\")? {\n\n let without_prefix = port.trim_start_matches(\"0x\");\n\n root_port.port = u8::from_str_radix(without_prefix, 16).unwrap();\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"port\", \"rootport\").into());\n\n }\n\n let _ = cmd_parser.get_value::<u8>(\"chassis\")?;\n", "file_path": "machine_manager/src/config/pci.rs", "rank": 34, "score": 255310.5560099393 }, { "content": "/// Gets the page size of host.\n\npub fn host_page_size() -> u64 {\n\n unsafe { libc::sysconf(libc::_SC_PAGESIZE) as u64 }\n\n}\n\n\n\n#[derive(PartialEq, Debug)]\n\n/// Three path type in unix.\n\npub enum UnixPath {\n\n File = 0,\n\n Unix = 1,\n\n Tcp = 2,\n\n Unknown = 3,\n\n}\n\n\n\nimpl From<&str> for UnixPath {\n\n fn from(s: &str) -> Self {\n\n match s {\n\n \"file\" | \"File\" | \"FILE\" => UnixPath::File,\n\n \"unix\" | \"Unix\" | \"UNIX\" => UnixPath::Unix,\n\n \"tcp\" | \"Tcp\" | \"TCP\" => UnixPath::Tcp,\n\n _ => UnixPath::Unknown,\n\n }\n\n }\n\n}\n\n\n", "file_path": "util/src/unix.rs", "rank": 35, "score": 254993.40107538138 }, { "content": "fn open_pflash_file(file_name: &str, unit: usize) -> Result<File> {\n\n let fd = if unit == 0 {\n\n std::fs::OpenOptions::new().read(true).open(file_name)?\n\n } else {\n\n std::fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .open(file_name)?\n\n };\n\n Ok(fd)\n\n}\n\n\n", "file_path": "machine/src/standard_vm/mod.rs", "rank": 36, "score": 254356.24053501306 }, { "content": "/// Set namespace for ipc.\n\npub fn set_ipc_namespace() -> Result<()> {\n\n syscall::unshare(libc::CLONE_NEWIPC)\n\n .chain_err(|| \"Failed to share into a new ipc namespace\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "ozone/src/namespace.rs", "rank": 37, "score": 254279.1788889008 }, { "content": "/// Change the root mount in the mount namespace of the calling process.\n\n///\n\n/// # 
Arguments\n\n///\n\n/// * `new_root` - The new root path, but can't be \"/\".\n\n/// * `put_old` - The old root path.\n\npub fn pivot_root(new_root: &str, put_root: &str) -> Result<()> {\n\n let new_path = into_cstring(new_root)?;\n\n let old_path = into_cstring(put_root)?;\n\n SyscallResult {\n\n ret: unsafe { libc::syscall(libc::SYS_pivot_root, new_path.as_ptr(), old_path.as_ptr()) }\n\n as libc::c_int,\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 38, "score": 253460.19679242704 }, { "content": "fn init_cap() -> HashMap<&'static str, (u8, bool)> {\n\n [\n\n (\"CAP_CHOWN\", (0, true)),\n\n (\"CAP_DAC_OVERRIDE\", (1, true)),\n\n (\"CAP_DAC_READ_SEARCH\", (2, false)),\n\n (\"CAP_FOWNER\", (3, true)),\n\n (\"CAP_FSETID\", (4, true)),\n\n (\"CAP_KILL\", (5, true)),\n\n (\"CAP_SETGID\", (6, true)),\n\n (\"CAP_SETUID\", (7, true)),\n\n (\"CAP_SETPCAP\", (8, true)),\n\n (\"CAP_LINUX_IMMUTABLE\", (9, false)),\n\n (\"CAP_NET_BIND_SERVICE\", (10, true)),\n\n (\"CAP_NET_BROADCAST\", (11, false)),\n\n (\"CAP_NET_ADMIN\", (12, false)),\n\n (\"CAP_NET_RAW\", (13, true)),\n\n (\"CAP_IPC_LOCK\", (14, false)),\n\n (\"CAP_IPC_OWNER\", (15, false)),\n\n (\"CAP_SYS_MODULE\", (16, false)),\n\n (\"CAP_SYS_RAWIO\", (17, false)),\n", "file_path": "ozone/src/capability.rs", "rank": 39, "score": 245494.79209231774 }, { "content": "fn write_cgroup_value(path: &Path, file: &str, value: &str) -> Result<()> {\n\n if file != \"tasks\" {\n\n if !path.exists() {\n\n fs::create_dir_all(path)\n\n .chain_err(|| format!(\"Failed to create directory: {:?}\", path))?;\n\n }\n\n inherit_config(path, file)\n\n .chain_err(|| format!(\"Failed to inherit configuration for path: {:?}\", &path))?;\n\n }\n\n\n\n let mut path_to_write = path.to_path_buf();\n\n path_to_write.push(&file);\n\n fs::write(&path_to_write, format!(\"{}\\n\", value)).chain_err(|| {\n\n ErrorKind::WriteError(\n\n (&path_to_write.to_string_lossy()).to_string(),\n\n value.to_string(),\n\n )\n\n })?;\n\n\n\n 
Ok(())\n\n}\n\n\n", "file_path": "ozone/src/cgroup.rs", "rank": 40, "score": 240991.202259321 }, { "content": "pub fn dump_dtb(fdt: &[u8], file_path: &str) {\n\n use std::fs::File;\n\n use std::io::Write;\n\n let mut f = File::create(file_path).unwrap();\n\n f.write_all(fdt).expect(\"Unable to write data\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_add_all_properties() {\n\n let mut fdt_builder = FdtBuilder::new();\n\n let root_node = fdt_builder.begin_node(\"\").unwrap();\n\n fdt_builder.set_property(\"null\", &[]).unwrap();\n\n fdt_builder.set_property_u32(\"u32\", 0x01234567).unwrap();\n\n fdt_builder\n\n .set_property_u64(\"u64\", 0x0123456789abcdef)\n\n .unwrap();\n", "file_path": "util/src/device_tree.rs", "rank": 41, "score": 239969.28135096806 }, { "content": "/// Check if the bit of features is configured.\n\npub fn virtio_has_feature(feature: u64, fbit: u32) -> bool {\n\n feature & (1 << fbit) != 0\n\n}\n\n\n\n/// Identifier of different virtio device, refer to Virtio Spec.\n\npub const VIRTIO_TYPE_NET: u32 = 1;\n\npub const VIRTIO_TYPE_BLOCK: u32 = 2;\n\npub const VIRTIO_TYPE_CONSOLE: u32 = 3;\n\npub const VIRTIO_TYPE_RNG: u32 = 4;\n\npub const VIRTIO_TYPE_BALLOON: u32 = 5;\n\npub const VIRTIO_TYPE_VSOCK: u32 = 19;\n\npub const _VIRTIO_TYPE_FS: u32 = 26;\n\n\n\n// The Status of Virtio Device.\n\nconst CONFIG_STATUS_ACKNOWLEDGE: u32 = 0x01;\n\nconst CONFIG_STATUS_DRIVER: u32 = 0x02;\n\nconst CONFIG_STATUS_DRIVER_OK: u32 = 0x04;\n\nconst CONFIG_STATUS_FEATURES_OK: u32 = 0x08;\n\nconst CONFIG_STATUS_FAILED: u32 = 0x80;\n\n\n", "file_path": "virtio/src/lib.rs", "rank": 42, "score": 238176.12990847576 }, { "content": "pub fn pci_args_check(cmd_parser: &CmdParser) -> Result<()> {\n\n let device_type = cmd_parser.get_value::<String>(\"\")?;\n\n let dev_type = device_type.unwrap();\n\n // Safe, because this function only be called when certain\n\n // devices type are added.\n\n if dev_type.ends_with(\"-device\") {\n\n 
if cmd_parser.get_value::<String>(\"bus\")?.is_some() {\n\n bail!(\"virtio mmio device does not support bus arguments\");\n\n }\n\n if cmd_parser.get_value::<String>(\"addr\")?.is_some() {\n\n bail!(\"virtio mmio device does not support addr arguments\");\n\n }\n\n if cmd_parser.get_value::<ExBool>(\"multifunction\")?.is_some() {\n\n bail!(\"virtio mmio device does not support multifunction arguments\");\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "machine_manager/src/config/pci.rs", "rank": 43, "score": 237988.69169312494 }, { "content": "pub fn raw_write(fd: RawFd, buf: u64, size: usize, offset: usize) -> Result<i64> {\n\n let ret = unsafe { pwrite(fd, buf as *mut c_void, size, offset as i64) as i64 };\n\n if ret < 0 {\n\n bail!(\"Failed to pwrite for {}, return {}.\", fd, ret);\n\n }\n\n\n\n Ok(ret)\n\n}\n\n\n", "file_path": "util/src/aio/raw.rs", "rank": 44, "score": 236623.88488639786 }, { "content": "pub fn raw_read(fd: RawFd, buf: u64, size: usize, offset: usize) -> Result<i64> {\n\n let ret = unsafe { pread(fd, buf as *mut c_void, size, offset as i64) as i64 };\n\n if ret < 0 {\n\n bail!(\"Failed to pread for {}, return {}.\", fd, ret);\n\n }\n\n\n\n Ok(ret)\n\n}\n\n\n", "file_path": "util/src/aio/raw.rs", "rank": 45, "score": 236623.88488639786 }, { "content": "fn get_serial_num_config(serial_num: &str) -> Vec<u8> {\n\n let mut id_bytes = vec![0; VIRTIO_BLK_ID_BYTES as usize];\n\n let bytes_to_copy = cmp::min(serial_num.len(), VIRTIO_BLK_ID_BYTES as usize);\n\n\n\n let serial_bytes = serial_num.as_bytes();\n\n id_bytes[..bytes_to_copy].clone_from_slice(&serial_bytes[..bytes_to_copy]);\n\n id_bytes\n\n}\n\n\n", "file_path": "virtio/src/block.rs", "rank": 46, "score": 235821.534051964 }, { "content": "/// Parse unix uri to unix path.\n\n///\n\n/// # Notions\n\n///\n\n/// Unix uri is the string as `file:/xxx/xxx` or `unix:/xxx/xxx` or `tcp:xxx.xxx.xxx`.\n\npub fn parse_uri(uri: &str) -> Result<(UnixPath, String)> {\n\n let parse_vec: 
Vec<&str> = uri.split(':').collect();\n\n if parse_vec.len() == 2 {\n\n match UnixPath::from(parse_vec[0]) {\n\n UnixPath::File => Ok((UnixPath::File, String::from(parse_vec[1]))),\n\n UnixPath::Unix => Ok((UnixPath::Unix, String::from(parse_vec[1]))),\n\n _ => bail!(\"Unsupported unix path type.\"),\n\n }\n\n } else {\n\n bail!(\"Invalid unix uri: {}\", uri)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{parse_uri, UnixPath};\n\n\n\n #[test]\n\n fn test_parse_uri() {\n\n let test_uri_01 = \"file:/tmp/test_file\";\n", "file_path": "util/src/unix.rs", "rank": 47, "score": 232959.61429637414 }, { "content": "// Function that helps to generate serial node in device-tree.\n\n//\n\n// # Arguments\n\n//\n\n// * `dev_info` - Device resource info of serial device.\n\n// * `fdt` - Flatted device-tree blob where serial node will be filled into.\n\nfn generate_serial_device_node(fdt: &mut FdtBuilder, res: &SysRes) -> util::errors::Result<()> {\n\n let node = format!(\"pl011@{:x}\", res.region_base);\n\n let serial_node_dep = fdt.begin_node(&node)?;\n\n fdt.set_property_string(\"compatible\", \"arm,pl011\\0arm,primecell\")?;\n\n fdt.set_property_string(\"clock-names\", \"uartclk\\0apb_pclk\")?;\n\n fdt.set_property_array_u32(\n\n \"clocks\",\n\n &[device_tree::CLK_PHANDLE, device_tree::CLK_PHANDLE],\n\n )?;\n\n fdt.set_property_array_u64(\"reg\", &[res.region_base, res.region_size])?;\n\n fdt.set_property_array_u32(\n\n \"interrupts\",\n\n &[\n\n device_tree::GIC_FDT_IRQ_TYPE_SPI,\n\n res.irq as u32,\n\n device_tree::IRQ_TYPE_EDGE_RISING,\n\n ],\n\n )?;\n\n fdt.end_node(serial_node_dep)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "machine/src/standard_vm/aarch64/mod.rs", "rank": 48, "score": 232620.73623036835 }, { "content": "fn check_mac_address(mac: &str) -> bool {\n\n if mac.len() != MAC_ADDRESS_LENGTH {\n\n return false;\n\n }\n\n\n\n let mac_vec: Vec<&str> = mac.split(':').collect();\n\n if mac_vec.len() != 6 {\n\n return false;\n\n }\n\n\n\n let bit_list = 
[\n\n '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f', 'A', 'B',\n\n 'C', 'D', 'E', 'F',\n\n ];\n\n for mac_bit in mac_vec {\n\n if mac_bit.len() != 2 {\n\n return false;\n\n }\n\n let mut mac_bit_char = mac_bit.chars();\n\n if !bit_list.contains(&mac_bit_char.next().unwrap())\n", "file_path": "machine_manager/src/config/network.rs", "rank": 49, "score": 232012.88705526915 }, { "content": "pub fn capget(hdr: &mut CapUserHeader, data: &mut CapUserData) -> Result<()> {\n\n SyscallResult {\n\n ret: unsafe { libc::syscall(CAPGET, hdr, data) as i32 },\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 50, "score": 231574.8060092262 }, { "content": "pub fn is_msix_enabled(msix_cap_offset: usize, config: &[u8]) -> bool {\n\n let offset: usize = msix_cap_offset + MSIX_CAP_CONTROL as usize;\n\n let msix_ctl = le_read_u16(&config, offset).unwrap();\n\n if msix_ctl & MSIX_CAP_ENABLE > 0 {\n\n return true;\n\n }\n\n false\n\n}\n\n\n", "file_path": "pci/src/msix.rs", "rank": 51, "score": 229571.79763452994 }, { "content": "/// Change permissions of file or directory.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `file_path` - The path of file.\n\n/// * `mode` - The file permissions.\n\npub fn chmod(file_path: &str, mode: libc::mode_t) -> Result<()> {\n\n let path = into_cstring(file_path)?;\n\n SyscallResult {\n\n ret: unsafe { libc::chmod(path.as_ptr(), mode) },\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 52, "score": 227345.15674366127 }, { "content": "pub fn parse_drive(cmd_parser: CmdParser) -> Result<DriveConfig> {\n\n let mut drive = DriveConfig::default();\n\n\n\n if let Some(format) = cmd_parser.get_value::<String>(\"format\")? {\n\n if format.ne(\"raw\") {\n\n bail!(\"Only \\'raw\\' type of block is supported\");\n\n }\n\n }\n\n\n\n if let Some(id) = cmd_parser.get_value::<String>(\"id\")? 
{\n\n drive.id = id;\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"id\", \"blk\").into());\n\n }\n\n\n\n if let Some(file) = cmd_parser.get_value::<String>(\"file\")? {\n\n drive.path_on_host = file;\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"file\", \"blk\").into());\n\n }\n", "file_path": "machine_manager/src/config/drive.rs", "rank": 53, "score": 226832.53108856053 }, { "content": "pub fn parse_netdev(cmd_parser: CmdParser) -> Result<NetDevcfg> {\n\n let mut net = NetDevcfg::default();\n\n let netdev_type = if let Some(netdev_type) = cmd_parser.get_value::<String>(\"\")? {\n\n netdev_type\n\n } else {\n\n \"\".to_string()\n\n };\n\n if netdev_type.ne(\"tap\") {\n\n bail!(\"Unsupported netdev type: {:?}\", &netdev_type);\n\n }\n\n if let Some(net_id) = cmd_parser.get_value::<String>(\"id\")? {\n\n net.id = net_id;\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"id\", \"netdev\").into());\n\n }\n\n if let Some(ifname) = cmd_parser.get_value::<String>(\"ifname\")? {\n\n net.ifname = ifname;\n\n }\n\n\n\n if let Some(vhost) = cmd_parser.get_value::<ExBool>(\"vhost\")? {\n", "file_path": "machine_manager/src/config/network.rs", "rank": 54, "score": 226832.53108856053 }, { "content": "pub fn parse_chardev(cmd_parser: CmdParser) -> Result<ChardevConfig> {\n\n let chardev_id = if let Some(chardev_id) = cmd_parser.get_value::<String>(\"id\")? 
{\n\n chardev_id\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"id\", \"chardev\").into());\n\n };\n\n let backend = cmd_parser.get_value::<String>(\"\")?;\n\n let path = cmd_parser.get_value::<String>(\"path\")?;\n\n check_chardev_args(cmd_parser)?;\n\n let chardev_type = if let Some(backend) = backend {\n\n match backend.as_str() {\n\n \"stdio\" => ChardevType::Stdio,\n\n \"pty\" => ChardevType::Pty,\n\n \"socket\" => {\n\n if let Some(path) = path {\n\n ChardevType::Socket(path)\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"path\", \"socket-type chardev\").into());\n\n }\n\n }\n", "file_path": "machine_manager/src/config/chardev.rs", "rank": 55, "score": 226832.53108856053 }, { "content": "pub fn set_termi_raw_mode() -> std::io::Result<()> {\n\n let tty_fd = std::io::stdin().lock().tty_fd();\n\n\n\n // Safe because this only set the `old_term_mode` struct to zero.\n\n let mut old_term_mode: termios = unsafe { std::mem::zeroed() };\n\n // Safe because this only get stdin's current mode and save it.\n\n let ret = unsafe { tcgetattr(tty_fd, &mut old_term_mode as *mut _) };\n\n if ret < 0 {\n\n return Err(std::io::Error::last_os_error());\n\n }\n\n *TERMINAL_MODE.lock().unwrap() = Some(old_term_mode);\n\n\n\n let mut new_term_mode: termios = old_term_mode;\n\n // Safe because this function only change the `new_term_mode` argument.\n\n unsafe { libc::cfmakeraw(&mut new_term_mode as *mut _) };\n\n new_term_mode.c_oflag |= OPOST;\n\n // Safe because this function only set the stdin to raw mode.\n\n let ret = unsafe { tcsetattr(tty_fd, TCSANOW, &new_term_mode as *const _) };\n\n if ret < 0 {\n\n return Err(std::io::Error::last_os_error());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "util/src/lib.rs", "rank": 56, "score": 225007.15206102526 }, { "content": "pub fn set_termi_canon_mode() -> std::io::Result<()> {\n\n let tty_fd = std::io::stdin().lock().tty_fd();\n\n if let Some(old_term_mode) = TERMINAL_MODE.lock().unwrap().as_ref() {\n\n // Safe 
because this only recover the stdin's mode.\n\n let ret = unsafe { tcsetattr(tty_fd, TCSANOW, old_term_mode as *const _) };\n\n if ret < 0 {\n\n return Err(std::io::Error::last_os_error());\n\n }\n\n } else {\n\n debug!(\"stdin's mode is not initialized: please check the config\");\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "util/src/lib.rs", "rank": 57, "score": 225007.15206102526 }, { "content": "pub fn checksum(slice: &[u8]) -> u8 {\n\n let mut sum: u32 = 0;\n\n\n\n for byte in slice.iter() {\n\n sum += u32::from(*byte);\n\n sum &= 0xff;\n\n }\n\n\n\n (sum & 0xff) as u8\n\n}\n\n\n", "file_path": "util/src/checksum.rs", "rank": 58, "score": 224212.39923541617 }, { "content": "/// This function returns the caller's thread ID(TID).\n\npub fn gettid() -> u64 {\n\n unsafe { libc::syscall(libc::SYS_gettid) as u64 }\n\n}\n\n\n", "file_path": "util/src/unix.rs", "rank": 59, "score": 222865.9250758069 }, { "content": "// Function that helps to generate pci node in device-tree.\n\n//\n\n// # Arguments\n\n//\n\n// * `fdt` - Flatted device-tree blob where node will be filled into.\n\nfn generate_pci_host_node(fdt: &mut FdtBuilder) -> util::errors::Result<()> {\n\n let pcie_ecam_base = MEM_LAYOUT[LayoutEntryType::PcieEcam as usize].0;\n\n let pcie_ecam_size = MEM_LAYOUT[LayoutEntryType::PcieEcam as usize].1;\n\n let pcie_buses_num = MEM_LAYOUT[LayoutEntryType::PcieEcam as usize].1 >> 20;\n\n let node = format!(\"pcie@{:x}\", pcie_ecam_base);\n\n let pci_node_dep = fdt.begin_node(&node)?;\n\n fdt.set_property_string(\"compatible\", \"pci-host-ecam-generic\")?;\n\n fdt.set_property_string(\"device_type\", \"pci\")?;\n\n fdt.set_property_array_u64(\"reg\", &[pcie_ecam_base, pcie_ecam_size])?;\n\n fdt.set_property_array_u32(\"bus-range\", &[0, (pcie_buses_num - 1) as u32])?;\n\n fdt.set_property_u32(\"linux,pci-domain\", 0)?;\n\n fdt.set_property_u32(\"#address-cells\", 3)?;\n\n fdt.set_property_u32(\"#size-cells\", 2)?;\n\n\n\n let pcie_mmio_base = 
MEM_LAYOUT[LayoutEntryType::PcieMmio as usize].0;\n\n let pcie_mmio_size = MEM_LAYOUT[LayoutEntryType::PcieMmio as usize].1;\n\n let fdt_pci_mmio_type: u32 = 0x0200_0000;\n\n let mmio_base_hi: u32 = (pcie_mmio_base >> 32) as u32;\n\n let mmio_base_lo: u32 = (pcie_mmio_base & 0xffff_ffff) as u32;\n\n let mmio_size_hi: u32 = (pcie_mmio_size >> 32) as u32;\n", "file_path": "machine/src/standard_vm/aarch64/mod.rs", "rank": 60, "score": 222711.4237278246 }, { "content": "/// Function that helps to generate flash node in device-tree.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `dev_info` - Device resource info of fw-cfg device.\n\n/// * `flash` - Flatted device-tree blob where fw-cfg node will be filled into.\n\nfn generate_flash_device_node(fdt: &mut FdtBuilder) -> util::errors::Result<()> {\n\n let flash_base = MEM_LAYOUT[LayoutEntryType::Flash as usize].0;\n\n let flash_size = MEM_LAYOUT[LayoutEntryType::Flash as usize].1 / 2;\n\n let node = format!(\"flash@{:x}\", flash_base);\n\n let flash_node_dep = fdt.begin_node(&node)?;\n\n fdt.set_property_string(\"compatible\", \"cfi-flash\")?;\n\n fdt.set_property_array_u64(\n\n \"reg\",\n\n &[flash_base, flash_size, flash_base + flash_size, flash_size],\n\n )?;\n\n fdt.set_property_u32(\"bank-width\", 4)?;\n\n fdt.end_node(flash_node_dep)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "machine/src/standard_vm/aarch64/mod.rs", "rank": 61, "score": 222710.9731509401 }, { "content": "pub fn pci_slot(devfn: u8) -> u8 {\n\n devfn >> 3 & 0x1f\n\n}\n\n\n", "file_path": "pci/src/lib.rs", "rank": 62, "score": 220329.64670776413 }, { "content": "pub fn pci_func(devfn: u8) -> u8 {\n\n devfn & 0x07\n\n}\n\n\n", "file_path": "pci/src/lib.rs", "rank": 63, "score": 220329.64670776413 }, { "content": "fn write_idt_value(val: u64, guest_mem: &Arc<AddressSpace>) -> Result<()> {\n\n let boot_idt_addr = BOOT_IDT_OFFSET;\n\n guest_mem\n\n .write_object(&val, GuestAddress(boot_idt_addr))\n\n .chain_err(|| format!(\"Failed to load gdt to 0x{:x}\", 
boot_idt_addr))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "boot_loader/src/x86_64/direct_boot/gdt.rs", "rank": 64, "score": 219887.66497474717 }, { "content": "/// Extract from the 64 bit input @value the bit field specified by the\n\n/// @start and @length parameters, and return it. The bit field must\n\n/// lie entirely within the 64 bit word. It is valid to request that\n\n/// all 64 bits are returned (ie @length 64 and @start 0).\n\n///\n\n/// # Arguments\n\n///\n\n/// * `value` - The value to extract the bit field from\n\n/// * `start` - The lowest bit in the bit field (numbered from 0)\n\n/// * `length` - The length of the bit field\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// extern crate util;\n\n/// use util::num_ops::extract_u64;\n\n///\n\n/// let value = extract_u64(0xfbfba0a0ffff5a5a, 16, 16).unwrap();\n\n/// assert!(value == 0xffff);\n\n/// ```\n\npub fn extract_u64(value: u64, start: u32, length: u32) -> Option<u64> {\n\n if length > 64 - start {\n\n error!(\n\n \"extract_u64: ( start {} length {} ) is out of range\",\n\n start, length\n\n );\n\n return None;\n\n }\n\n\n\n Some((value >> start as u64) & (!(0_u64) >> (64 - length) as u64))\n\n}\n\n\n", "file_path": "util/src/num_ops.rs", "rank": 65, "score": 219592.85625613024 }, { "content": "/// Mount destination directory.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `dst_path` - Path of destination directory.\n\npub fn mount(source_file: Option<&str>, new_root_dir: &str, flag: libc::c_ulong) -> Result<()> {\n\n let target = into_cstring(new_root_dir)?;\n\n if let Some(path) = source_file {\n\n let source = into_cstring(path)?;\n\n SyscallResult {\n\n ret: unsafe { libc::mount(source.as_ptr(), target.as_ptr(), null(), flag, null()) },\n\n }\n\n .into()\n\n } else {\n\n SyscallResult {\n\n ret: unsafe { libc::mount(null(), target.as_ptr(), null(), flag, null()) },\n\n }\n\n .into()\n\n }\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 66, "score": 219248.45792230056 }, { "content": 
"/// Change owner of file\n\n///\n\n/// # Arguments\n\n///\n\n/// * `uid` - User id.\n\n/// * `gid` - Group id.\n\npub fn chown(file_path: &str, uid: u32, gid: u32) -> Result<()> {\n\n let path = into_cstring(file_path)?;\n\n SyscallResult {\n\n ret: unsafe { libc::chown(path.as_ptr(), uid as libc::uid_t, gid as libc::gid_t) },\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 67, "score": 218210.687005597 }, { "content": "// Remove all capability when uid is 0.\n\n// when uid is 0 , the child process capability is :\n\n// P'(permitted) = P(inheritable) | P(bounding)\n\n// P'(effective) = P'(permitted)\n\n// so we set Bounding to limit child process.\n\npub fn clear_all_capabilities() -> Result<()> {\n\n for cap in 0..NR_ALL_CAP {\n\n if has_cap(cap).chain_err(|| ErrorKind::CapsError(\"CAPGET\"))? {\n\n syscall::drop_bounding_caps(cap)\n\n .chain_err(|| ErrorKind::CapsError(\"PR_CAPBSET_DROP\"))?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "ozone/src/capability.rs", "rank": 68, "score": 217539.14923773642 }, { "content": "/// Load linux kernel or initrd image file to Guest Memory.\n\n///\n\n/// # Arguments\n\n/// * `image` - image file for kernel or initrd.\n\n/// * `start_addr` - image start address in guest memory.\n\n/// * `sys_mem` - guest memory.\n\n///\n\n/// # Errors\n\n///\n\n/// * Write image to guest memory failed.\n\nfn load_image(image: &mut File, start_addr: u64, sys_mem: &Arc<AddressSpace>) -> Result<()> {\n\n let curr_loc = image.seek(SeekFrom::Current(0))?;\n\n let len = image.seek(SeekFrom::End(0))?;\n\n image.seek(SeekFrom::Start(curr_loc))?;\n\n\n\n sys_mem.write(image, GuestAddress(start_addr), len - curr_loc)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "boot_loader/src/x86_64/direct_boot/mod.rs", "rank": 69, "score": 216845.52427878013 }, { "content": "pub fn pci_devfn(slot: u8, func: u8) -> u8 {\n\n ((slot & 0x1f) << 3) | (func & 0x07)\n\n}\n\n\n", "file_path": "pci/src/lib.rs", "rank": 70, "score": 
216199.56120679784 }, { "content": "pub fn obj_checksum<T: ByteCode>(t: &T) -> u8 {\n\n let mut sum: u32 = 0;\n\n\n\n for byte in t.as_bytes().iter() {\n\n sum += u32::from(*byte);\n\n sum &= 0xff;\n\n }\n\n\n\n (sum & 0xff) as u8\n\n}\n", "file_path": "util/src/checksum.rs", "rank": 71, "score": 215254.05697795557 }, { "content": "fn real_main(cmd_args: &arg_parser::ArgMatches, vm_config: &mut VmConfig) -> Result<()> {\n\n TempCleaner::object_init();\n\n\n\n if cmd_args.is_present(\"daemonize\") {\n\n match daemonize(cmd_args.value_of(\"pidfile\")) {\n\n Ok(()) => {\n\n if let Some(pidfile) = cmd_args.value_of(\"pidfile\") {\n\n TempCleaner::add_path(pidfile);\n\n }\n\n info!(\"Daemonize mode start!\");\n\n }\n\n Err(e) => bail!(\"Daemonize start failed: {}\", e),\n\n }\n\n } else if cmd_args.value_of(\"pidfile\").is_some() {\n\n bail!(\"-pidfile must be used with -daemonize together.\");\n\n }\n\n\n\n QmpChannel::object_init();\n\n EventLoop::object_init(&vm_config.iothreads)?;\n\n register_kill_signal();\n", "file_path": "src/main.rs", "rank": 72, "score": 213087.9830877054 }, { "content": "type CpregList = FamStructWrapper<kvm_reg_list>;\n\n\n\n// PSR (Processor State Register) bits.\n\n// See: https://elixir.bootlin.com/linux/v5.6/source/arch/arm64/include/uapi/asm/ptrace.h#L34\n\n#[allow(non_upper_case_globals)]\n\nconst PSR_MODE_EL1h: u64 = 0x0000_0005;\n\nconst PSR_F_BIT: u64 = 0x0000_0040;\n\nconst PSR_I_BIT: u64 = 0x0000_0080;\n\nconst PSR_A_BIT: u64 = 0x0000_0100;\n\nconst PSR_D_BIT: u64 = 0x0000_0200;\n\n// MPIDR is Multiprocessor Affinity Register\n\n// [40:63] bit reserved on AArch64 Architecture,\n\nconst UNINIT_MPIDR: u64 = 0xFFFF_FF00_0000_0000;\n\n// MPIDR - Multiprocessor Affinity Register.\n\n// See: https://elixir.bootlin.com/linux/v5.6/source/arch/arm64/include/asm/sysreg.h#L130\n\nconst SYS_MPIDR_EL1: u64 = 0x6030_0000_0013_c005;\n\nconst KVM_MAX_CPREG_ENTRIES: usize = 1024;\n\n\n\n/// AArch64 CPU booting configure information\n\n///\n", 
"file_path": "cpu/src/aarch64/mod.rs", "rank": 73, "score": 211657.67130729213 }, { "content": "/// A trait bound defined for types which are safe to convert to a byte slice and\n\n/// to create from a byte slice.\n\npub trait ByteCode: Default + Copy + Send + Sync {\n\n /// Return the contents of an object (impl trait `ByteCode`) as a slice of bytes.\n\n /// the inverse of this function is \"from_bytes\"\n\n fn as_bytes(&self) -> &[u8] {\n\n unsafe { from_raw_parts(self as *const Self as *const u8, size_of::<Self>()) }\n\n }\n\n\n\n /// Return the contents of a mutable object (impl trait `ByteCode`) to a mutable slice of bytes.\n\n /// the inverse of this function is \"from_bytes_mut\"\n\n fn as_mut_bytes(&mut self) -> &mut [u8] {\n\n unsafe { from_raw_parts_mut(self as *mut Self as *mut u8, size_of::<Self>()) }\n\n }\n\n\n\n /// Creates an object (impl trait `ByteCode`) from a slice of bytes\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `data` - the slice of bytes that will be constructed as an object.\n\n fn from_bytes(data: &[u8]) -> Option<&Self> {\n\n if data.len() != size_of::<Self>() {\n", "file_path": "util/src/byte_code.rs", "rank": 74, "score": 211080.52183218274 }, { "content": "/// Write the given u32 to the first or second half in u64,\n\n/// returns the u64 value.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `value` - The origin u32 value.\n\n/// * `page` - Value is 0 or 1, determines which half to write.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// extern crate util;\n\n/// use util::num_ops::write_u32;\n\n///\n\n/// let value = write_u32(0x1000_0000, 1);\n\n/// assert!(value == 0x1000_0000_0000_0000);\n\n/// ```\n\npub fn write_u32(value: u32, page: u32) -> u64 {\n\n match page {\n\n 0 => u64::from(value),\n\n 1 => u64::from(value) << 32,\n\n _ => 0_u64,\n\n }\n\n}\n\n\n", "file_path": "util/src/num_ops.rs", "rank": 75, "score": 210817.2702000573 }, { "content": "/// Get the first half or second half of u64.\n\n///\n\n/// # 
Arguments\n\n///\n\n/// * `value` - The origin value to get u32 from.\n\n/// * `page` - Value is 0 or 1, determines which half to return.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// extern crate util;\n\n/// use util::num_ops::read_u32;\n\n///\n\n/// let value = read_u32(0x2000_1000_0000, 1);\n\n/// assert!(value == 0x2000);\n\n/// ```\n\npub fn read_u32(value: u64, page: u32) -> u32 {\n\n match page {\n\n 0 => value as u32,\n\n 1 => (value >> 32) as u32,\n\n _ => 0_u32,\n\n }\n\n}\n\n\n", "file_path": "util/src/num_ops.rs", "rank": 76, "score": 210816.60161591077 }, { "content": "/// Calculate the aligned-up u64 value.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `origin` - the origin value.\n\n/// * `align` - the alignment.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// extern crate util;\n\n/// use util::num_ops::round_up;\n\n///\n\n/// let value = round_up(1003 as u64, 4 as u64);\n\n/// assert!(value == Some(1004));\n\n/// ```\n\npub fn round_up(origin: u64, align: u64) -> Option<u64> {\n\n match origin % align {\n\n 0 => Some(origin),\n\n diff => origin.checked_add(align - diff),\n\n }\n\n}\n\n\n", "file_path": "util/src/num_ops.rs", "rank": 77, "score": 208765.4161065937 }, { "content": "/// Calculate the aligned-down u64 value.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `origin` - the origin value.\n\n/// * `align` - the alignment.\n\n///\n\n/// # Examples\n\n///\n\n/// ```rust\n\n/// extern crate util;\n\n/// use util::num_ops::round_down;\n\n///\n\n/// let value = round_down(1003 as u64, 4 as u64);\n\n/// assert!(value == Some(1000));\n\n/// ```\n\npub fn round_down(origin: u64, align: u64) -> Option<u64> {\n\n match origin % align {\n\n 0 => Some(origin),\n\n diff => origin.checked_sub(diff),\n\n }\n\n}\n\n\n", "file_path": "util/src/num_ops.rs", "rank": 78, "score": 208765.4161065937 }, { "content": "fn parse_rng_obj(object_args: &str) -> Result<RngObjConfig> {\n\n let mut cmd_params = CmdParser::new(\"rng-object\");\n\n 
cmd_params.push(\"\").push(\"id\").push(\"filename\");\n\n\n\n cmd_params.parse(&object_args)?;\n\n let id = if let Some(obj_id) = cmd_params.get_value::<String>(\"id\")? {\n\n obj_id\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"id\", \"rng-object\").into());\n\n };\n\n let filename = if let Some(name) = cmd_params.get_value::<String>(\"filename\")? {\n\n name\n\n } else {\n\n return Err(ErrorKind::FieldIsMissing(\"filename\", \"rng-object\").into());\n\n };\n\n let rng_obj_cfg = RngObjConfig { id, filename };\n\n\n\n Ok(rng_obj_cfg)\n\n}\n\n\n", "file_path": "machine_manager/src/config/mod.rs", "rank": 79, "score": 208560.4831475674 }, { "content": "/// Load bzImage linux kernel to Guest Memory.\n\n///\n\n/// # Notes\n\n/// According to Linux `Documentation/x86/boot.txt`, bzImage includes two parts:\n\n/// * the setup\n\n/// * the compressed kernel\n\n/// The setup `RealModeKernelHeader` can be load at offset `0x01f1` in bzImage kernel image.\n\n/// The compressed kernel will be loaded into guest memory at `code32_start` in\n\n/// `RealModeKernelHeader`.\n\n/// The start address of compressed kernel is the loader address + 0x200. 
It will be\n\n/// set in `kernel_start` in `BootLoader` structure set.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `kernel_image` - Guest kernel image.\n\n///\n\n/// # Errors\n\n///\n\n/// * Invalid BzImage header or version.\n\n/// * Failed to write bzImage linux kernel to guest memory.\n\npub fn load_bzimage(kernel_image: &mut File) -> Result<RealModeKernelHeader> {\n\n let mut boot_hdr = RealModeKernelHeader::new();\n\n\n\n kernel_image.seek(SeekFrom::Start(BOOT_HDR_START))?;\n\n kernel_image\n\n .read_exact(&mut boot_hdr.as_mut_bytes())\n\n .chain_err(|| \"Failed to read boot_hdr from bzImage kernel\")?;\n\n boot_hdr.type_of_loader = UNDEFINED_ID;\n\n\n\n if let Err(e) = boot_hdr.check_valid_kernel() {\n\n kernel_image.seek(SeekFrom::Start(0))?;\n\n return Err(e);\n\n }\n\n\n\n let mut setup_size = boot_hdr.setup_sects as u64;\n\n if setup_size == 0 {\n\n setup_size = 4;\n\n }\n\n setup_size = (setup_size + 1) << 9;\n\n kernel_image.seek(SeekFrom::Start(setup_size as u64))?;\n\n\n\n Ok(boot_hdr)\n\n}\n\n\n", "file_path": "boot_loader/src/x86_64/direct_boot/mod.rs", "rank": 80, "score": 208209.20252550702 }, { "content": "#[cfg(target_arch = \"aarch64\")]\n\nfn generate_serial_device_node(fdt: &mut FdtBuilder, res: &SysRes) -> util::errors::Result<()> {\n\n let node = format!(\"uart@{:x}\", res.region_base);\n\n let serial_node_dep = fdt.begin_node(&node)?;\n\n fdt.set_property_string(\"compatible\", \"ns16550a\")?;\n\n fdt.set_property_string(\"clock-names\", \"apb_pclk\")?;\n\n fdt.set_property_u32(\"clocks\", device_tree::CLK_PHANDLE)?;\n\n fdt.set_property_array_u64(\"reg\", &[res.region_base, res.region_size])?;\n\n fdt.set_property_array_u32(\n\n \"interrupts\",\n\n &[\n\n device_tree::GIC_FDT_IRQ_TYPE_SPI,\n\n res.irq as u32,\n\n device_tree::IRQ_TYPE_EDGE_RISING,\n\n ],\n\n )?;\n\n fdt.end_node(serial_node_dep)?;\n\n\n\n Ok(())\n\n}\n\n\n\n// Function that helps to generate RTC node in device-tree.\n\n//\n\n// # Arguments\n\n//\n\n// * `dev_info` - Device 
resource info of RTC device.\n\n// * `fdt` - Flatted device-tree blob where RTC node will be filled into.\n", "file_path": "machine/src/micro_vm/mod.rs", "rank": 81, "score": 207222.37824047782 }, { "content": "#[derive(Default, Debug, Serialize, Deserialize, PartialEq)]\n\nstruct VersionNumber {\n\n micro: u8,\n\n minor: u8,\n\n major: u8,\n\n}\n\n\n\nimpl QmpGreeting {\n\n /// Create qmp greeting message.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `micro` - Micro version number.\n\n /// * `minor` - Minor version number.\n\n /// * `major` - Major version number.\n\n pub fn create_greeting(micro: u8, minor: u8, major: u8) -> Self {\n\n let version_number = VersionNumber {\n\n micro,\n\n minor,\n\n major,\n\n };\n", "file_path": "machine_manager/src/qmp/mod.rs", "rank": 82, "score": 206820.78074200594 }, { "content": "// Follow ACPI spec: 5.4 Definition Block Encoding\n\n// The lower two bits indicates how many bytes are used for PkgLength\n\n// The 3,4 bits are only used if PkgLength consists of one bytes.\n\n// Therefore, the max value of PkgLength is 0x3F(one-byte encoding),\n\n// 0xF_FF(two-byte encoding), 0xF_FF_FF(three-byte encoding), 0xF_FF_FF_FF(four-byte encoding).\n\n/// Calculate PkgLength according to the length, and convert it to bytes.\n\nfn build_pkg_length(length: usize, include_self: bool) -> Vec<u8> {\n\n let pkg_1byte_shift = 6;\n\n let pkg_2byte_shift = 4;\n\n let pkg_3byte_shift = 12;\n\n let pkg_4byte_shift = 20;\n\n let mut pkg_length = length;\n\n let mut bytes = Vec::new();\n\n\n\n let bytes_count = if length + 1 < (1 << pkg_1byte_shift) {\n\n 1\n\n } else if length + 2 < (1 << pkg_3byte_shift) {\n\n 2\n\n } else if length + 3 < (1 << pkg_4byte_shift) {\n\n 3\n\n } else {\n\n 4\n\n };\n\n\n\n if include_self {\n\n pkg_length += bytes_count;\n", "file_path": "acpi/src/aml_compiler.rs", "rank": 83, "score": 203703.95673273184 }, { "content": "// Function that helps to generate RTC node in device-tree.\n\n//\n\n// # Arguments\n\n//\n\n// * 
`dev_info` - Device resource info of RTC device.\n\n// * `fdt` - Flatted device-tree blob where RTC node will be filled into.\n\nfn generate_rtc_device_node(fdt: &mut FdtBuilder, res: &SysRes) -> util::errors::Result<()> {\n\n let node = format!(\"pl031@{:x}\", res.region_base);\n\n let rtc_node_dep = fdt.begin_node(&node)?;\n\n fdt.set_property_string(\"compatible\", \"arm,pl031\\0arm,primecell\\0\")?;\n\n fdt.set_property_string(\"clock-names\", \"apb_pclk\")?;\n\n fdt.set_property_u32(\"clocks\", device_tree::CLK_PHANDLE)?;\n\n fdt.set_property_array_u64(\"reg\", &[res.region_base, res.region_size])?;\n\n fdt.set_property_array_u32(\n\n \"interrupts\",\n\n &[\n\n device_tree::GIC_FDT_IRQ_TYPE_SPI,\n\n res.irq as u32,\n\n device_tree::IRQ_TYPE_LEVEL_HIGH,\n\n ],\n\n )?;\n\n fdt.end_node(rtc_node_dep)?;\n\n\n\n Ok(())\n\n}\n\n\n\n/// Trait that helps to generate all nodes in device-tree.\n", "file_path": "machine/src/standard_vm/aarch64/mod.rs", "rank": 84, "score": 203474.78232944192 }, { "content": "// Function that helps to generate Virtio-Mmio device's node in device-tree.\n\n//\n\n// # Arguments\n\n//\n\n// * `dev_info` - Device resource info of Virtio-Mmio device.\n\n// * `fdt` - Flatted device-tree blob where node will be filled into.\n\nfn generate_virtio_devices_node(fdt: &mut FdtBuilder, res: &SysRes) -> util::errors::Result<()> {\n\n let node = format!(\"virtio_mmio@{:x}\", res.region_base);\n\n let virtio_node_dep = fdt.begin_node(&node)?;\n\n fdt.set_property_string(\"compatible\", \"virtio,mmio\")?;\n\n fdt.set_property_u32(\"interrupt-parent\", device_tree::GIC_PHANDLE)?;\n\n fdt.set_property_array_u64(\"reg\", &[res.region_base, res.region_size])?;\n\n fdt.set_property_array_u32(\n\n \"interrupts\",\n\n &[\n\n device_tree::GIC_FDT_IRQ_TYPE_SPI,\n\n res.irq as u32,\n\n device_tree::IRQ_TYPE_EDGE_RISING,\n\n ],\n\n )?;\n\n fdt.end_node(virtio_node_dep)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "machine/src/standard_vm/aarch64/mod.rs", "rank": 85, 
"score": 203474.69864272283 }, { "content": "/// Function that helps to generate fw-cfg node in device-tree.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `dev_info` - Device resource info of fw-cfg device.\n\n/// * `fdt` - Flatted device-tree blob where fw-cfg node will be filled into.\n\nfn generate_fwcfg_device_node(fdt: &mut FdtBuilder, res: &SysRes) -> util::errors::Result<()> {\n\n let node = format!(\"fw-cfg@{:x}\", res.region_base);\n\n let fwcfg_node_dep = fdt.begin_node(&node)?;\n\n fdt.set_property_string(\"compatible\", \"qemu,fw-cfg-mmio\")?;\n\n fdt.set_property_array_u64(\"reg\", &[res.region_base, res.region_size])?;\n\n fdt.end_node(fwcfg_node_dep)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "machine/src/standard_vm/aarch64/mod.rs", "rank": 86, "score": 203474.6578895823 }, { "content": "// If there is null character in string, return false.\n\nfn check_string_legality(s: &str) -> bool {\n\n !s.contains('\\0')\n\n}\n\n\n\nimpl Default for FdtBuilder {\n\n fn default() -> Self {\n\n Self {\n\n fdt_header: vec![0_u8; FDT_HEADER_SIZE],\n\n mem_reserve: Vec::new(),\n\n structure_blk: Vec::new(),\n\n strings_blk: Vec::new(),\n\n boot_cpuid_phys: 0,\n\n subnode_depth: 0,\n\n begin_node: false,\n\n }\n\n }\n\n}\n\n\n\nimpl FdtBuilder {\n\n pub fn new() -> Self {\n", "file_path": "util/src/device_tree.rs", "rank": 87, "score": 203210.51290003245 }, { "content": "/// Write process id to pid file.\n\nfn create_pid_file(path: &str) -> Result<()> {\n\n let pid: u32 = std::process::id();\n\n\n\n let mut pid_file: File = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .mode(0o600)\n\n .open(path)?;\n\n write!(pid_file, \"{}\", pid)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "util/src/daemonize.rs", "rank": 88, "score": 202469.50915305625 }, { "content": "/// Create a special or ordinary file.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `node_path` - The path of file node.\n\n/// * `mode` - The node permissions.\n\n/// * `dev` - The device number.\n\npub fn 
mknod(node_path: &str, mode: libc::mode_t, dev: libc::dev_t) -> Result<()> {\n\n let path = into_cstring(node_path)?;\n\n SyscallResult {\n\n ret: unsafe { libc::mknod(path.as_ptr(), mode, dev) },\n\n }\n\n .into()\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 89, "score": 202212.75305709016 }, { "content": "pub fn qmp_query_balloon() -> Option<u64> {\n\n // Safe, because there is no confliction when writing global variable BALLOON_DEV, in other words,\n\n // this function will not be called simultaneously.\n\n if let Some(dev) = unsafe { &BALLOON_DEV } {\n\n let unlocked_dev = dev.lock().unwrap();\n\n return Some(unlocked_dev.get_guest_memory_size());\n\n }\n\n None\n\n}\n\n\n", "file_path": "virtio/src/balloon.rs", "rank": 90, "score": 202167.67678898998 }, { "content": "/// Parse `DeviceState` structure to `DeviceStateDesc`.\n\npub fn parse_struct(\n\n input: &syn::DataStruct,\n\n ident: &syn::Ident,\n\n current_version: u32,\n\n compat_version: u32,\n\n) -> proc_macro2::TokenStream {\n\n let struct_ident = format_ident!(\"DeviceStateDesc\");\n\n let name = format!(\"{}\", ident);\n\n\n\n let fields = parse_fields(&input.fields, ident);\n\n\n\n quote! 
{\n\n #struct_ident {\n\n name: #name.to_string(),\n\n alias: MigrationManager::desc_db_len(),\n\n size: std::mem::size_of::<#ident>() as u32,\n\n current_version: #current_version,\n\n compat_version: #compat_version,\n\n fields: vec![#(#fields), *],\n\n }\n", "file_path": "migration_derive/src/struct_parser.rs", "rank": 91, "score": 201980.67876398924 }, { "content": "fn get_attr_version(meta_list: MetaList, current_version: &mut u32, compat_version: &mut u32) {\n\n for meta in meta_list.nested.iter() {\n\n if let syn::NestedMeta::Meta(syn::Meta::NameValue(attr_name_value)) = meta {\n\n if let Some(version) = meta_name_parse(attr_name_value, CURRENT_VERSION) {\n\n *current_version = version;\n\n }\n\n if let Some(version) = meta_name_parse(attr_name_value, COMPAT_VERSION) {\n\n *compat_version = version;\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "migration_derive/src/attr_parser.rs", "rank": 92, "score": 201640.687789291 }, { "content": "fn bind_socket(path: String) -> Result<UnixListener> {\n\n let listener =\n\n UnixListener::bind(&path).chain_err(|| format!(\"Failed to bind socket file {}\", &path))?;\n\n // Add file to temporary pool, so it could be cleaned when vm exits.\n\n TempCleaner::add_path(path.clone());\n\n limit_permission(&path)\n\n .chain_err(|| format!(\"Failed to limit permission for socket file {}\", &path))?;\n\n Ok(listener)\n\n}\n", "file_path": "machine_manager/src/cmdline.rs", "rank": 93, "score": 198465.84921574892 }, { "content": "/// [setsid(2)](https://man7.org/linux/man-pages/man2/setsid.2.html)\n\n/// setsid() creates a new session if the calling process is not a process group\n\n/// leader. The calling process is the leader of the new session. The calling\n\n/// process also becomes the process group leader or a new process group in the\n\n/// session.\n\n/// The calling process will be the only process in the new process group and in\n\n/// the new session. 
New session has no controlling termimal.\n\n///\n\n/// # Errors\n\n///\n\n/// `DaemonSetsid` Error, the ret of `libc::setsid()` is -1\n\nfn set_sid() -> Result<()> {\n\n let ret = unsafe { libc::setsid() };\n\n\n\n if ret == -1 {\n\n Err(ErrorKind::DaemonSetsid.into())\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "util/src/daemonize.rs", "rank": 94, "score": 198219.84653307984 }, { "content": "/// Create a syscall allowlist for seccomp.\n\n///\n\n/// # Notes\n\n/// This allowlist limit syscall with:\n\n/// * aarch64-unknown-gnu: 43 syscalls\n\n/// * aarch64-unknown-musl: 42 syscalls\n\n/// To reduce performance losses, the syscall rules is ordered by frequency.\n\npub fn syscall_whitelist() -> Vec<BpfRule> {\n\n vec![\n\n BpfRule::new(libc::SYS_read),\n\n BpfRule::new(libc::SYS_write),\n\n ioctl_allow_list(),\n\n BpfRule::new(libc::SYS_epoll_pwait),\n\n BpfRule::new(libc::SYS_io_getevents),\n\n BpfRule::new(libc::SYS_io_submit),\n\n BpfRule::new(libc::SYS_dup),\n\n BpfRule::new(libc::SYS_close),\n\n BpfRule::new(libc::SYS_eventfd2),\n\n BpfRule::new(libc::SYS_epoll_ctl),\n\n BpfRule::new(libc::SYS_fdatasync),\n\n BpfRule::new(libc::SYS_recvmsg),\n\n BpfRule::new(libc::SYS_sendmsg),\n\n BpfRule::new(libc::SYS_recvfrom),\n\n BpfRule::new(libc::SYS_mremap),\n\n BpfRule::new(libc::SYS_io_setup),\n\n BpfRule::new(libc::SYS_brk),\n\n BpfRule::new(libc::SYS_fcntl)\n", "file_path": "machine/src/standard_vm/aarch64/syscall.rs", "rank": 95, "score": 197662.99745835667 }, { "content": "/// Initial pagetables.\n\nfn setup_page_table(sys_mem: &Arc<AddressSpace>) -> Result<u64> {\n\n // Puts PML4 right after zero page but aligned to 4k.\n\n let boot_pml4_addr = PML4_START;\n\n let boot_pdpte_addr = PDPTE_START;\n\n let boot_pde_addr = PDE_START;\n\n\n\n // Entry covering VA [0..512GB)\n\n let pdpte = boot_pdpte_addr | 0x03;\n\n sys_mem\n\n .write_object(&pdpte, GuestAddress(boot_pml4_addr))\n\n .chain_err(|| format!(\"Failed to load PD PTE to 0x{:x}\", 
boot_pml4_addr))?;\n\n\n\n // Entry covering VA [0..1GB)\n\n let pde = boot_pde_addr | 0x03;\n\n sys_mem\n\n .write_object(&pde, GuestAddress(boot_pdpte_addr))\n\n .chain_err(|| format!(\"Failed to load PDE to 0x{:x}\", boot_pdpte_addr))?;\n\n\n\n // 512 2MB entries together covering VA [0..1GB). Note we are assuming\n\n // CPU supports 2MB pages (/proc/cpuinfo has 'pse'). All modern CPUs do.\n\n for i in 0..512u64 {\n\n let pde = (i << 21) + 0x83u64;\n\n sys_mem\n\n .write_object(&pde, GuestAddress(boot_pde_addr + i * 8))\n\n .chain_err(|| format!(\"Failed to load PDE to 0x{:x}\", boot_pde_addr + i * 8))?;\n\n }\n\n\n\n Ok(boot_pml4_addr)\n\n}\n\n\n", "file_path": "boot_loader/src/x86_64/direct_boot/mod.rs", "rank": 96, "score": 197260.78821014997 }, { "content": "struct GicRedistRegion {\n\n /// Base address.\n\n base: u64,\n\n /// Size of redistributor region.\n\n size: u64,\n\n /// Attribute of redistributor region.\n\n base_attr: u64,\n\n}\n\n\n\n/// A wrapper around creating and managing a `GICv3`.\n\npub struct GICv3 {\n\n /// The fd for the GICv3 device.\n\n fd: DeviceFd,\n\n /// Number of vCPUs, determines the number of redistributor and CPU interface.\n\n pub(crate) vcpu_count: u64,\n\n /// GICv3 ITS device.\n\n pub(crate) its_dev: Option<Arc<GICv3Its>>,\n\n /// Maximum irq number.\n\n pub(crate) nr_irqs: u32,\n\n /// GICv3 redistributor info, support multiple redistributor regions.\n", "file_path": "devices/src/interrupt_controller/aarch64/gicv3.rs", "rank": 97, "score": 196594.72937353334 }, { "content": "/// Transfer &str to CString.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `item` - 'item' is &str type.\n\nfn into_cstring(item: &str) -> Result<CString> {\n\n CString::new(item).map_err(|_| std::io::ErrorKind::InvalidInput.into())\n\n}\n\n\n", "file_path": "ozone/src/syscall.rs", "rank": 98, "score": 195957.54789302213 }, { "content": "// Version in `Cargo.toml` will be shown as \"x.x.x\".\n\n// This function will separate it to three bytes, and padding 
to [u8;4] after\n\n// three as u32.\n\n// Version in `attribute` can also be shown as this format.\n\nfn version_to_u32(version_str: &str) -> u32 {\n\n let version_vec: Vec<u8> = version_str\n\n .split('.')\n\n .map(|x| x.parse::<u8>().unwrap())\n\n .collect();\n\n\n\n if version_vec.len() != 3 {\n\n panic!(\"Version str is illegal.\");\n\n }\n\n\n\n (version_vec[2] as u32) + ((version_vec[1] as u32) << 8) + ((version_vec[0] as u32) << 16)\n\n}\n\n\n", "file_path": "migration_derive/src/attr_parser.rs", "rank": 99, "score": 194627.9613850353 } ]
Rust
npk/src/dm_verity.rs
AmoghAJ/northstar
06e087df238bd9fc45026ecaf48e2642266ac7c5
use rand::RngCore; use sha2::{Digest, Sha256}; use std::{ fs::{File, OpenOptions}, io::{BufReader, Read, Seek, SeekFrom::Start, Write}, path::Path, }; use thiserror::Error; use uuid::Uuid; pub const SHA256_SIZE: usize = 32; pub const BLOCK_SIZE: usize = 4096; pub type Sha256Digest = [u8; SHA256_SIZE]; pub type Salt = Sha256Digest; #[derive(Error, Debug)] pub enum Error { #[error("Error generating hash tree: {0}")] HashTree(String), #[error("Error creating valid uuid")] Uuid, #[error("OS error: {context}")] Os { context: String, #[source] error: std::io::Error, }, } pub fn append_dm_verity_block(fsimg_path: &Path, fsimg_size: u64) -> Result<Sha256Digest, Error> { let (level_offsets, tree_size) = calc_hash_tree_level_offsets(fsimg_size as usize, BLOCK_SIZE, SHA256_SIZE as usize); let (salt, root_hash, hash_tree) = gen_hash_tree(&fsimg_path, fsimg_size, &level_offsets, tree_size)?; append_superblock_and_hashtree(&fsimg_path, fsimg_size, &salt, &hash_tree)?; Ok(root_hash) } fn gen_salt() -> Salt { let mut salt: Salt = [0u8; SHA256_SIZE]; rand::thread_rng().fill_bytes(&mut salt); salt } fn calc_hash_tree_level_offsets( image_size: usize, block_size: usize, digest_size: usize, ) -> (Vec<usize>, usize) { let mut level_offsets: Vec<usize> = vec![]; let mut level_sizes: Vec<usize> = vec![]; let mut tree_size = 0; let mut num_levels = 0; let mut rem_size = image_size; while rem_size > block_size { let num_blocks = (rem_size + block_size - 1) / block_size; let level_size = round_up_to_multiple(num_blocks * digest_size, block_size); level_sizes.push(level_size); tree_size += level_size; num_levels += 1; rem_size = level_size; } for n in 0..num_levels { let mut offset = 0; #[allow(clippy::needless_range_loop)] for m in (n + 1)..num_levels { offset += level_sizes[m]; } level_offsets.push(offset); } (level_offsets, tree_size) } fn gen_hash_tree( fsimg_path: &Path, image_size: u64, level_offsets: &[usize], tree_size: usize, ) -> Result<(Salt, Sha256Digest, Vec<u8>), Error> { let 
image = &File::open(&fsimg_path).map_err(|e| Error::Os { context: format!("Cannot open '{}'", &fsimg_path.display()), error: e, })?; let mut hashes: Vec<[u8; SHA256_SIZE]> = vec![]; let mut level_num = 0; let mut level_size = image_size; let mut hash_tree = vec![0_u8; tree_size]; if image_size % BLOCK_SIZE as u64 != 0 { return Err(Error::HashTree(format!("Failed to generate verity has tree. The image size {} is not a multiple of the block size {}", image_size, BLOCK_SIZE) )); } let salt = gen_salt(); loop { hashes.clear(); let mut rem_size = level_size; while rem_size > 0 { let mut sha256 = Sha256::new(); sha256.update(salt); if level_num == 0 { let offset = level_size - rem_size; let mut data = vec![0_u8; BLOCK_SIZE]; let mut image_reader = BufReader::new(image); image_reader.seek(Start(offset)).map_err(|e| Error::Os { context: format!("Failed to seek in file {}", &fsimg_path.display()), error: e, })?; image_reader.read_exact(&mut data).map_err(|e| Error::Os { context: "Failed to read from fs-image".to_string(), error: e, })?; sha256.update(&data); } else { let offset = level_offsets[level_num - 1] + level_size as usize - rem_size as usize; sha256.update(&hash_tree[offset..offset + BLOCK_SIZE]); } rem_size -= BLOCK_SIZE as u64; hashes.push(sha256.finalize().into()); } if hashes.len() == 1 { break; } let mut level = hashes.iter().flat_map(|s| s.iter().copied()).collect(); pad_to_block_size(&mut level); let offset = level_offsets[level_num]; hash_tree[offset..offset + level.len()].copy_from_slice(level.as_slice()); level_size = level.len() as u64; level_num += 1; } let root_hash = hashes[0]; Ok((salt, root_hash, hash_tree)) } fn append_superblock_and_hashtree( fsimg_path: &Path, fsimg_size: u64, salt: &Salt, hash_tree: &[u8], ) -> Result<(), Error> { let uuid = Uuid::new_v4(); assert_eq!(fsimg_size % BLOCK_SIZE as u64, 0); let data_blocks = fsimg_size / BLOCK_SIZE as u64; let mut fsimg = OpenOptions::new() .write(true) .append(true) .open(&fsimg_path) .map_err(|e| 
Error::Os { context: format!("Cannot open '{}'", &fsimg_path.display()), error: e, })?; const VERITY_SIGNATURE: &[u8; 8] = b"verity\x00\x00"; const HASH_ALG_NAME: &[u8; 6] = b"sha256"; let mut raw_sb: Vec<u8> = vec![]; raw_sb.extend(VERITY_SIGNATURE); raw_sb.extend(&1_u32.to_ne_bytes()); raw_sb.extend(&1_u32.to_ne_bytes()); raw_sb.extend(&hex::decode(uuid.to_string().replace("-", "")).map_err(|_e| Error::Uuid)?); raw_sb.extend(HASH_ALG_NAME); raw_sb.extend(&[0_u8; 26]); raw_sb.extend(&(BLOCK_SIZE as u32).to_ne_bytes()); raw_sb.extend(&(BLOCK_SIZE as u32).to_ne_bytes()); raw_sb.extend(&data_blocks.to_ne_bytes()); raw_sb.extend(&(SHA256_SIZE as u16).to_ne_bytes()); raw_sb.extend(&[0_u8; 6]); raw_sb.extend(salt); raw_sb.extend(&vec![0_u8; 256 - salt.len()]); || -> Result<(), std::io::Error> { fsimg.write_all(&raw_sb)?; fsimg.write_all(vec![0u8; BLOCK_SIZE - raw_sb.len()].as_slice())?; fsimg.write_all(&hash_tree) }() .map_err(|e| Error::Os { context: "Failed to write to fs-image".to_string(), error: e, })?; Ok(()) } fn pad_to_block_size(data: &mut Vec<u8>) { let pad_size = round_up_to_multiple(data.len(), BLOCK_SIZE) - data.len(); data.append(&mut vec![0_u8; pad_size]); } fn round_up_to_multiple(number: usize, multiple: usize) -> usize { number + ((multiple - (number % multiple)) % multiple) }
use rand::RngCore; use sha2::{Digest, Sha256}; use std::{ fs::{File, OpenOptions}, io::{BufReader, Read, Seek, SeekFrom::Start, Write}, path::Path, }; use thiserror::Error; use uuid::Uuid; pub const SHA256_SIZE: usize = 32; pub const BLOCK_SIZE: usize = 4096; pub type Sha256Digest = [u8; SHA256_SIZE]; pub type Salt = Sha256Digest; #[derive(Error, Debug)] pub enum Error { #[error("Error generating hash tree: {0}")] HashTree(String), #[error("Error creating valid uuid")] Uuid, #[error("OS error: {context}")] Os { context: String, #[source] error: std::io::Error, }, } pub fn append_dm_verity_block(fsimg_path: &Path, fsimg_size: u64) -> Result<Sha256Digest, Error> { let (level_offsets, tree_size) = calc_hash_tree_level_offsets(fsimg_size as usize, BLOCK_SIZE, SHA256_SIZE as usize); let (salt, root_hash, hash_tree) = gen_hash_tree(&fsimg_path, fsimg_size, &level_offsets, tree_size)?; append_superblock_and_hashtree(&fsimg_path, fsimg_size, &salt, &hash_tree)?; Ok(root_hash) } fn gen_salt() -> Salt { let mut salt: Salt = [0u8; SHA256_SIZE]; rand::thread_rng().fill_bytes(&mut salt); salt }
fn gen_hash_tree( fsimg_path: &Path, image_size: u64, level_offsets: &[usize], tree_size: usize, ) -> Result<(Salt, Sha256Digest, Vec<u8>), Error> { let image = &File::open(&fsimg_path).map_err(|e| Error::Os { context: format!("Cannot open '{}'", &fsimg_path.display()), error: e, })?; let mut hashes: Vec<[u8; SHA256_SIZE]> = vec![]; let mut level_num = 0; let mut level_size = image_size; let mut hash_tree = vec![0_u8; tree_size]; if image_size % BLOCK_SIZE as u64 != 0 { return Err(Error::HashTree(format!("Failed to generate verity has tree. The image size {} is not a multiple of the block size {}", image_size, BLOCK_SIZE) )); } let salt = gen_salt(); loop { hashes.clear(); let mut rem_size = level_size; while rem_size > 0 { let mut sha256 = Sha256::new(); sha256.update(salt); if level_num == 0 { let offset = level_size - rem_size; let mut data = vec![0_u8; BLOCK_SIZE]; let mut image_reader = BufReader::new(image); image_reader.seek(Start(offset)).map_err(|e| Error::Os { context: format!("Failed to seek in file {}", &fsimg_path.display()), error: e, })?; image_reader.read_exact(&mut data).map_err(|e| Error::Os { context: "Failed to read from fs-image".to_string(), error: e, })?; sha256.update(&data); } else { let offset = level_offsets[level_num - 1] + level_size as usize - rem_size as usize; sha256.update(&hash_tree[offset..offset + BLOCK_SIZE]); } rem_size -= BLOCK_SIZE as u64; hashes.push(sha256.finalize().into()); } if hashes.len() == 1 { break; } let mut level = hashes.iter().flat_map(|s| s.iter().copied()).collect(); pad_to_block_size(&mut level); let offset = level_offsets[level_num]; hash_tree[offset..offset + level.len()].copy_from_slice(level.as_slice()); level_size = level.len() as u64; level_num += 1; } let root_hash = hashes[0]; Ok((salt, root_hash, hash_tree)) } fn append_superblock_and_hashtree( fsimg_path: &Path, fsimg_size: u64, salt: &Salt, hash_tree: &[u8], ) -> Result<(), Error> { let uuid = Uuid::new_v4(); assert_eq!(fsimg_size % BLOCK_SIZE as 
u64, 0); let data_blocks = fsimg_size / BLOCK_SIZE as u64; let mut fsimg = OpenOptions::new() .write(true) .append(true) .open(&fsimg_path) .map_err(|e| Error::Os { context: format!("Cannot open '{}'", &fsimg_path.display()), error: e, })?; const VERITY_SIGNATURE: &[u8; 8] = b"verity\x00\x00"; const HASH_ALG_NAME: &[u8; 6] = b"sha256"; let mut raw_sb: Vec<u8> = vec![]; raw_sb.extend(VERITY_SIGNATURE); raw_sb.extend(&1_u32.to_ne_bytes()); raw_sb.extend(&1_u32.to_ne_bytes()); raw_sb.extend(&hex::decode(uuid.to_string().replace("-", "")).map_err(|_e| Error::Uuid)?); raw_sb.extend(HASH_ALG_NAME); raw_sb.extend(&[0_u8; 26]); raw_sb.extend(&(BLOCK_SIZE as u32).to_ne_bytes()); raw_sb.extend(&(BLOCK_SIZE as u32).to_ne_bytes()); raw_sb.extend(&data_blocks.to_ne_bytes()); raw_sb.extend(&(SHA256_SIZE as u16).to_ne_bytes()); raw_sb.extend(&[0_u8; 6]); raw_sb.extend(salt); raw_sb.extend(&vec![0_u8; 256 - salt.len()]); || -> Result<(), std::io::Error> { fsimg.write_all(&raw_sb)?; fsimg.write_all(vec![0u8; BLOCK_SIZE - raw_sb.len()].as_slice())?; fsimg.write_all(&hash_tree) }() .map_err(|e| Error::Os { context: "Failed to write to fs-image".to_string(), error: e, })?; Ok(()) } fn pad_to_block_size(data: &mut Vec<u8>) { let pad_size = round_up_to_multiple(data.len(), BLOCK_SIZE) - data.len(); data.append(&mut vec![0_u8; pad_size]); } fn round_up_to_multiple(number: usize, multiple: usize) -> usize { number + ((multiple - (number % multiple)) % multiple) }
fn calc_hash_tree_level_offsets( image_size: usize, block_size: usize, digest_size: usize, ) -> (Vec<usize>, usize) { let mut level_offsets: Vec<usize> = vec![]; let mut level_sizes: Vec<usize> = vec![]; let mut tree_size = 0; let mut num_levels = 0; let mut rem_size = image_size; while rem_size > block_size { let num_blocks = (rem_size + block_size - 1) / block_size; let level_size = round_up_to_multiple(num_blocks * digest_size, block_size); level_sizes.push(level_size); tree_size += level_size; num_levels += 1; rem_size = level_size; } for n in 0..num_levels { let mut offset = 0; #[allow(clippy::needless_range_loop)] for m in (n + 1)..num_levels { offset += level_sizes[m]; } level_offsets.push(offset); } (level_offsets, tree_size) }
function_block-full_function
[ { "content": "fn create_squashfs(out: &Path, src: &Path, pseudo_dirs: &[(String, u32)]) -> Result<(), Error> {\n\n #[cfg(target_os = \"linux\")]\n\n let compression_alg = \"gzip\";\n\n #[cfg(not(target_os = \"linux\"))]\n\n let compression_alg = \"zstd\";\n\n\n\n if which::which(&MKSQUASHFS_BIN).is_err() {\n\n return Err(Error::Squashfs(format!(\n\n \"Failed to locate '{}'\",\n\n &MKSQUASHFS_BIN\n\n )));\n\n }\n\n if !out.exists() {\n\n return Err(Error::Squashfs(format!(\n\n \"Output directory '{}' does not exist\",\n\n &out.display()\n\n )));\n\n }\n\n let mut cmd = Command::new(&MKSQUASHFS_BIN);\n\n cmd.arg(&out.display().to_string())\n", "file_path": "npk/src/npk.rs", "rank": 1, "score": 253281.3513649291 }, { "content": "pub fn unpack(npk: &Path, out: &Path) -> Result<(), Error> {\n\n let mut zip = open_zipped_npk(&npk)?;\n\n zip.extract(&out).map_err(|e| Error::Archive {\n\n context: format!(\"Failed to extract NPK to '{}'\", &out.display()),\n\n error: e,\n\n })?;\n\n let fsimg = out.join(&FS_IMG_NAME);\n\n unpack_squashfs(&fsimg, &out)\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 2, "score": 234365.82857513253 }, { "content": "/// Create an NPK for the north runtime.\n\n/// sextant collects the artifacts in a given container directory, creates and signs the necessary metadata\n\n/// and packs the results into a zipped NPK file.\n\n///\n\n/// # Example\n\n///\n\n/// To build the 'hello' example container:\n\n///\n\n/// sextant pack \\\n\n/// --dir examples/container/hello \\\n\n/// --out target/north/registry \\\n\n/// --key examples/keys/north.key \\\n\npub fn pack(dir: &Path, out: &Path, key: &Path) -> Result<(), Error> {\n\n let manifest = read_manifest(dir)?;\n\n\n\n // add manifest and root dir to tmp dir\n\n let tmp = tempfile::TempDir::new().map_err(|e| Error::Os {\n\n context: \"Failed to create temporary directory\".to_string(),\n\n error: e,\n\n })?;\n\n let tmp_root = copy_src_root_to_tmp(&dir, &tmp)?;\n\n let tmp_manifest = 
write_manifest(&manifest, &tmp)?;\n\n\n\n // create filesystem image\n\n let fsimg = tmp.path().join(&FS_IMG_BASE).with_extension(&FS_IMG_EXT);\n\n create_fs_img(&tmp_root, &manifest, &fsimg)?;\n\n\n\n // create NPK\n\n let signature = sign_npk(&key, &fsimg, &tmp_manifest)?;\n\n write_npk(&out, &manifest, &fsimg, &signature)\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 3, "score": 231474.18718334212 }, { "content": "fn sign_npk(key_file: &Path, fsimg: &Path, tmp_manifest: &Path) -> Result<String, Error> {\n\n let fsimg_size = fs::metadata(&fsimg)\n\n .map_err(|e| Error::Os {\n\n context: format!(\"Fail to read read size of '{}'\", &fsimg.display()),\n\n error: e,\n\n })?\n\n .len();\n\n let root_hash = append_dm_verity_block(&fsimg, fsimg_size).map_err(Error::Verity)?;\n\n let key_pair = read_keypair(&key_file)?;\n\n let hashes_yaml = gen_hashes_yaml(&tmp_manifest, &fsimg, fsimg_size, &root_hash)?;\n\n let signature_yaml = sign_hashes(&key_pair, &hashes_yaml);\n\n Ok(signature_yaml)\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 4, "score": 220439.5626218088 }, { "content": "/// Generate a keypair suitable for signing and verifying NPKs\n\npub fn gen_key(name: &str, out: &Path) -> Result<(), Error> {\n\n let mut csprng = OsRng {};\n\n let key_pair = Keypair::generate(&mut csprng);\n\n let pub_key = out.join(name).with_extension(\"pub\");\n\n let prv_key = out.join(name).with_extension(\"key\");\n\n assume_non_existing(&pub_key)?;\n\n assume_non_existing(&prv_key)?;\n\n\n\n fn write(data: &[u8], path: &Path) -> Result<(), Error> {\n\n let mut file = File::create(&path).map_err(|e| Error::Os {\n\n context: format!(\"Failed to create '{}'\", &path.display()),\n\n error: e,\n\n })?;\n\n\n\n file.write_all(&data).map_err(|e| Error::Os {\n\n context: format!(\"Failed to write to '{}'\", &path.display()),\n\n error: e,\n\n })?;\n\n Ok(())\n\n }\n\n write(&key_pair.public.to_bytes(), &pub_key)?;\n\n write(&key_pair.secret.to_bytes(), &prv_key)?;\n\n 
Ok(())\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 5, "score": 211439.49393168764 }, { "content": "fn read_manifest(src: &Path) -> Result<Manifest, Error> {\n\n let manifest_path = src.join(MANIFEST_BASE).with_extension(&MANIFEST_EXT);\n\n let manifest = std::fs::File::open(&manifest_path).map_err(|e| Error::Os {\n\n context: format!(\"Failed to open manifest at '{}'\", &manifest_path.display()),\n\n error: e,\n\n })?;\n\n\n\n serde_yaml::from_reader(manifest).map_err(|_e| {\n\n Error::Manifest(format!(\n\n \"Failed to parse manifest '{}'\",\n\n &manifest_path.display()\n\n ))\n\n })\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 6, "score": 210460.0125385556 }, { "content": "fn read_keypair(key_file: &Path) -> Result<Keypair, Error> {\n\n let mut secret_key_bytes = [0u8; SECRET_KEY_LENGTH];\n\n File::open(&key_file)\n\n .map_err(|e| Error::Os {\n\n context: format!(\"Failed to open '{}'\", &key_file.display()),\n\n error: e,\n\n })?\n\n .read_exact(&mut secret_key_bytes)\n\n .map_err(|e| Error::Os {\n\n context: format!(\"Failed to read key data from '{}'\", &key_file.display()),\n\n error: e,\n\n })?;\n\n let secret_key = SecretKey::from_bytes(&secret_key_bytes).map_err(|e| Error::Key {\n\n context: format!(\"Failed to derive secret key from '{}'\", &key_file.display()),\n\n error: e,\n\n })?;\n\n let public_key = PublicKey::from(&secret_key);\n\n Ok(Keypair {\n\n secret: secret_key,\n\n public: public_key,\n\n })\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 7, "score": 207029.55137348047 }, { "content": "fn create_fs_img(tmp_root: &Path, manifest: &Manifest, fsimg: &Path) -> Result<(), Error> {\n\n let pseudo_files = gen_pseudo_files(&manifest);\n\n create_squashfs(&tmp_root, &fsimg, &pseudo_files)\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 8, "score": 204296.64015543467 }, { "content": "pub fn open_zipped_npk(npk: &Path) -> Result<ZipArchive<File>, Error> {\n\n let zip = zip::ZipArchive::new(File::open(&npk).map_err(|e| Error::Os 
{\n\n context: format!(\"Failed to open NPK at '{}'\", &npk.display()),\n\n error: e,\n\n })?)\n\n .map_err(|e| Error::Archive {\n\n context: format!(\"Failed to parse ZIP format of NPK at '{}'\", &npk.display()),\n\n error: e,\n\n })?;\n\n\n\n Ok(zip)\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 9, "score": 201940.90751586674 }, { "content": "fn write_npk(npk: &Path, manifest: &Manifest, fsimg: &Path, signature: &str) -> Result<(), Error> {\n\n let npk = npk\n\n .join(format!(\n\n \"{}-{}.\",\n\n &manifest.name,\n\n &manifest.version.to_string()\n\n ))\n\n .with_extension(&NPK_EXT);\n\n let npk = File::create(&npk).map_err(|e| Error::Os {\n\n context: format!(\"Failed to create NPK at '{}'\", &npk.display()),\n\n error: e,\n\n })?;\n\n\n\n let options =\n\n zip::write::FileOptions::default().compression_method(zip::CompressionMethod::Stored);\n\n let manifest_string = serde_yaml::to_string(&manifest)\n\n .map_err(|e| Error::Manifest(format!(\"Could not serialize manifest: {}\", e)))?;\n\n let mut zip = zip::ZipWriter::new(&npk);\n\n || -> Result<(), ZipError> {\n\n zip.start_file(SIGNATURE_NAME, options)?;\n", "file_path": "npk/src/npk.rs", "rank": 10, "score": 199105.02454484836 }, { "content": "fn assume_non_existing(path: &Path) -> Result<(), Error> {\n\n if path.exists() {\n\n Err(Error::FileOperation(format!(\n\n \"File '{}' already exists\",\n\n &path.display()\n\n )))\n\n } else {\n\n Ok(())\n\n }\n\n}\n", "file_path": "npk/src/npk.rs", "rank": 11, "score": 194228.68436423468 }, { "content": "pub fn read_manifest(\n\n npk: &Path,\n\n signing_keys: &HashMap<String, ed25519_dalek::PublicKey>,\n\n) -> Result<Manifest, NpkError> {\n\n let mut archive_reader = ArchiveReader::new(&npk, &signing_keys).map_err(NpkError::Archive)?;\n\n archive_reader\n\n .extract_manifest_from_archive()\n\n .map_err(NpkError::Archive)\n\n}\n\n\n\nimpl<'a> ArchiveReader<'a> {\n\n pub fn new(\n\n npk: &Path,\n\n signing_keys: &'a HashMap<String, ed25519_dalek::PublicKey>,\n\n ) 
-> Result<Self, Error> {\n\n let file =\n\n std::fs::File::open(&npk).map_err(|_e| Error::CouldNotOpenFile(PathBuf::from(npk)))?;\n\n\n\n let reader: std::io::BufReader<std::fs::File> = std::io::BufReader::new(file);\n\n let archive: zip::ZipArchive<std::io::BufReader<std::fs::File>> =\n", "file_path": "npk/src/archive.rs", "rank": 12, "score": 185849.28692041323 }, { "content": "fn unpack_squashfs(image: &Path, out: &Path) -> Result<(), Error> {\n\n if which::which(&UNSQUASHFS_BIN).is_err() {\n\n return Err(Error::Squashfs(format!(\n\n \"Failed to locate '{}'\",\n\n &UNSQUASHFS_BIN\n\n )));\n\n }\n\n if !image.exists() {\n\n return Err(Error::Squashfs(format!(\n\n \"Squashfs image at '{}' does not exist\",\n\n &image.display()\n\n )));\n\n }\n\n let squashfs_root = out.join(\"squashfs-root\");\n\n let mut cmd = Command::new(&UNSQUASHFS_BIN);\n\n cmd.arg(\"-dest\")\n\n .arg(&squashfs_root.display().to_string())\n\n .arg(&image.display().to_string())\n\n .output()\n\n .map_err(|_e| Error::Squashfs(format!(\"Error while executing '{}'\", &UNSQUASHFS_BIN)))?;\n\n Ok(())\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 13, "score": 184287.9904023906 }, { "content": "fn write(input: &str, path: &Path) -> Result<()> {\n\n fs::write(path, input).context(format!(\n\n \"Failed to write \\\"{}\\\" to {}\",\n\n input,\n\n path.display()\n\n ))\n\n}\n\n\n", "file_path": "north_tests/test_container/src/main.rs", "rank": 14, "score": 184142.14780389948 }, { "content": "pub fn inspect(npk: &Path) -> Result<()> {\n\n let mut zip = npk::open_zipped_npk(&npk)?;\n\n let mut print_buf: String = String::new();\n\n println!(\n\n \"{}\",\n\n format!(\"# inspection of '{}'\", &npk.display()).green()\n\n );\n\n println!(\"{}\", \"## NPK Content\".to_string().green());\n\n zip.file_names().for_each(|f| println!(\"{}\", f));\n\n println!();\n\n\n\n // print manifest\n\n let mut man = zip\n\n .by_name(npk::MANIFEST_NAME)\n\n .context(\"Failed to find manifest in NPK\")?;\n\n 
println!(\"{}\", format!(\"## {}\", npk::MANIFEST_NAME).green());\n\n man.read_to_string(&mut print_buf)\n\n .with_context(|| \"Failed to read manifest\")?;\n\n println!(\"{}\", &print_buf);\n\n print!(\"\\n\\n\");\n", "file_path": "sextant/src/inspect.rs", "rank": 15, "score": 178733.58105550974 }, { "content": "fn write_manifest(manifest: &Manifest, tmp: &TempDir) -> Result<PathBuf, Error> {\n\n let tmp_manifest_path = tmp\n\n .path()\n\n .join(&MANIFEST_BASE)\n\n .with_extension(&MANIFEST_EXT);\n\n let tmp_manifest = File::create(&tmp_manifest_path).map_err(|e| Error::Os {\n\n context: format!(\"Failed to create '{}'\", &tmp_manifest_path.display()),\n\n error: e,\n\n })?;\n\n serde_yaml::to_writer(&tmp_manifest, &manifest)\n\n .map_err(|_e| Error::Manifest(\"Failed to serialize manifest\".to_string()))?;\n\n Ok(tmp_manifest_path)\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 16, "score": 178554.12454939532 }, { "content": "/// The smallest number divisible by `align_to` and at least `num`.\n\n/// Precondition: `align_to` is a power of 2.\n\n/// Precondition: `num` + `align_to` < usize::MAX + 1.\n\npub fn align_to(num: usize, align_to: usize) -> usize {\n\n let agn = align_to - 1;\n\n\n\n (num + agn) & !agn\n\n}\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 19, "score": 163824.21958710675 }, { "content": "pub fn check_verity_config(verity: &VerityHeader) -> Result<(), Error> {\n\n if &verity.header != b\"verity\" {\n\n return Err(Error::NoVerityHeader);\n\n }\n\n if verity.version != SUPPORTED_VERITY_VERSION {\n\n return Err(Error::UnexpectedVerityVersion(verity.version));\n\n }\n\n if verity.algorithm != \"sha256\" {\n\n return Err(Error::UnexpectedVerityAlgorithm(verity.algorithm.clone()));\n\n }\n\n Ok(())\n\n}\n\n\n\n#[allow(clippy::too_many_arguments)]\n\npub async fn parse_verity_header(buf: &[u8; 512]) -> Result<VerityHeader, Error> {\n\n let s = structure::structure!(\"=6s2xII16s6s26xIIQH6x256s168x\"); // \"a8 L L a16 A32 L L Q S 
x6 a256\"\n\n let (\n\n header,\n\n version,\n\n _hash_type,\n", "file_path": "npk/src/lib.rs", "rank": 21, "score": 163374.31238811588 }, { "content": "fn sign_hashes(key_pair: &Keypair, hashes_yaml: &str) -> String {\n\n let signature = key_pair.sign(hashes_yaml.as_bytes());\n\n let signature_base64 = base64::encode(signature);\n\n let key_id = \"north\";\n\n let signature_yaml = format!(\n\n \"{}---\\nkey: {}\\nsignature: {}\",\n\n &hashes_yaml, &key_id, &signature_base64\n\n );\n\n signature_yaml\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 22, "score": 160986.2331418533 }, { "content": "fn copy_src_root_to_tmp(src: &Path, tmp: &TempDir) -> Result<PathBuf, Error> {\n\n let src_root = src.join(&ROOT_DIR_NAME);\n\n let tmp_root = tmp.path().join(&ROOT_DIR_NAME);\n\n let options = fs_extra::dir::CopyOptions::new();\n\n if src_root.exists() {\n\n fs_extra::dir::copy(&src_root, &tmp, &options).map_err(|_e| {\n\n Error::FileOperation(format!(\n\n \"Failed to copy from '{}' to '{}'\",\n\n &src_root.display(),\n\n &tmp.path().display()\n\n ))\n\n })?;\n\n } else {\n\n // create empty root dir at destination if we have nothing to copy\n\n fs_extra::dir::create(&tmp_root, false).map_err(|_e| {\n\n Error::FileOperation(format!(\n\n \"Failed to create directory '{}'\",\n\n &tmp_root.display()\n\n ))\n\n })?;\n\n }\n\n Ok(tmp_root)\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 23, "score": 159058.9564744778 }, { "content": "// Count the number of files in the directory specified by `path`.\n\nfn count_dir_entries<P: AsRef<Path>>(path: P) -> io::Result<usize> {\n\n Ok(fs::read_dir(path)?.count())\n\n}\n\n\n", "file_path": "minijail/rust/minijail/src/lib.rs", "rank": 25, "score": 153394.36224926048 }, { "content": "/// Return the list of sub-paths to the given directory except the root.\n\n/// For example, the path '/res/dir/subdir' returns ('/res/dir/subdir', /res/dir/', '/res/').\n\nfn path_trail(path: &Path) -> Vec<&Path> {\n\n let mut current_path = path;\n\n 
let mut ret = vec![];\n\n while let Some(parent_path) = current_path.parent() {\n\n ret.push(current_path);\n\n current_path = parent_path;\n\n }\n\n ret\n\n}\n\n\n", "file_path": "npk/src/npk.rs", "rank": 26, "score": 152661.96694861518 }, { "content": "fn deserialize_tmpfs<'de, D>(deserializer: D) -> Result<u64, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct SizeVisitor;\n\n\n\n impl<'de> Visitor<'de> for SizeVisitor {\n\n type Value = u64;\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a number of bytes or a string with the size (e.g. 25M)\")\n\n }\n\n\n\n fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E> {\n\n Ok(v)\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: serde::de::Error,\n\n {\n", "file_path": "npk/src/manifest.rs", "rank": 27, "score": 151013.8513328271 }, { "content": "fn get_mount_point(cgroup: &str) -> Result<PathBuf, Error> {\n\n let cgroup = String::from(cgroup);\n\n MountIter::new()\n\n .map_err(|e| Error::Mount(\"Failed to access mount points\".to_string(), e))?\n\n .filter_map(|m| m.ok())\n\n .find(|m| m.fstype == \"cgroup\" && m.options.contains(&cgroup))\n\n .map(|m| m.dest)\n\n .ok_or_else(|| {\n\n Error::Mount(\n\n format!(\"No mount point for cgroup {}\", &cgroup),\n\n io::Error::new(ErrorKind::Other, \"\"),\n\n )\n\n })\n\n}\n", "file_path": "north/src/runtime/cgroups.rs", "rank": 28, "score": 146772.23502615187 }, { "content": "/// Return true if path is read and writeable\n\nfn is_rw(path: &Path) -> bool {\n\n match stat::stat(path.as_os_str()) {\n\n Ok(stat) => {\n\n let same_uid = stat.st_uid == unistd::getuid().as_raw();\n\n let same_gid = stat.st_gid == unistd::getgid().as_raw();\n\n let mode = stat::Mode::from_bits_truncate(stat.st_mode);\n\n\n\n let is_readable = (same_uid && mode.contains(stat::Mode::S_IRUSR))\n\n || (same_gid && mode.contains(stat::Mode::S_IRGRP))\n\n || mode.contains(stat::Mode::S_IROTH);\n\n let 
is_writable = (same_uid && mode.contains(stat::Mode::S_IWUSR))\n\n || (same_gid && mode.contains(stat::Mode::S_IWGRP))\n\n || mode.contains(stat::Mode::S_IWOTH);\n\n\n\n is_readable && is_writable\n\n }\n\n Err(_) => false,\n\n }\n\n}\n\n\n", "file_path": "north/src/runtime/mod.rs", "rank": 29, "score": 144355.0258517832 }, { "content": "fn cat(path: &Path) -> Result<()> {\n\n let mut input =\n\n fs::File::open(&path).with_context(|| format!(\"Failed to open {}\", path.display()))?;\n\n let mut output = std::io::stdout();\n\n io::copy(&mut input, &mut output)\n\n .map(drop)\n\n .with_context(|| format!(\"Failed to cat {}\", path.display()))?;\n\n writeln!(&mut output).context(\"Failed to write to stdout\")\n\n}\n\n\n", "file_path": "north_tests/test_container/src/main.rs", "rank": 30, "score": 141078.47367269982 }, { "content": "fn touch(path: &Path) -> Result<()> {\n\n fs::File::create(path)?;\n\n Ok(())\n\n}\n", "file_path": "north_tests/test_container/src/main.rs", "rank": 31, "score": 141078.47367269982 }, { "content": "pub fn cargo_bin<S: AsRef<str>>(name: S) -> PathBuf {\n\n let path = cargo_bin_str(name.as_ref());\n\n if !path.exists() {\n\n panic!(\"Required binary {} does not exist\", path.display());\n\n }\n\n path\n\n}\n\n\n", "file_path": "north_tests/src/util.rs", "rank": 32, "score": 139715.04861472652 }, { "content": "/// Return slc up to the first \\0, or None\n\nfn slice_to_null(slc: &[u8]) -> Option<&[u8]> {\n\n slc.iter().position(|c| *c == b'\\0').map(|i| &slc[..i])\n\n}\n\n\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 33, "score": 135097.81980792532 }, { "content": "fn copy_file(file: &Path, dir: &Path) {\n\n assert!(file.is_file());\n\n assert!(dir.is_dir());\n\n let filename = file.file_name().unwrap();\n\n std::fs::copy(file, dir.join(filename)).unwrap();\n\n}\n\n\n\n#[ignore]\n\n#[tokio::test]\n\nasync fn check_hello() -> Result<()> {\n\n init();\n\n let mut runtime = Runtime::launch().await?;\n\n\n\n let hello = 
runtime.start(\"hello\").await?;\n\n\n\n // Here goes some kind of health check for the spawned process\n\n assert!(hello.is_running().await?);\n\n\n\n runtime.stop(\"hello\").await?;\n\n runtime.shutdown().await\n", "file_path": "north_tests/tests/integration_tests.rs", "rank": 34, "score": 134835.18048629782 }, { "content": "fn main() -> Result<(), Error> {\n\n let opt = Opt::from_args();\n\n let config = read_to_string(&opt.config)\n\n .with_context(|| format!(\"Failed to read configuration file {}\", opt.config.display()))?;\n\n let config: Config = toml::from_str(&config)\n\n .with_context(|| format!(\"Failed to read configuration file {}\", opt.config.display()))?;\n\n\n\n let log_filter = if opt.debug || config.debug {\n\n \"north=debug\"\n\n } else {\n\n \"north=info\"\n\n };\n\n {\n\n logd_logger::builder()\n\n .parse_filters(log_filter)\n\n .tag(\"north\")\n\n .init();\n\n }\n\n\n\n info!(\n", "file_path": "north/src/main.rs", "rank": 35, "score": 132504.07192465142 }, { "content": "fn print_squashfs(fsimg_path: &Path) -> Result<()> {\n\n which::which(&npk::UNSQUASHFS_BIN)\n\n .with_context(|| anyhow!(\"Failed to find '{}'\", &npk::UNSQUASHFS_BIN))?;\n\n\n\n let mut cmd = Command::new(&npk::UNSQUASHFS_BIN);\n\n cmd.arg(\"-ll\").arg(fsimg_path.display().to_string());\n\n\n\n let output = cmd\n\n .output()\n\n .with_context(|| format!(\"Failed to execute '{}'\", &npk::UNSQUASHFS_BIN))?;\n\n\n\n println!(\"{}\", String::from_utf8_lossy(&output.stdout));\n\n\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::inspect;\n\n use npk::npk::{gen_key, pack};\n", "file_path": "sextant/src/inspect.rs", "rank": 36, "score": 132333.42493145954 }, { "content": "/// This is a 1:1 port of gen_constants.sh in libminijail\n\nfn generate_syscall_constants(target_os: &str) -> io::Result<PathBuf> {\n\n let artifact = PathBuf::from(env::var(\"OUT_DIR\").unwrap()).join(\"libconstants.gen.c\");\n\n\n\n let expanded = cc::Build::new()\n\n .flag(\"-dD\")\n\n 
.flag(\"../../gen_constants.c\")\n\n .expand();\n\n let preproc = String::from_utf8(expanded).expect(\"Invalid compiler output\");\n\n\n\n let mut out = fs::File::create(&artifact)?;\n\n writeln!(out, \"/* GENERATED by build.rs */\")?;\n\n writeln!(out, \"#include \\\"gen_constants-inl.h\\\"\")?;\n\n writeln!(out, \"#include \\\"libconstants.h\\\"\")?;\n\n writeln!(out, \"const struct constant_entry constant_table[] = {{\")?;\n\n\n\n let re = Regex::new(\"#define ([[:upper:]][[:upper:]0-9_]*).*$\").expect(\"Invalid regex\");\n\n let f = Regex::new(\"^#define [[:upper:]][[:upper:]0-9_]*(\\\\s)+[[:alnum:]_]\")\n\n .expect(\"Invalid redgex\");\n\n preproc\n\n .lines()\n", "file_path": "minijail/rust/minijail-sys/build.rs", "rank": 37, "score": 128259.83249271473 }, { "content": "fn echo(message: &[String]) {\n\n println!(\"{}\", message.join(\" \"));\n\n}\n\n\n", "file_path": "north_tests/test_container/src/main.rs", "rank": 38, "score": 126785.33668219534 }, { "content": "#[derive(Clone, Eq, PartialEq, Debug, Serialize, Deserialize)]\n\n#[serde(untagged)]\n\nenum MountSource {\n\n Resource {\n\n resource: String,\n\n },\n\n Tmpfs {\n\n #[serde(deserialize_with = \"deserialize_tmpfs\")]\n\n tmpfs: u64,\n\n },\n\n Bind {\n\n host: PathBuf,\n\n #[serde(default, skip_serializing_if = \"HashSet::is_empty\")]\n\n flags: HashSet<MountFlag>,\n\n },\n\n Dev(Dev),\n\n Persist(Persist),\n\n}\n\n\n", "file_path": "npk/src/manifest.rs", "rank": 39, "score": 123564.58240636637 }, { "content": "#[test]\n\nfn test_signature_parsing() -> std::io::Result<()> {\n\n let signature = \"manifest.yaml:\n\n hash: 0cbc141c2ef274989683d9ec03edcf41c57688ef5c422c647239328de2c3f306\n\nfs.img:\n\n hash: 3920b5cdb472a9b82a31a77192d9de8c0200718c6eeaf0f6c5cabba80de852f3\n\n verity-hash: 39d01c334d0800e39674005ff52238160b36078dd44839cfefa89f1d12cc3cfa\n", "file_path": "npk/src/archive.rs", "rank": 40, "score": 121451.30450418856 }, { "content": "enum use_logging {\n\n NO_LOGGING = 0,\n\n 
USE_SIGSYS_LOGGING = 1,\n\n USE_RET_LOG_LOGGING = 2,\n\n};\n\n\n\nint test_compile_filter(\n\n std::string filename,\n\n FILE* policy_file,\n\n struct sock_fprog* prog,\n\n enum block_action action = ACTION_RET_KILL,\n\n enum use_logging allow_logging = NO_LOGGING) {\n\n struct filter_options filteropts {\n\n .action = action,\n\n .allow_logging = allow_logging != NO_LOGGING,\n\n .allow_syscalls_for_logging = allow_logging == USE_SIGSYS_LOGGING,\n\n };\n\n return compile_filter(filename.c_str(), policy_file, prog, &filteropts);\n\n}\n\n\n", "file_path": "minijail/syscall_filter_unittest.cc", "rank": 41, "score": 121401.79628888305 }, { "content": "fn gen_hashes_yaml(\n\n tmp_manifest_path: &Path,\n\n fsimg_path: &Path,\n\n fsimg_size: u64,\n\n verity_hash: &[u8],\n\n) -> Result<String, Error> {\n\n // Create hashes YAML\n\n let mut sha256 = Sha256::new();\n\n let mut tmp_manifest = File::open(&tmp_manifest_path).map_err(|e| Error::Os {\n\n context: format!(\"Failed to open '{}'\", &tmp_manifest_path.display()),\n\n error: e,\n\n })?;\n\n io::copy(&mut tmp_manifest, &mut sha256)\n\n .map_err(|_e| Error::Manifest(\"Failed to calculate manifest checksum\".to_string()))?;\n\n\n\n let manifest_hash = sha256.finalize();\n\n let mut sha256 = Sha256::new();\n\n let mut fsimg = File::open(&fsimg_path).map_err(|e| Error::Os {\n\n context: format!(\"Failed to open '{}'\", &fsimg_path.display()),\n\n error: e,\n", "file_path": "npk/src/npk.rs", "rank": 42, "score": 120871.51531003066 }, { "content": "#[async_trait]\n\npub trait Process: Debug + Sync + Send {\n\n fn pid(&self) -> Pid;\n\n async fn stop(&mut self, timeout: time::Duration) -> Result<ExitStatus, Error>;\n\n}\n\n\n\n/// Spawn a task that waits for the process to exit. 
Once the process is exited send the return code\n\n// (if any) to the exit_tx handle passed\n\npub(crate) async fn waitpid(\n\n name: &str,\n\n pid: u32,\n\n exit_handle: ExitHandleSignal,\n\n event_handle: EventTx,\n\n) {\n\n let name = name.to_string();\n\n task::spawn_blocking(move || {\n\n let pid = unistd::Pid::from_raw(pid as i32);\n\n let status = loop {\n\n let result = wait::waitpid(Some(pid), None);\n\n debug!(\"Result of wait_pid is {:?}\", result);\n\n\n", "file_path": "north/src/runtime/process.rs", "rank": 43, "score": 117694.10043474878 }, { "content": "pub fn exit_handle() -> (ExitHandleSignal, ExitHandleWait) {\n\n mpsc::channel(1)\n\n}\n\n\n\n#[derive(Error, Debug)]\n\npub enum Error {\n\n #[error(\"Failed to start process: {0}\")]\n\n Start(String),\n\n #[error(\"Failed to stop process\")]\n\n Stop,\n\n #[error(\"Wrong container type: {0}\")]\n\n WrongContainerType(String),\n\n #[error(\"Minijail error: {0}\")]\n\n Minijail(#[from] ::minijail::Error),\n\n #[error(\"IO error: {0}: {1:?}\")]\n\n Io(String, std::io::Error),\n\n #[error(\"OS error: {0}: {1:?}\")]\n\n Os(String, nix::Error),\n\n}\n\n\n", "file_path": "north/src/runtime/process.rs", "rank": 44, "score": 114647.24872140144 }, { "content": "fn target_dir() -> PathBuf {\n\n env::current_exe()\n\n .ok()\n\n .map(|mut path| {\n\n path.pop();\n\n if path.ends_with(\"deps\") {\n\n path.pop();\n\n }\n\n path\n\n })\n\n .unwrap()\n\n}\n\n\n", "file_path": "north_tests/src/util.rs", "rank": 46, "score": 111577.01412523072 }, { "content": "fn gen_pseudo_files(manifest: &Manifest) -> Vec<(String, u32)> {\n\n let mut pseudo_files: Vec<(String, u32)> = vec![];\n\n if manifest.init.is_some() {\n\n pseudo_files = vec![\n\n // The default is to have at least a minimal /dev mount\n\n (\"/dev\".to_string(), 444),\n\n (\"/proc\".to_string(), 444),\n\n ];\n\n }\n\n\n\n for (target, mount) in &manifest.mounts {\n\n match mount {\n\n Mount::Bind { flags, .. 
} => {\n\n let mode = if flags.contains(&MountFlag::Rw) {\n\n 777\n\n } else {\n\n 555\n\n };\n\n pseudo_files.push((target.display().to_string(), mode));\n\n }\n", "file_path": "npk/src/npk.rs", "rank": 47, "score": 110535.30881733041 }, { "content": "#[allow(non_camel_case_types)]\n\ntype __u8 = ::libc::c_uchar;\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 48, "score": 108498.94581776952 }, { "content": "#[allow(non_camel_case_types)]\n\ntype __u64 = ::libc::c_ulonglong;\n\n\n\n#[repr(C)]\n\npub struct Struct_dm_ioctl {\n\n pub version: [__u32; 3usize],\n\n pub data_size: __u32,\n\n pub data_start: __u32,\n\n pub target_count: __u32,\n\n pub open_count: __s32,\n\n pub flags: __u32,\n\n pub event_nr: __u32,\n\n pub padding: __u32,\n\n pub dev: __u64,\n\n pub name: [u8; 128usize],\n\n pub uuid: [u8; 129usize],\n\n pub data: [u8; 7usize],\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone)]\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 49, "score": 108498.94581776952 }, { "content": "fn cargo_bin_str(name: &str) -> PathBuf {\n\n target_dir().join(format!(\"{}{}\", name, env::consts::EXE_SUFFIX))\n\n}\n", "file_path": "north_tests/src/util.rs", "rank": 50, "score": 99934.15743983311 }, { "content": "/// This is a 1:1 port of gen_syscalls.sh in libminijail\n\nfn generate_syscall_table() -> io::Result<PathBuf> {\n\n let artifact = PathBuf::from(env::var(\"OUT_DIR\").unwrap()).join(\"libsyscalls.gen.c\");\n\n\n\n let expanded = cc::Build::new()\n\n .flag(\"-dD\")\n\n .flag(\"../../gen_syscalls.c\")\n\n .expand();\n\n let preproc = String::from_utf8(expanded).expect(\"Invalid compiler output\");\n\n\n\n let mut out = fs::File::create(&artifact)?;\n\n writeln!(out, \"/* GENERATED by build.rs */\")?;\n\n writeln!(out, \"#include <stddef.h>\")?;\n\n writeln!(out, \"#include <asm/unistd.h>\")?;\n\n writeln!(out, \"#include \\\"libsyscalls.h\\\"\")?;\n\n writeln!(out, \"const struct syscall_entry syscall_table[] = {{\")?;\n\n\n\n let re = 
Regex::new(\"#define __(ARM_)?(NR_)([[:lower:]0-9_]*) (.*)$\").expect(\"Invalid regex\");\n\n preproc.lines().try_for_each(|line| -> io::Result<()> {\n\n if let Some(c) = re.captures(&line) {\n\n let nr = &c[2];\n", "file_path": "minijail/rust/minijail-sys/build.rs", "rank": 51, "score": 98290.30617256918 }, { "content": "int write_pid_to_path(pid_t pid, const char *path)\n\n{\n\n\tFILE *fp = fopen(path, \"we\");\n\n\n\n\tif (!fp) {\n\n\t\tpwarn(\"failed to open '%s'\", path);\n\n\t\treturn -errno;\n\n\t}\n\n\tif (fprintf(fp, \"%d\\n\", (int)pid) < 0) {\n\n\t\t/* fprintf(3) does not set errno on failure. */\n\n\t\twarn(\"fprintf(%s) failed\", path);\n\n\t\treturn -1;\n\n\t}\n\n\tif (fclose(fp)) {\n\n\t\tpwarn(\"fclose(%s) failed\", path);\n\n\t\treturn -errno;\n\n\t}\n\n\n\n\treturn 0;\n", "file_path": "minijail/system.c", "rank": 52, "score": 87834.25977976284 }, { "content": "\tchar *type;\n", "file_path": "minijail/libminijail.c", "rank": 53, "score": 83474.15912001912 }, { "content": " def error(self, message, token=None):\n\n \"\"\"Raise a ParserException with the provided message.\"\"\"\n\n raise ParseException(\n\n message,\n\n self.filename,\n\n line=self._line,\n\n line_number=self._line_number,\n", "file_path": "minijail/tools/parser.py", "rank": 54, "score": 81392.1903464649 }, { "content": "# os detection\n\nmodule OS\n", "file_path": "rakefile.rb", "rank": 55, "score": 80082.27380610754 }, { "content": "fn main() {\n\n let flags = ConstantsFlags::BUILD_TIMESTAMP\n\n | ConstantsFlags::TARGET_TRIPLE\n\n | ConstantsFlags::SHA_SHORT\n\n | ConstantsFlags::SEMVER_FROM_CARGO_PKG;\n\n generate_cargo_keys(flags).expect(\"Unable to generate the cargo keys!\");\n\n}\n", "file_path": "north/build.rs", "rank": 56, "score": 78220.10707550024 }, { "content": "#[derive(Debug, StructOpt)]\n\n#[structopt(about = \"Northstar CLI\")]\n\nenum Opt {\n\n /// Pack Northstar containers\n\n Pack {\n\n /// Container source dir\n\n #[structopt(short, long)]\n\n dir: PathBuf,\n\n 
/// Key file\n\n #[structopt(short, long)]\n\n key: PathBuf,\n\n /// Output directory\n\n #[structopt(short, long)]\n\n out: PathBuf,\n\n },\n\n /// Unpack Northstar containers\n\n Unpack {\n\n /// NPK path\n\n #[structopt(short, long)]\n\n npk: PathBuf,\n\n /// Output directory\n\n #[structopt(short, long)]\n", "file_path": "sextant/src/main.rs", "rank": 57, "score": 77629.30100290732 }, { "content": "enum ret_trap {\n\n USE_RET_KILL = 0,\n\n USE_RET_TRAP = 1,\n\n};\n\n\n", "file_path": "minijail/syscall_filter_unittest.cc", "rank": 58, "score": 75436.70239124038 }, { "content": "// Events from message received by clients in deserialized form\n\nenum ConnectionEvent {\n\n Request(api::Message),\n\n Install(api::Message, PathBuf),\n\n}\n\n\n", "file_path": "north/src/runtime/console.rs", "rank": 59, "score": 75436.70239124038 }, { "content": "fn main() {\n\n let mut n = 10;\n\n loop {\n\n if n == 0 {\n\n println!(\"BOOM!\");\n\n panic!(\"BOOM\");\n\n }\n\n println!(\"Crashing in {} seconds\", n);\n\n std::thread::sleep(Duration::from_secs(1));\n\n n -= 1;\n\n }\n\n}\n", "file_path": "examples/container/crashing/src/main.rs", "rank": 60, "score": 74876.56365543435 }, { "content": "fn main() {\n\n let hello = std::env::var(\"HELLO\").unwrap_or_else(|_| \"unknown\".into());\n\n let version = std::env::var(\"VERSION\").unwrap_or_else(|_| \"unknown\".into());\n\n\n\n println!(\"Hello again {} from version {}!\", hello, version);\n\n for i in 0..u64::MAX {\n\n println!(\n\n \"...and hello again #{} {} from version {}...\",\n\n i, hello, version\n\n );\n\n std::thread::sleep(std::time::Duration::from_secs(1));\n\n }\n\n}\n", "file_path": "examples/container/hello/src/main.rs", "rank": 61, "score": 74876.56365543435 }, { "content": "fn main() {\n\n let version = var(\"VERSION\").expect(\"Failed to read VERSION\");\n\n let threads = var(\"THREADS\")\n\n .expect(\"Failed to read THREADS\")\n\n .parse::<i32>()\n\n .expect(\"Invalid thread count\");\n\n\n\n println!(\"Eating 
CPU with {} threads (v{})!\", threads, version);\n\n\n\n for _ in 0..(threads - 1) {\n\n std::thread::spawn(move || loop {\n\n let (tx, rx) = std::sync::mpsc::channel();\n\n tx.send(0).expect(\"Channel error\");\n\n rx.recv().expect(\"Channel error\");\n\n });\n\n }\n\n\n\n loop {\n\n let (tx, rx) = std::sync::mpsc::channel();\n\n tx.send(0).expect(\"Channel error\");\n\n rx.recv().expect(\"Channel error\");\n\n }\n\n}\n", "file_path": "examples/container/cpueater/src/main.rs", "rank": 62, "score": 74876.56365543435 }, { "content": "#[allow(clippy::all)]\n\nfn main() {\n\n let mut mem = vec![];\n\n for _ in 0..9_999_999 {\n\n println!(\"Eating a Megabyte... have {}\", mem.len());\n\n let mut chunk = vec![];\n\n for i in 0..1_000_000 {\n\n chunk.push((i % 8) as u8);\n\n }\n\n mem.push(chunk);\n\n std::thread::sleep(std::time::Duration::from_millis(400));\n\n }\n\n\n\n // just something to make the compiler not optimize....\n\n for x in &mem {\n\n println!(\"{}\", x[0]);\n\n }\n\n}\n", "file_path": "examples/container/memeater/src/main.rs", "rank": 63, "score": 74876.56365543435 }, { "content": "fn init() {\n\n INIT.call_once(|| {\n\n color_eyre::install().unwrap();\n\n env_logger::builder().is_test(true).try_init().ok();\n\n })\n\n}\n\n\n\nlazy_static! 
{\n\n static ref REPOSITORIES_DIR: TempDir = TempDir::new().unwrap();\n\n static ref TEST_CONTAINER_NPK: PathBuf = {\n\n let build_dir = TempDir::new().unwrap();\n\n let package_dir = TempDir::new().unwrap();\n\n let root = package_dir.path().join(\"root\");\n\n\n\n let binary_path = CargoBuild::new()\n\n .manifest_path(\"test_container/Cargo.toml\")\n\n .target_dir(build_dir.path())\n\n .run()\n\n .unwrap()\n\n .path()\n", "file_path": "north_tests/tests/integration_tests.rs", "rank": 64, "score": 74876.56365543435 }, { "content": "fn crash() {\n\n panic!(\"witness me!\");\n\n}\n\n\n", "file_path": "north_tests/test_container/src/main.rs", "rank": 65, "score": 73861.02798285999 }, { "content": "#[derive(StructOpt)]\n\nenum TestCommands {\n\n Cat {\n\n #[structopt(parse(from_os_str))]\n\n path: PathBuf,\n\n },\n\n Crash,\n\n Echo {\n\n message: Vec<String>,\n\n },\n\n Write {\n\n message: String,\n\n path: PathBuf,\n\n },\n\n Touch {\n\n path: PathBuf,\n\n },\n\n}\n\n\n", "file_path": "north_tests/test_container/src/main.rs", "rank": 67, "score": 73438.8685677028 }, { "content": "fn main() -> Result<()> {\n\n env_logger::init();\n\n\n\n match Opt::from_args() {\n\n Opt::Pack { dir, out, key } => npk::npk::pack(&dir, &out, &key)?,\n\n Opt::Unpack { npk, out } => npk::npk::unpack(&npk, &out)?,\n\n Opt::Inspect { npk } => inspect::inspect(&npk)?,\n\n Opt::GenKey { name, out } => npk::npk::gen_key(&name, &out)?,\n\n }\n\n Ok(())\n\n}\n", "file_path": "sextant/src/main.rs", "rank": 68, "score": 70760.8071945782 }, { "content": "#[allow(clippy::all)]\n\nfn main() -> Result<()> {\n\n let data = Path::new(\"/data\").join(\"input.txt\");\n\n let commands = fs::read_to_string(&data)\n\n .with_context(|| format!(\"Failed to read commands from {}\", data.display()))?;\n\n\n\n // Execute commands\n\n for command in commands.lines() {\n\n println!(\"Executing command \\\"{}\\\"\", command);\n\n let command = iter::once(\"test_container\").chain(command.split_whitespace());\n\n 
match TestCommands::from_iter(command) {\n\n TestCommands::Cat { path } => cat(&path)?,\n\n TestCommands::Crash => crash(),\n\n TestCommands::Echo { message } => echo(&message),\n\n TestCommands::Write { message, path } => write(&message, path.as_path())?,\n\n TestCommands::Touch { path } => touch(&path)?,\n\n };\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "north_tests/test_container/src/main.rs", "rank": 69, "score": 67711.12529481402 }, { "content": "fn format_containers(containers: &[Container]) {\n\n let mut table = Table::new();\n\n table.set_format(*format::consts::FORMAT_NO_BORDER_LINE_SEPARATOR);\n\n table.set_titles(Row::new(vec![\n\n Cell::new(\"Name\").with_style(Attr::Bold),\n\n Cell::new(\"Version\").with_style(Attr::Bold),\n\n Cell::new(\"Type\").with_style(Attr::Bold),\n\n Cell::new(\"PID\").with_style(Attr::Bold),\n\n Cell::new(\"Uptime\").with_style(Attr::Bold),\n\n ]));\n\n for container in containers\n\n .iter()\n\n .sorted_by_key(|c| &c.manifest.name) // Sort by name\n\n .sorted_by_key(|c| c.manifest.init.is_none())\n\n {\n\n table.add_row(Row::new(vec![\n\n Cell::new(&container.manifest.name).with_style(Attr::Bold),\n\n Cell::new(&container.manifest.version.to_string()),\n\n Cell::new(\n\n container\n", "file_path": "nstar/src/main.rs", "rank": 70, "score": 65067.04532683733 }, { "content": "#[allow(non_camel_case_types)]\n\ntype __u16 = ::libc::c_ushort;\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 71, "score": 64493.7278014488 }, { "content": "#[allow(non_camel_case_types)]\n\ntype __s8 = ::libc::c_char;\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 72, "score": 64493.7278014488 }, { "content": "#[allow(non_camel_case_types)]\n\ntype __s64 = ::libc::c_longlong;\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 73, "score": 64493.7278014488 }, { "content": "#[allow(non_camel_case_types)]\n\ntype __s32 = ::libc::c_int;\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 74, "score": 
64493.7278014488 }, { "content": "#[allow(non_camel_case_types)]\n\ntype __u32 = ::libc::c_uint;\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 75, "score": 64493.7278014488 }, { "content": "#[allow(non_camel_case_types)]\n\ntype __s16 = ::libc::c_short;\n", "file_path": "north/src/runtime/device_mapper.rs", "rank": 76, "score": 64493.7278014488 }, { "content": "fn main() -> io::Result<()> {\n\n let target_os = env::var(\"CARGO_CFG_TARGET_OS\").expect(\"Failed to get CARGO_CFG_TARGET_OS\");\n\n let target_env = env::var(\"CARGO_CFG_TARGET_ENV\").expect(\"Failed to get CARGO_CFG_TARGET_ENV\");\n\n\n\n match target_os.as_str() {\n\n \"linux\" | \"android\" => (),\n\n _ => return Ok(()),\n\n };\n\n\n\n let minijail_dir = env::var(\"CARGO_MANIFEST_DIR\")\n\n .map(PathBuf::from)\n\n .expect(\"Faild to get CARGO_MANIFEST_DIR\")\n\n .join(\"../..\");\n\n\n\n let sources = &[\n\n \"../../bpf.c\",\n\n \"../../libminijail.c\",\n\n \"../../signal_handler.c\",\n\n \"../../syscall_filter.c\",\n\n \"../../syscall_wrapper.c\",\n", "file_path": "minijail/rust/minijail-sys/build.rs", "rank": 77, "score": 64095.62450773761 }, { "content": "fn main() -> io::Result<()> {\n\n // In the manifest a mount of type data is configured on target \"/data\"\n\n let file = Path::new(\"/data\").join(\"file\");\n\n let text = \"Hello!\";\n\n\n\n // Write\n\n let mut f = fs::File::create(&file).expect(\"Failed to create foo\");\n\n println!(\"Writing {} to {}\", text, file.display());\n\n f.write_all(text.as_bytes())?;\n\n\n\n std::thread::sleep(time::Duration::from_secs(1));\n\n\n\n // Read\n\n let text = fs::read_to_string(&file)?;\n\n println!(\"Context of {}: {}\", file.display(), text);\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/container/datarw/src/main.rs", "rank": 78, "score": 64095.62450773761 }, { "content": "fn main() -> io::Result<()> {\n\n for arg in env::args().skip(1) {\n\n let greet = fs::read_to_string(&arg).unwrap_or(format!(\"No such file: {}\", arg));\n\n 
ferris_says::say(greet.as_bytes(), 100, &mut std::io::stdout())?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "examples/container/resource/ferris/src/main.rs", "rank": 79, "score": 63165.50512667294 }, { "content": "// Return true if the current thread is the only thread in the process.\n\nfn is_single_threaded() -> io::Result<bool> {\n\n match count_dir_entries(\"/proc/self/task\") {\n\n Ok(1) => Ok(true),\n\n Ok(_) => Ok(false),\n\n Err(e) => Err(e),\n\n }\n\n}\n\n\n\n/*\n\n#[cfg(test)]\n\nmod tests {\n\n use std::process::exit;\n\n\n\n use super::*;\n\n\n\n const SHELL: &str = \"/bin/sh\";\n\n\n\n #[test]\n\n fn create_and_free() {\n\n unsafe {\n", "file_path": "minijail/rust/minijail/src/lib.rs", "rank": 80, "score": 59606.75916267613 }, { "content": "type ExitHandleSignal = mpsc::Sender<ExitStatus>;\n\n\n", "file_path": "north/src/runtime/process.rs", "rank": 81, "score": 59107.28700601352 }, { "content": "fn format_notification(notification: &Notification, json: bool) {\n\n if json {\n\n println!(\"{}\", serde_json::to_string_pretty(&notification).unwrap());\n\n } else {\n\n match notification {\n\n api::Notification::OutOfMemory(name) => println!(\"{} is out of memory\", name),\n\n api::Notification::ApplicationExited {\n\n id,\n\n version,\n\n exit_info,\n\n } => {\n\n println!(\"Application {}-{} exited with {}\", id, version, exit_info);\n\n }\n\n api::Notification::Install(name, version) => println!(\"Installed {}-{}\", name, version),\n\n api::Notification::Uninstalled(name, version) => {\n\n println!(\"Uninstallation {}-{}\", name, version)\n\n }\n\n api::Notification::ApplicationStarted(name, version) => {\n\n println!(\"Started {}-{}\", name, version)\n\n }\n\n api::Notification::ApplicationStopped(name, version) => {\n\n println!(\"Stopped {}-{}\", name, version)\n\n }\n\n api::Notification::Shutdown => println!(\"Shutdown\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "nstar/src/main.rs", "rank": 82, "score": 58449.221517855156 }, { "content": "type 
NotificationRx = broadcast::Receiver<api::Notification>;\n\n\n\n// Request from the main loop to the console\n\n#[derive(Debug)]\n\npub(crate) enum Request {\n\n Message(api::Message),\n\n Install(api::Message, PathBuf),\n\n}\n\n\n\n/// A console is responsible for monitoring and serving incoming client connections\n\n/// It feeds relevant events back to the runtime and forwards responses and notifications\n\n/// to connected clients\n\npub(crate) struct Console {\n\n event_tx: EventTx,\n\n address: String,\n\n notification_tx: broadcast::Sender<api::Notification>,\n\n}\n\n\n\n#[derive(Error, Debug)]\n\npub enum Error {\n", "file_path": "north/src/runtime/console.rs", "rank": 83, "score": 57897.69224448488 }, { "content": "fn list_containers(state: &State) -> Vec<api::Container> {\n\n let mut app_containers: Vec<api::Container> = state\n\n .applications()\n\n .map(|app| api::Container {\n\n manifest: app.manifest().clone(),\n\n process: app.process_context().map(|f| api::Process {\n\n pid: f.process().pid(),\n\n uptime: f.uptime().as_nanos() as u64,\n\n memory: {\n\n {\n\n const PAGE_SIZE: usize = 4096;\n\n let pid = f.process().pid();\n\n\n\n procinfo::pid::statm(pid as i32)\n\n .ok()\n\n .map(|statm| api::Memory {\n\n size: (statm.size * PAGE_SIZE) as u64,\n\n resident: (statm.resident * PAGE_SIZE) as u64,\n\n shared: (statm.share * PAGE_SIZE) as u64,\n\n text: (statm.text * PAGE_SIZE) as u64,\n", "file_path": "north/src/runtime/console.rs", "rank": 84, "score": 55079.53606591482 }, { "content": "// Copyright (c) 2019 - 2020 ESRLabs\n\n//\n\n// Licensed under the Apache License, Version 2.0 (the \"License\");\n\n// you may not use this file except in compliance with the License.\n\n// You may obtain a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n// Unless required by applicable law or agreed to in writing, software\n\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n\n// WITHOUT WARRANTIES OR 
CONDITIONS OF ANY KIND, either express or implied.\n\n// See the License for the specific language governing permissions and\n\n// limitations under the License.\n\n\n\nuse crate::api;\n\nuse std::io;\n\nuse thiserror::Error;\n\n\n\n#[derive(Error, Debug)]\n\npub enum Error {\n", "file_path": "north/src/runtime/error.rs", "rank": 85, "score": 48118.23277830127 }, { "content": " api::Error::ApplicationAlreadyInstalled(name)\n\n }\n\n Error::ResourceAlreadyInstalled(resource) => {\n\n api::Error::ResourceAlreadyInstalled(resource)\n\n }\n\n Error::Npk(error) => api::Error::Npk(error.to_string()),\n\n Error::Process(error) => api::Error::Process(error.to_string()),\n\n Error::Console(error) => api::Error::Console(error.to_string()),\n\n Error::Cgroups(error) => api::Error::Cgroups(error.to_string()),\n\n Error::Mount(error) => api::Error::Mount(error.to_string()),\n\n Error::Key(error) => api::Error::Key(error.to_string()),\n\n Error::Io(cause, error) => api::Error::Io(format!(\"{}: {}\", cause, error)),\n\n Error::Os(cause, error) => api::Error::Os(format!(\"{}: {}\", cause, error)),\n\n }\n\n }\n\n}\n", "file_path": "north/src/runtime/error.rs", "rank": 86, "score": 48112.726186182335 }, { "content": " Cgroups(#[from] super::cgroups::Error),\n\n #[error(\"Mount: {0:?}\")]\n\n Mount(super::mount::Error),\n\n #[error(\"Key: {0:?}\")]\n\n Key(super::keys::Error),\n\n\n\n #[error(\"Io: {0}: {1:?}\")]\n\n Io(String, io::Error),\n\n #[error(\"Os: {0}: {1:?}\")]\n\n Os(String, nix::Error),\n\n}\n\n\n\nimpl From<Error> for api::Error {\n\n fn from(error: Error) -> api::Error {\n\n match error {\n\n Error::ApplicationNotFound => api::Error::ApplicationNotFound,\n\n Error::ApplicationNotRunning => api::Error::ApplicationNotRunning,\n\n Error::ApplicationRunning(name) => api::Error::ApplicationRunning(name),\n\n Error::MissingResource(resource) => api::Error::MissingResource(resource),\n\n Error::ApplicationAlreadyInstalled(name) => {\n", "file_path": 
"north/src/runtime/error.rs", "rank": 87, "score": 48111.35269101907 }, { "content": " #[error(\"No application found\")]\n\n ApplicationNotFound,\n\n #[error(\"Application is not running\")]\n\n ApplicationNotRunning,\n\n #[error(\"Application {0} is running\")]\n\n ApplicationRunning(String),\n\n #[error(\"Missing resource {0}\")]\n\n MissingResource(String),\n\n #[error(\"Application {0} already installed\")]\n\n ApplicationAlreadyInstalled(String),\n\n #[error(\"Resource {0} is already installed\")]\n\n ResourceAlreadyInstalled(String),\n\n\n\n #[error(\"NPK: {0:?}\")]\n\n Npk(npk::Error),\n\n #[error(\"Process: {0:?}\")]\n\n Process(super::process::Error),\n\n #[error(\"Console: {0:?}\")]\n\n Console(super::console::Error),\n\n #[error(\"Cgroups: {0}\")]\n", "file_path": "north/src/runtime/error.rs", "rank": 88, "score": 48108.903899881974 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct SerdeHashes {\n\n #[serde(rename(serialize = \"manifest.yaml\"))]\n\n #[serde(rename(deserialize = \"manifest.yaml\"))]\n\n manifest: HashMap<String, String>,\n\n #[serde(rename(serialize = \"fs.img\"))]\n\n #[serde(rename(deserialize = \"fs.img\"))]\n\n fs: HashMap<String, String>,\n\n}\n\n\n\nimpl TryFrom<SerdeHashes> for Hashes {\n\n type Error = Error;\n\n fn try_from(s: SerdeHashes) -> Result<Hashes, Error> {\n\n let manifest_hash = s\n\n .manifest\n\n .get(\"hash\")\n\n .map(ToOwned::to_owned)\n\n .ok_or_else(|| {\n\n Error::MalformedManifest(\"Missing hash for manifest.yaml\".to_string())\n\n })?;\n\n\n", "file_path": "npk/src/archive.rs", "rank": 89, "score": 47068.4673392612 }, { "content": "\tconst char *source, *dest;\n", "file_path": "minijail/libminijail.c", "rank": 90, "score": 45986.394286813695 }, { "content": "\tElf64_Word p_type; /* Segment type */\n", "file_path": "minijail/elfparse.h", "rank": 91, "score": 45948.05106667875 }, { "content": "\tElf64_Half e_type; /* Object file type */\n", "file_path": "minijail/elfparse.h", "rank": 92, 
"score": 45948.05106667875 }, { "content": "#define _GNU_SOURCE\n\n\n", "file_path": "minijail/util.c", "rank": 93, "score": 44977.79964201186 }, { "content": "#define _BSD_SOURCE\n", "file_path": "minijail/libminijail.c", "rank": 94, "score": 44977.79964201186 }, { "content": "#define _GNU_SOURCE\n\n\n", "file_path": "minijail/libminijail.c", "rank": 95, "score": 44965.394051382784 }, { "content": "#define _DEFAULT_SOURCE\n", "file_path": "minijail/libminijail.c", "rank": 96, "score": 44965.394051382784 }, { "content": " dm_device_size: u64,\n\n verity_hash: &str,\n\n fs_path: &Path,\n\n fs: &mut fs::File,\n\n fs_offset: u64,\n\n lo_size: u64,\n\n root: &Path,\n\n) -> Result<PathBuf, Error> {\n\n let mut fstype = [0u8; 4];\n\n fs.seek(io::SeekFrom::Start(fs_offset))\n\n .await\n\n .map_err(|e| Error::Io(\"Failed seek to fs type\".into(), e))?;\n\n fs.read_exact(&mut fstype)\n\n .await\n\n .map_err(|e| Error::Io(\"Failed read fs type\".into(), e))?;\n\n let fs_type = if &fstype == b\"hsqs\" {\n\n debug!(\"Detected SquashFS file system\");\n\n \"squashfs\"\n\n } else {\n\n debug!(\"Defaulting to ext filesystem type\");\n", "file_path": "north/src/runtime/mount.rs", "rank": 99, "score": 36.77877520353845 } ]
Rust
src/dovi/general_read_write.rs
amsokol/dovi_tool
458aadecf266575f199814a0479deb2dbe0fc95c
use std::io::{stdout, BufRead, BufReader, BufWriter, Write}; use std::path::PathBuf; use std::{fs::File, path::Path}; use anyhow::{bail, Result}; use indicatif::ProgressBar; use hevc_parser::hevc::{NALUnit, NAL_SEI_PREFIX, NAL_UNSPEC62, NAL_UNSPEC63}; use hevc_parser::io::{processor, IoFormat, IoProcessor}; use hevc_parser::{HevcParser, NALUStartCode}; use processor::{HevcProcessor, HevcProcessorOpts}; use super::{convert_encoded_from_opts, is_st2094_40_sei, CliOptions}; pub struct DoviProcessor { input: PathBuf, options: CliOptions, rpu_nals: Vec<RpuNal>, previous_rpu_index: u64, progress_bar: ProgressBar, dovi_writer: DoviWriter, } pub struct DoviWriter { bl_writer: Option<BufWriter<File>>, el_writer: Option<BufWriter<File>>, rpu_writer: Option<BufWriter<File>>, sl_writer: Option<BufWriter<File>>, } #[derive(Debug)] pub struct RpuNal { decoded_index: usize, presentation_number: usize, data: Vec<u8>, } impl DoviWriter { pub fn new( bl_out: Option<&Path>, el_out: Option<&Path>, rpu_out: Option<&Path>, single_layer_out: Option<&Path>, ) -> DoviWriter { let chunk_size = 100_000; let bl_writer = bl_out.map(|bl_out| { BufWriter::with_capacity( chunk_size, File::create(bl_out).expect("Can't create file for BL"), ) }); let el_writer = el_out.map(|el_out| { BufWriter::with_capacity( chunk_size, File::create(el_out).expect("Can't create file for EL"), ) }); let rpu_writer = rpu_out.map(|rpu_out| { BufWriter::with_capacity( chunk_size, File::create(rpu_out).expect("Can't create file for RPU"), ) }); let sl_writer = single_layer_out.map(|single_layer_out| { BufWriter::with_capacity( chunk_size, File::create(single_layer_out).expect("Can't create file for SL output"), ) }); DoviWriter { bl_writer, el_writer, rpu_writer, sl_writer, } } } impl DoviProcessor { pub fn new( options: CliOptions, input: PathBuf, dovi_writer: DoviWriter, progress_bar: ProgressBar, ) -> DoviProcessor { DoviProcessor { input, options, rpu_nals: Vec::new(), previous_rpu_index: 0, progress_bar, 
dovi_writer, } } pub fn read_write_from_io(&mut self, format: &IoFormat) -> Result<()> { let chunk_size = 100_000; let parse_nals = self.dovi_writer.rpu_writer.is_some(); let processor_opts = HevcProcessorOpts { parse_nals, ..Default::default() }; let mut processor = HevcProcessor::new(format.clone(), processor_opts, chunk_size); let stdin = std::io::stdin(); let mut reader = Box::new(stdin.lock()) as Box<dyn BufRead>; if let IoFormat::Raw = format { let file = File::open(&self.input)?; reader = Box::new(BufReader::with_capacity(100_000, file)); } processor.process_io(&mut reader, self) } pub fn write_nals(&mut self, chunk: &[u8], nals: &[NALUnit]) -> Result<()> { for nal in nals { if self.options.drop_hdr10plus && nal.nal_type == NAL_SEI_PREFIX && is_st2094_40_sei(&chunk[nal.start..nal.end])? { continue; } if self.previous_rpu_index > 0 && nal.nal_type == NAL_UNSPEC62 && nal.decoded_frame_index == self.previous_rpu_index { println!( "Warning: Unexpected RPU NALU found for frame {}. Discarding.", self.previous_rpu_index ); continue; } if let Some(ref mut sl_writer) = self.dovi_writer.sl_writer { if nal.nal_type == NAL_UNSPEC63 && self.options.discard_el { continue; } sl_writer.write_all(hevc_parser::NALUStartCode::Length4.slice())?; if nal.nal_type == NAL_UNSPEC62 && (self.options.mode.is_some() || self.options.edit_config.is_some()) { let modified_data = convert_encoded_from_opts(&self.options, &chunk[nal.start..nal.end])?; sl_writer.write_all(&modified_data)?; continue; } sl_writer.write_all(&chunk[nal.start..nal.end])?; continue; } match nal.nal_type { NAL_UNSPEC63 => { if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.write_all(NALUStartCode::Length4.slice())?; el_writer.write_all(&chunk[nal.start + 2..nal.end])?; } } NAL_UNSPEC62 => { self.previous_rpu_index = nal.decoded_frame_index; if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.write_all(NALUStartCode::Length4.slice())?; } let rpu_data = 
&chunk[nal.start..nal.end]; if self.options.mode.is_some() || self.options.edit_config.is_some() { let modified_data = convert_encoded_from_opts(&self.options, rpu_data)?; if let Some(ref mut _rpu_writer) = self.dovi_writer.rpu_writer { self.rpu_nals.push(RpuNal { decoded_index: self.rpu_nals.len(), presentation_number: 0, data: modified_data[2..].to_owned(), }); } else if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.write_all(&modified_data)?; } } else if let Some(ref mut _rpu_writer) = self.dovi_writer.rpu_writer { self.rpu_nals.push(RpuNal { decoded_index: self.rpu_nals.len(), presentation_number: 0, data: rpu_data[2..].to_vec(), }); } else if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.write_all(rpu_data)?; } } _ => { if let Some(ref mut bl_writer) = self.dovi_writer.bl_writer { bl_writer.write_all(NALUStartCode::Length4.slice())?; bl_writer.write_all(&chunk[nal.start..nal.end])?; } } } } Ok(()) } fn flush_writer(&mut self, parser: &HevcParser) -> Result<()> { if let Some(ref mut bl_writer) = self.dovi_writer.bl_writer { bl_writer.flush()?; } if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.flush()?; } if let Some(ref mut rpu_writer) = self.dovi_writer.rpu_writer { let frames = parser.ordered_frames(); if frames.is_empty() { bail!("No frames parsed!"); } print!("Reordering metadata... "); stdout().flush().ok(); self.rpu_nals.sort_by_cached_key(|rpu| { let matching_index = frames .iter() .position(|f| rpu.decoded_index == f.decoded_number as usize); if let Some(i) = matching_index { frames[i].presentation_number } else { panic!( "Missing frame/slices for metadata! 
Decoded index {}", rpu.decoded_index ); } }); self.rpu_nals .iter_mut() .enumerate() .for_each(|(idx, rpu)| rpu.presentation_number = idx); println!("Done."); for rpu in self.rpu_nals.iter_mut() { rpu_writer.write_all(NALUStartCode::Length4.slice())?; rpu_writer.write_all(&rpu.data)?; } rpu_writer.flush()?; } Ok(()) } } impl IoProcessor for DoviProcessor { fn input(&self) -> &std::path::PathBuf { &self.input } fn update_progress(&mut self, delta: u64) { self.progress_bar.inc(delta); } fn process_nals(&mut self, _parser: &HevcParser, nals: &[NALUnit], chunk: &[u8]) -> Result<()> { self.write_nals(chunk, nals) } fn finalize(&mut self, parser: &HevcParser) -> Result<()> { self.progress_bar.finish_and_clear(); self.flush_writer(parser) } }
use std::io::{stdout, BufRead, BufReader, BufWriter, Write}; use std::path::PathBuf; use std::{fs::File, path::Path}; use anyhow::{bail, Result}; use indicatif::ProgressBar; use hevc_parser::hevc::{NALUnit, NAL_SEI_PREFIX, NAL_UNSPEC62, NAL_UNSPEC63}; use hevc_parser::io::{processor, IoFormat, IoProcessor}; use hevc_parser::{HevcParser, NALUStartCode}; use processor::{HevcProcessor, HevcProcessorOpts}; use super::{convert_encoded_from_opts, is_st2094_40_sei, CliOptions}; pub struct DoviProcessor { input: PathBuf, options: CliOptions, rpu_nals: Vec<RpuNal>, previous_rpu_index: u64, progress_bar: ProgressBar, dovi_writer: DoviWriter, } pub struct DoviWriter { bl_writer: Option<BufWriter<File>>, el_writer: Option<BufWriter<File>>, rpu_writer: Option<BufWriter<File>>, sl_writer: Option<BufWriter<File>>, } #[derive(Debug)] pub struct RpuNal { decoded_index: usize, presentation_number: usize, data: Vec<u8>, } impl DoviWriter { pub fn new( bl_out: Option<&Path>, el_out: Option<&Path>, rpu_out: Option<&Path>, single_layer_out: Option<&Path>, ) -> DoviWriter { let chunk_size = 100_000; let bl_writer = bl_out.map(|bl_out| { BufWriter::with_capacity( chunk_size, File::create(bl_out).expect("Can't create file for BL"), ) }); let el_writer = el_out.map(|el_out| { BufWriter::with_capacity( chunk_size, File::create(el_out).expect("Can't create file for EL"), ) }); let rpu_writer = rpu_out.map(|rpu_out| { BufWriter::with_capacity( chunk_size, File::create(rpu_out).expect("Can't create file for RPU"), ) }); let sl_writer = single_layer_out.map(|single_layer_out| { BufWriter::with_capacity( chunk_size, File::create(single_layer_out).expect("Can't create file for SL output"), ) }); DoviWriter { bl_writer, el_writer, rpu_writer, sl_writer, } } } impl DoviProcessor { pub fn new( options: CliOptions, input: PathBuf, dovi_writer: DoviWriter, progress_bar: ProgressBar, ) -> DoviProcessor { DoviProcessor { input, options, rpu_nals: Vec::new(), previous_rpu_index: 0, progress_bar, 
dovi_writer, } } pub fn read_write_from_io(&mut self, format: &IoFormat) -> Result<()> { let chunk_size = 100_000; let parse_nals = self.dovi_writer.rpu_writer.is_some(); let processor_opts = HevcProcessorOpts { parse_nals, ..Default::default() }; let mut processor = HevcProcessor::new(format.clone(), processor_opts, chunk_size); let stdin = std::io::stdin(); let mut reader = Box::new(stdin.lock()) as Box<dyn BufRead>; if let IoFormat::Raw = format { let file = File::open(&self.input)?; reader = Box::new(BufReader::with_capacity(100_000, file)); } processor.process_io(&mut reader, self) } pub fn write_nals(&mut self, chunk: &[u8], nals: &[NALUnit]) -> Result<()> { for nal in nals { if self.options.drop_hdr10plus && nal.nal_type == NAL_SEI_PREFIX && is_st2094_40_sei(&chunk[nal.start..nal.end])? { continue; } if self.previous_rpu_index > 0 && nal.nal_type == NAL_UNSPEC62 && nal.decoded_frame_index == self.previous_rpu_index { println!( "Warning: Unexpected RPU NALU found for frame {}. Discarding.", self.previous_rpu_index ); continue; } if let Some(ref mut sl_writer) = self.dovi_writer.sl_writer { if nal.nal_type == NAL_UNSPEC63 && self.options.discard_el { continue; } sl_writer.write_all(hevc_parser::NALUStartCode::Length4.slice())?; if nal.nal_type == NAL_UNSPEC62 && (self.options.mode.is_some() || self.options.edit_config.is_some()) { let modified_data = convert_encoded_from_opts(&self.options, &chunk[nal.start..nal.end])?; sl_writer.write_all(&modified_data)?; continue; } sl_writer.write_all(&chunk[nal.start..nal.end])?; continue; } match nal.nal_type { NAL_UNSPEC63 => { if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.write_all(NALUStartCode::Length4.slice())?; el_writer.write_all(&chunk[nal.start + 2..nal.end])?; } } NAL_UNSPEC62 => { self.previous_rpu_index = nal.decoded_frame_index; if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.write_all(NALUStartCode::Length4.slice())?; } let rpu_data = 
&chunk[nal.start..nal.end]; if self.options.mode.is_some() || self.options.edit_config.is_some() { let modified_data = convert_encoded_from_opts(&self.options, rpu_data)?; if let Some(ref mut _rpu_writer) = self.dovi_writer.rpu_writer { self.rpu_nals.push(RpuNal { decoded_index: self.rpu_nals.len(), presentation_number: 0, data: modified_data[2..].to_owned(), }); } else if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.write_all(&modified_data)?; } } else if let Some(ref mut _rpu_writer) = self.dovi_writer.rpu_writer { self.rpu_nals.push(RpuNal { decoded_index: self.rpu_nals.len(), presentation_number: 0, data: rpu_data[2..].to_vec(), }); } else if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.write_all(rpu_data)?; } } _ => { if let Some(ref mut bl_writer) = self.dovi_writer.bl_writer { bl_writer.write_all(NALUStartCode::Length4.slice())?; bl_writer.write_all(&chunk[nal.start..nal.end])?; } } } } Ok(()) }
} impl IoProcessor for DoviProcessor { fn input(&self) -> &std::path::PathBuf { &self.input } fn update_progress(&mut self, delta: u64) { self.progress_bar.inc(delta); } fn process_nals(&mut self, _parser: &HevcParser, nals: &[NALUnit], chunk: &[u8]) -> Result<()> { self.write_nals(chunk, nals) } fn finalize(&mut self, parser: &HevcParser) -> Result<()> { self.progress_bar.finish_and_clear(); self.flush_writer(parser) } }
fn flush_writer(&mut self, parser: &HevcParser) -> Result<()> { if let Some(ref mut bl_writer) = self.dovi_writer.bl_writer { bl_writer.flush()?; } if let Some(ref mut el_writer) = self.dovi_writer.el_writer { el_writer.flush()?; } if let Some(ref mut rpu_writer) = self.dovi_writer.rpu_writer { let frames = parser.ordered_frames(); if frames.is_empty() { bail!("No frames parsed!"); } print!("Reordering metadata... "); stdout().flush().ok(); self.rpu_nals.sort_by_cached_key(|rpu| { let matching_index = frames .iter() .position(|f| rpu.decoded_index == f.decoded_number as usize); if let Some(i) = matching_index { frames[i].presentation_number } else { panic!( "Missing frame/slices for metadata! Decoded index {}", rpu.decoded_index ); } }); self.rpu_nals .iter_mut() .enumerate() .for_each(|(idx, rpu)| rpu.presentation_number = idx); println!("Done."); for rpu in self.rpu_nals.iter_mut() { rpu_writer.write_all(NALUStartCode::Length4.slice())?; rpu_writer.write_all(&rpu.data)?; } rpu_writer.flush()?; } Ok(()) }
function_block-full_function
[ { "content": "pub fn write_rpu_file(output_path: &Path, data: Vec<Vec<u8>>) -> Result<()> {\n\n println!(\"Writing RPU file...\");\n\n let mut writer = BufWriter::with_capacity(\n\n 100_000,\n\n File::create(output_path).expect(\"Can't create file\"),\n\n );\n\n\n\n for encoded_rpu in data {\n\n writer.write_all(NALUStartCode::Length4.slice())?;\n\n\n\n // Remove 0x7C01\n\n writer.write_all(&encoded_rpu[2..])?;\n\n }\n\n\n\n writer.flush()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/dovi/mod.rs", "rank": 0, "score": 350274.374527926 }, { "content": "pub fn _parse_file(input: PathBuf) -> Result<(Vec<u8>, DoviRpu)> {\n\n let mut f = File::open(input)?;\n\n let metadata = f.metadata()?;\n\n\n\n let mut original_data = vec![0; metadata.len() as usize];\n\n f.read_exact(&mut original_data)?;\n\n\n\n let dovi_rpu = DoviRpu::parse_unspec62_nalu(&original_data)?;\n\n\n\n Ok((original_data, dovi_rpu))\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 1, "score": 320175.85159514385 }, { "content": "pub fn rpu_data_header(dovi_rpu: &mut DoviRpu, reader: &mut BitVecReader) -> Result<()> {\n\n dovi_rpu.header = RpuDataHeader::parse(reader)?;\n\n\n\n Ok(())\n\n}\n\n\n\nimpl RpuDataHeader {\n\n pub fn parse(reader: &mut BitVecReader) -> Result<RpuDataHeader> {\n\n let mut rpu_nal = RpuDataHeader {\n\n rpu_nal_prefix: reader.get_n(8),\n\n ..Default::default()\n\n };\n\n\n\n if rpu_nal.rpu_nal_prefix == 25 {\n\n rpu_nal.rpu_type = reader.get_n(6);\n\n rpu_nal.rpu_format = reader.get_n(11);\n\n\n\n if rpu_nal.rpu_type == 2 {\n\n rpu_nal.vdr_rpu_profile = reader.get_n(4);\n\n\n", "file_path": "dolby_vision/src/rpu/rpu_data_header.rs", "rank": 2, "score": 319761.54592810356 }, { "content": "pub fn vdr_rpu_data_payload(dovi_rpu: &mut DoviRpu, reader: &mut BitVecReader) -> Result<()> {\n\n dovi_rpu.rpu_data_mapping = Some(RpuDataMapping::parse(reader, &mut dovi_rpu.header)?);\n\n\n\n if dovi_rpu.header.nlq_method_idc.is_some() {\n\n dovi_rpu.rpu_data_nlq = 
Some(RpuDataNlq::parse(reader, &mut dovi_rpu.header)?);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\nimpl RpuDataMapping {\n\n pub fn parse(reader: &mut BitVecReader, header: &mut RpuDataHeader) -> Result<RpuDataMapping> {\n\n let mut data = RpuDataMapping::default();\n\n\n\n let coefficient_log2_denom_length = if header.coefficient_data_type == 0 {\n\n header.coefficient_log2_denom as usize\n\n } else if header.coefficient_data_type == 1 {\n\n 32\n\n } else {\n\n bail!(\"Invalid coefficient_data_type value!\");\n", "file_path": "dolby_vision/src/rpu/rpu_data_mapping.rs", "rank": 3, "score": 316421.301306279 }, { "content": "pub fn parse_rpu_file(input: &Path) -> Result<Option<Vec<DoviRpu>>> {\n\n let rpu_file = File::open(input)?;\n\n let metadata = rpu_file.metadata()?;\n\n\n\n // Should never be this large, avoid mistakes\n\n if metadata.len() > 250_000_000 {\n\n bail!(\"Input file probably too large\");\n\n }\n\n\n\n let mut reader = BufReader::new(rpu_file);\n\n\n\n // Should be small enough to fit in the memory\n\n let mut data = vec![0; metadata.len() as usize];\n\n reader.read_exact(&mut data)?;\n\n\n\n let mut offsets = Vec::with_capacity(200_000);\n\n let mut parser = HevcParser::with_nalu_start_code(NALUStartCode::Length4);\n\n\n\n parser.get_offsets(&data, &mut offsets);\n\n\n", "file_path": "utilities_dovi/src/lib.rs", "rank": 4, "score": 300627.15968089685 }, { "content": "pub fn convert_encoded_from_opts(opts: &CliOptions, data: &[u8]) -> Result<Vec<u8>> {\n\n let mut dovi_rpu = DoviRpu::parse_unspec62_nalu(data)?;\n\n\n\n // Config overrides manual arguments\n\n if let Some(edit_config) = &opts.edit_config {\n\n edit_config.execute_single_rpu(&mut dovi_rpu)?;\n\n } else {\n\n if let Some(mode) = opts.mode {\n\n dovi_rpu.convert_with_mode(mode)?;\n\n }\n\n\n\n if opts.crop {\n\n dovi_rpu.crop()?;\n\n }\n\n }\n\n\n\n dovi_rpu.write_hevc_unspec62_nalu()\n\n}\n\n\n", "file_path": "src/dovi/mod.rs", "rank": 5, "score": 275489.9082418484 }, { "content": "pub fn 
initialize_progress_bar(format: &IoFormat, input: &Path) -> Result<ProgressBar> {\n\n let pb: ProgressBar;\n\n let bytes_count;\n\n\n\n if let IoFormat::RawStdin = format {\n\n pb = ProgressBar::hidden();\n\n } else {\n\n let file = File::open(input).expect(\"No file found\");\n\n\n\n //Info for indicatif ProgressBar\n\n let file_meta = file.metadata()?;\n\n bytes_count = file_meta.len() / 100_000_000;\n\n\n\n pb = ProgressBar::new(bytes_count);\n\n pb.set_style(\n\n ProgressStyle::default_bar().template(\"[{elapsed_precise}] {bar:60.cyan} {percent}%\"),\n\n );\n\n }\n\n\n\n Ok(pb)\n\n}\n\n\n", "file_path": "src/dovi/mod.rs", "rank": 6, "score": 270705.8381240093 }, { "content": "fn _debug(data: &[u8]) -> Result<()> {\n\n use std::fs::OpenOptions;\n\n use std::io::Write;\n\n\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .truncate(true)\n\n .open(\"test.bin\")?;\n\n\n\n file.write_all(NALUStartCode::Length4.slice())?;\n\n file.write_all(&data[2..])?;\n\n\n\n file.flush()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 7, "score": 262514.23875234945 }, { "content": "/// Escapes the vec to annexb to avoid emulating a start code by accident\n\npub fn add_start_code_emulation_prevention_3_byte(data: &mut Vec<u8>) {\n\n let mut count = data.len();\n\n let mut i = 0;\n\n\n\n while i < count {\n\n if i > 2 && i < count - 2 && data[i - 2] == 0 && data[i - 1] == 0 && data[i] <= 3 {\n\n data.insert(i, 3);\n\n count += 1;\n\n }\n\n\n\n i += 1;\n\n }\n\n}\n\n\n\n/// Serializing a bitvec as a vec of bits\n", "file_path": "dolby_vision/src/utils.rs", "rank": 8, "score": 242939.7738477729 }, { "content": "pub fn input_from_either(cmd: &str, in1: Option<PathBuf>, in2: Option<PathBuf>) -> Result<PathBuf> {\n\n match in1 {\n\n Some(in1) => Ok(in1),\n\n None => match in2 {\n\n Some(in2) => Ok(in2),\n\n None => bail!(\"No input file provided. 
See `dovi_tool {} --help`\", cmd),\n\n },\n\n }\n\n}\n", "file_path": "src/dovi/mod.rs", "rank": 9, "score": 233137.5216606884 }, { "content": "pub fn is_st2094_40_sei(sei_payload: &[u8]) -> Result<bool> {\n\n if sei_payload.len() >= 4 {\n\n let sei = SeiMessage::from_bytes(sei_payload)?;\n\n\n\n if sei.payload_type == USER_DATA_REGISTERED_ITU_T_35 {\n\n // FIXME: Not sure why 4 bytes..\n\n let itu_t35_bytes = &sei_payload[4..];\n\n\n\n if itu_t35_bytes.len() >= 7 {\n\n let itu_t_t35_country_code = itu_t35_bytes[0];\n\n let itu_t_t35_terminal_provider_code =\n\n u16::from_be_bytes(itu_t35_bytes[1..3].try_into()?);\n\n let itu_t_t35_terminal_provider_oriented_code =\n\n u16::from_be_bytes(itu_t35_bytes[3..5].try_into()?);\n\n\n\n if itu_t_t35_country_code == 0xB5\n\n && itu_t_t35_terminal_provider_code == 0x003C\n\n && itu_t_t35_terminal_provider_oriented_code == 0x0001\n\n {\n\n let application_identifier = itu_t35_bytes[5];\n", "file_path": "src/dovi/mod.rs", "rank": 10, "score": 219593.51491279295 }, { "content": "/// Copied from hevc_parser for convenience, and to avoid a dependency\n\n/// Unescapes a byte slice from annexb.\n\n/// Allocates a new Vec.\n\npub fn clear_start_code_emulation_prevention_3_byte(data: &[u8]) -> Vec<u8> {\n\n data.iter()\n\n .enumerate()\n\n .filter_map(|(index, value)| {\n\n if index > 2\n\n && index < data.len() - 2\n\n && data[index - 2] == 0\n\n && data[index - 1] == 0\n\n && data[index] <= 3\n\n {\n\n None\n\n } else {\n\n Some(*value)\n\n }\n\n })\n\n .collect::<Vec<u8>>()\n\n}\n\n\n", "file_path": "dolby_vision/src/utils.rs", "rank": 11, "score": 212568.37942598702 }, { "content": "#[test]\n\nfn el_only() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular_start_code_4_muxed_el.hevc\");\n\n let expected_el = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n\n\n let output_bl 
= temp.child(\"BL.hevc\");\n\n let output_el = temp.child(\"EL.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--bl-out\")\n\n .arg(output_bl.as_ref())\n\n .arg(\"--el-out\")\n\n .arg(output_el.as_ref())\n\n .arg(\"--el-only\")\n\n .assert();\n\n\n", "file_path": "tests/hevc/demux.rs", "rank": 12, "score": 203894.0234328258 }, { "content": "#[test]\n\nfn eos_before_el() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_bl = Path::new(\"assets/hevc_tests/regular_bl_start_code_4.hevc\");\n\n let input_el = Path::new(\"assets/hevc_tests/regular.hevc\");\n\n\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n let expected_bl_el_rpu = Path::new(\"assets/hevc_tests/yusesope_regular_muxed.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--bl\")\n\n .arg(input_bl)\n\n .arg(\"--el\")\n\n .arg(input_el)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .arg(\"--eos-before-el\")\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_bl_el_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/mux.rs", "rank": 13, "score": 199600.17625043378 }, { "content": "#[inline(always)]\n\nfn compute_crc32(data: &[u8]) -> u32 {\n\n let crc = Crc::<u32>::new(&CRC_32_MPEG_2);\n\n let mut digest = crc.digest();\n\n digest.update(data);\n\n\n\n digest.finalize()\n\n}\n", "file_path": "dolby_vision/src/rpu/mod.rs", "rank": 14, "score": 196534.93666298228 }, { "content": "#[test]\n\nfn mode_lossless_el_only() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular_start_code_4_muxed_el.hevc\");\n\n let expected_el = 
Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n\n\n let output_bl = temp.child(\"BL.hevc\");\n\n let output_el = temp.child(\"EL.hevc\");\n\n\n\n let assert = cmd\n\n .arg(\"--mode\")\n\n .arg(\"0\")\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--bl-out\")\n\n .arg(output_bl.as_ref())\n\n .arg(\"--el-out\")\n\n .arg(output_el.as_ref())\n\n .arg(\"--el-only\")\n", "file_path": "tests/hevc/demux.rs", "rank": 15, "score": 195526.33898896008 }, { "content": "#[test]\n\nfn data_before_crc32() -> Result<()> {\n\n let (original_data, dovi_rpu) =\n\n _parse_file(PathBuf::from(\"./assets/tests/data_before_crc32.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 16, "score": 194080.01215164276 }, { "content": "#[test]\n\nfn fix_se_write() -> Result<()> {\n\n let (original_data, dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/fix_se_write.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 17, "score": 190982.2939849086 }, { "content": "#[cfg(feature = \"serde_feature\")]\n\npub fn bitvec_ser_bits<S: Serializer>(bitvec: &BitVec<u8, Msb0>, s: S) -> Result<S::Ok, S::Error> {\n\n let bits: Vec<u8> = bitvec.iter().map(|b| *b as u8).collect();\n\n bits.serialize(s)\n\n}\n", "file_path": "dolby_vision/src/utils.rs", "rank": 18, "score": 189111.1345893755 }, { "content": "pub fn vdr_dm_data_payload(\n\n dovi_rpu: &mut DoviRpu,\n\n reader: &mut BitVecReader,\n\n final_length: usize,\n\n) -> Result<()> {\n\n let compressed_dm_data = dovi_rpu.header.reserved_zero_3bits == 1;\n\n\n\n let mut vdr_dm_data = if compressed_dm_data {\n\n VdrDmData {\n\n compressed: true,\n\n\n\n 
affected_dm_metadata_id: reader.get_ue()?,\n\n current_dm_metadata_id: reader.get_ue()?,\n\n scene_refresh_flag: reader.get_ue()?,\n\n ..Default::default()\n\n }\n\n } else {\n\n VdrDmData::parse(reader)?\n\n };\n\n\n", "file_path": "dolby_vision/src/rpu/vdr_dm_data.rs", "rank": 19, "score": 188108.5573748337 }, { "content": "#[test]\n\nfn discard() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_bl = Path::new(\"assets/hevc_tests/regular_bl_start_code_4.hevc\");\n\n let input_el = Path::new(\"assets/hevc_tests/regular.hevc\");\n\n\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n let expected_bl_el_rpu = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--bl\")\n\n .arg(input_bl)\n\n .arg(\"--el\")\n\n .arg(input_el)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .arg(\"--discard\")\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_bl_el_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/mux.rs", "rank": 20, "score": 188006.92311042623 }, { "content": "#[test]\n\nfn generated_rpu() -> Result<()> {\n\n use dolby_vision::rpu::extension_metadata::blocks::*;\n\n use dolby_vision::rpu::generate::GenerateConfig;\n\n\n\n let config = GenerateConfig {\n\n length: 1000,\n\n source_min_pq: None,\n\n source_max_pq: None,\n\n level5: ExtMetadataBlockLevel5::from_offsets(0, 0, 280, 280),\n\n level6: ExtMetadataBlockLevel6 {\n\n max_display_mastering_luminance: 1000,\n\n min_display_mastering_luminance: 1,\n\n max_content_light_level: 1000,\n\n max_frame_average_light_level: 400,\n\n },\n\n default_metadata_blocks: vec![ExtMetadataBlock::Level2(ExtMetadataBlockLevel2::from_nits(\n\n 600,\n\n ))],\n\n ..Default::default()\n\n };\n", "file_path": 
"src/tests/rpu.rs", "rank": 21, "score": 163586.95308116838 }, { "content": "#[test]\n\nfn eof_rpu() -> Result<()> {\n\n let (original_data, dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/eof_rpu.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 22, "score": 163586.95308116838 }, { "content": "#[test]\n\nfn extract_rpu() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular.hevc\");\n\n let expected_rpu = Path::new(\"assets/hevc_tests/regular_rpu.bin\");\n\n\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/extract_rpu.rs", "rank": 23, "score": 160857.873517577 }, { "content": "#[test]\n\nfn cmv40_full_rpu() -> Result<()> {\n\n use dolby_vision::rpu::extension_metadata::blocks::*;\n\n use dolby_vision::rpu::generate::GenerateConfig;\n\n use dolby_vision::rpu::generate::VideoShot;\n\n\n\n let mut config = GenerateConfig {\n\n length: 10,\n\n source_min_pq: None,\n\n source_max_pq: None,\n\n level5: ExtMetadataBlockLevel5::from_offsets(0, 0, 280, 280),\n\n level6: ExtMetadataBlockLevel6 {\n\n max_display_mastering_luminance: 1000,\n\n min_display_mastering_luminance: 1,\n\n max_content_light_level: 1000,\n\n max_frame_average_light_level: 400,\n\n },\n\n default_metadata_blocks: vec![ExtMetadataBlock::Level2(ExtMetadataBlockLevel2::from_nits(\n\n 600,\n\n ))],\n\n 
..Default::default()\n", "file_path": "src/tests/rpu.rs", "rank": 24, "score": 160857.873517577 }, { "content": "#[test]\n\nfn trailing_bytes_rpu() -> Result<()> {\n\n let (original_data, mut dovi_rpu) =\n\n _parse_file(PathBuf::from(\"./assets/tests/trailing_bytes_rpu.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let mut parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n dovi_rpu.convert_with_mode(0)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n", "file_path": "src/tests/rpu.rs", "rank": 25, "score": 160857.873517577 }, { "content": "#[test]\n\nfn copy_discard() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular_start_code_4_muxed_el.hevc\");\n\n let non_el_file = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--discard\")\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(non_el_file));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/convert.rs", "rank": 26, "score": 159435.5636491498 }, { "content": "#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(SUBCOMMAND).arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\"dovi_tool generate [OPTIONS]\"));\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/rpu/generate.rs", "rank": 27, "score": 157974.86523163627 }, { "content": 
"#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(SUBCOMMAND).arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\n\n \"dovi_tool editor [OPTIONS] --json <json> [input_pos]\",\n\n ));\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/rpu/editor.rs", "rank": 28, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn fel() -> Result<()> {\n\n let (original_data, dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/fel_rpu.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 29, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn profile4() -> Result<()> {\n\n let (original_data, dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/profile4.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 4);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 30, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn profile5() -> Result<()> {\n\n let (original_data, dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/profile5.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 5);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 31, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn mode() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_rpu = Path::new(\"assets/tests/fel_orig.bin\");\n\n let edit_config = 
Path::new(\"assets/editor_examples/mode.json\");\n\n\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_rpu)\n\n .arg(\"--json\")\n\n .arg(edit_config)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n", "file_path": "tests/rpu/editor.rs", "rank": 32, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn mel() -> Result<()> {\n\n let (original_data, dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/mel_rpu.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 33, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn profile8() -> Result<()> {\n\n let (original_data, dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/profile8.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 8);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 34, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(\"info\").arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\n\n \"dovi_tool info [OPTIONS] [input_pos]\",\n\n ));\n\n Ok(())\n\n}\n", "file_path": "tests/rpu/info.rs", "rank": 35, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(\"export\").arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\n\n \"dovi_tool 
export [OPTIONS] [input_pos]\",\n\n ));\n\n Ok(())\n\n}\n", "file_path": "tests/rpu/export.rs", "rank": 36, "score": 157974.86523163627 }, { "content": "#[test]\n\nfn mode_lossless_discard() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular_start_code_4_muxed_el.hevc\");\n\n let non_el_file = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n\n\n let assert = cmd\n\n .arg(\"--mode\")\n\n .arg(\"0\")\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--discard\")\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(non_el_file));\n\n\n\n Ok(())\n\n}\n\n\n\n/// Edit config with specific active area\n", "file_path": "tests/hevc/convert.rs", "rank": 37, "score": 156390.7119182235 }, { "content": "#[test]\n\nfn poly_coef_int_logic_rpu() -> Result<()> {\n\n let (original_data, dovi_rpu) =\n\n _parse_file(PathBuf::from(\"./assets/tests/poly_coef_int_logic.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 38, "score": 155761.20270431315 }, { "content": "#[test]\n\nfn xml_cmv4_2_510() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let xml = Path::new(\"assets/tests/cmv4_2_xml_510.xml\");\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let expected_rpu = Path::new(\"assets/tests/cmv4_2_510_xml_rpu.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--xml\")\n\n .arg(xml)\n\n 
.arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_rpu));\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/rpu/generate.rs", "rank": 39, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn generate_full() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let generate_config = Path::new(\"assets/generator_examples/full_example.json\");\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--json\")\n\n .arg(generate_config)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu.assert(predicate::path::is_file());\n\n\n\n let rpus = utilities_dovi::parse_rpu_file(output_rpu.as_ref())?.unwrap();\n", "file_path": "tests/rpu/generate.rs", "rank": 40, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn fel_to_profile8() -> Result<()> {\n\n let (original_data, dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/fel_to_81.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 8);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 41, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn mel_conversions() -> Result<()> {\n\n let (original_data, mut dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/mel_orig.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let mut parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n // MEL to MEL\n\n let (mel_data, mel_rpu) = _parse_file(PathBuf::from(\"./assets/tests/mel_to_mel.bin\"))?;\n\n 
assert_eq!(mel_rpu.dovi_profile, 7);\n\n\n\n dovi_rpu.convert_with_mode(1)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&mel_data[4..], &parsed_data[2..]);\n\n\n\n // MEL to 8.1\n\n let (p81_data, p81_rpu) = _parse_file(PathBuf::from(\"./assets/tests/mel_to_81.bin\"))?;\n\n assert_eq!(p81_rpu.dovi_profile, 8);\n\n\n\n dovi_rpu.convert_with_mode(2)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&p81_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 42, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn profile5_to_p81() -> Result<()> {\n\n let (original_data, mut dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/profile5.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 5);\n\n let mut parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n // Profile 5 to 8.1\n\n let (p81_data, p81_rpu) = _parse_file(PathBuf::from(\"./assets/tests/profile8.bin\"))?;\n\n assert_eq!(p81_rpu.dovi_profile, 8);\n\n\n\n dovi_rpu.convert_with_mode(3)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&p81_data[4..], &parsed_data[2..]);\n\n\n\n assert_eq!(dovi_rpu.dovi_profile, 8);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 43, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn p8_to_mel() -> Result<()> {\n\n let (original_data, mut dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/mel_orig.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let mut parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n // MEL to 8.1\n\n let (p81_data, p81_rpu) = _parse_file(PathBuf::from(\"./assets/tests/mel_to_81.bin\"))?;\n\n assert_eq!(p81_rpu.dovi_profile, 8);\n\n\n\n dovi_rpu.convert_with_mode(2)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&p81_data[4..], 
&parsed_data[2..]);\n\n\n\n assert_eq!(dovi_rpu.dovi_profile, 8);\n\n\n\n // 8.1 to MEL\n\n dovi_rpu.convert_with_mode(1)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 44, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn xml_cmv4_0_2() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let xml = Path::new(\"assets/tests/cmv4_0_2.xml\");\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let expected_rpu = Path::new(\"assets/tests/cmv4_0_2_xml_rpu.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--xml\")\n\n .arg(xml)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/rpu/generate.rs", "rank": 45, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(SUBCOMMAND).arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\n\n \"dovi_tool extract-rpu [OPTIONS] [input_pos]\",\n\n ));\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/extract_rpu.rs", "rank": 46, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn convert_to_cmv4() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_rpu = Path::new(\"assets/tests/fel_orig.bin\");\n\n let edit_config = Path::new(\"assets/editor_examples/convert_to_cmv4.json\");\n\n\n\n let output_rpu = 
temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_rpu)\n\n .arg(\"--json\")\n\n .arg(edit_config)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n", "file_path": "tests/rpu/editor.rs", "rank": 47, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn fel_conversions() -> Result<()> {\n\n let (original_data, mut dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/fel_orig.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let mut parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n // FEL to MEL\n\n let (mel_data, mel_rpu) = _parse_file(PathBuf::from(\"./assets/tests/fel_to_mel.bin\"))?;\n\n assert_eq!(mel_rpu.dovi_profile, 7);\n\n\n\n dovi_rpu.convert_with_mode(1)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&mel_data[4..], &parsed_data[2..]);\n\n\n\n // FEL to 8.1\n\n let (p81_data, p81_rpu) = _parse_file(PathBuf::from(\"./assets/tests/fel_to_81.bin\"))?;\n\n assert_eq!(p81_rpu.dovi_profile, 8);\n\n\n\n dovi_rpu.convert_with_mode(2)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&p81_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 48, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(SUBCOMMAND).arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\n\n \"dovi_tool inject-rpu [OPTIONS] --rpu-in <RPU_IN> [input_pos]\",\n\n ));\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/inject_rpu.rs", "rank": 49, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn fel_to_mel() -> Result<()> {\n\n let (original_data, dovi_rpu) = 
_parse_file(PathBuf::from(\"./assets/tests/fel_to_mel.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 50, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn p8_bypass() -> Result<()> {\n\n let (original_data, mut dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/profile8.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 8);\n\n let mut parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n dovi_rpu.convert_with_mode(2)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 51, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn inject() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular_bl_start_code_4.hevc\");\n\n let input_rpu = Path::new(\"assets/hevc_tests/regular_rpu.bin\");\n\n\n\n let output_file = temp.child(\"injected_output.hevc\");\n\n let expected_bl_rpu = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--rpu-in\")\n\n .arg(input_rpu)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_bl_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/inject_rpu.rs", "rank": 52, "score": 154888.36166171622 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[test]\n\nfn generate_full() -> Result<()> {\n\n let args = GenerateArgs {\n\n json_file: 
Some(PathBuf::from(\n\n \"./assets/generator_examples/full_example.json\",\n\n )),\n\n rpu_out: Some(PathBuf::from(\"/dev/null\")),\n\n hdr10plus_json: None,\n\n xml: None,\n\n canvas_width: None,\n\n canvas_height: None,\n\n madvr_file: None,\n\n use_custom_targets: false,\n\n };\n\n\n\n let mut generator = Generator::from_args(args)?;\n\n generator.execute()?;\n\n\n\n // Get updated config\n\n let config = generator.config.unwrap();\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 53, "score": 154888.36166171622 }, { "content": "#[test]\n\nfn profile5_to_p81_2() -> Result<()> {\n\n let (original_data, mut dovi_rpu) =\n\n _parse_file(PathBuf::from(\"./assets/tests/profile5-02.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 5);\n\n let mut parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n // Profile 5 to 8.1\n\n let (p81_data, p81_rpu) = _parse_file(PathBuf::from(\n\n \"./assets/tests/profile8_from_profile5-02.bin\",\n\n ))?;\n\n assert_eq!(p81_rpu.dovi_profile, 8);\n\n\n\n dovi_rpu.convert_with_mode(3)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n assert_eq!(&p81_data[4..], &parsed_data[2..]);\n\n\n\n assert_eq!(dovi_rpu.dovi_profile, 8);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 54, "score": 154888.36166171622 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[test]\n\nfn generate_default_cmv40() -> Result<()> {\n\n let args = GenerateArgs {\n\n json_file: Some(PathBuf::from(\n\n \"./assets/generator_examples/default_cmv40.json\",\n\n )),\n\n rpu_out: Some(PathBuf::from(\"/dev/null\")),\n\n hdr10plus_json: None,\n\n xml: None,\n\n canvas_width: None,\n\n canvas_height: None,\n\n madvr_file: None,\n\n use_custom_targets: false,\n\n };\n\n\n\n let mut generator = Generator::from_args(args)?;\n\n generator.execute()?;\n\n\n\n // Get updated config\n\n let config = generator.config.unwrap();\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 55, "score": 
151960.00564048326 }, { "content": "#[test]\n\nfn edit_config() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular.hevc\");\n\n let edit_config = Path::new(\"assets/editor_examples/active_area_all.json\");\n\n\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(\"--edit-config\")\n\n .arg(edit_config)\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n", "file_path": "tests/hevc/extract_rpu.rs", "rank": 56, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn inject_aud() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/no_aud_bl.hevc\");\n\n let input_rpu = Path::new(\"assets/hevc_tests/regular_rpu.bin\");\n\n\n\n let output_file = temp.child(\"injected_output.hevc\");\n\n let expected_bl_rpu = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--rpu-in\")\n\n .arg(input_rpu)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_bl_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/inject_rpu.rs", "rank": 57, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn st2094_10_level3_invalid() -> Result<()> {\n\n let res = _parse_file(PathBuf::from(\"./assets/tests/st2094_10_level3.bin\"));\n\n assert!(res.is_err());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 58, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn 
active_area_specific() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_rpu = Path::new(\"assets/hevc_tests/regular_rpu.bin\");\n\n let edit_config = Path::new(\"assets/editor_examples/active_area.json\");\n\n\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_rpu)\n\n .arg(\"--json\")\n\n .arg(edit_config)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n", "file_path": "tests/rpu/editor.rs", "rank": 59, "score": 151960.0056404833 }, { "content": "#[test]\n\nfn xml_cmv4_0_2_with_l5() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let xml = Path::new(\"assets/tests/cmv4_0_2.xml\");\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let expected_rpu = Path::new(\"assets/tests/cmv4_0_2_xml_with_l5_rpu.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--xml\")\n\n .arg(xml)\n\n .arg(\"--canvas-width\")\n\n .arg(\"3840\")\n\n .arg(\"--canvas-height\")\n\n .arg(\"2160\")\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/rpu/generate.rs", "rank": 60, "score": 151960.0056404833 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[test]\n\nfn generate_full_hdr10plus() -> Result<()> {\n\n let args = GenerateArgs {\n\n json_file: Some(PathBuf::from(\n\n \"./assets/generator_examples/no_duration.json\",\n\n )),\n\n rpu_out: Some(PathBuf::from(\"/dev/null\")),\n\n hdr10plus_json: Some(PathBuf::from(\"./assets/tests/hdr10plus_metadata.json\")),\n\n xml: None,\n\n canvas_width: None,\n\n 
canvas_height: None,\n\n madvr_file: None,\n\n use_custom_targets: false,\n\n };\n\n\n\n let mut generator = Generator::from_args(args)?;\n\n generator.execute()?;\n\n\n\n // Get updated config\n\n let config = generator.config.unwrap();\n\n assert_eq!(config.shots.len(), 3);\n", "file_path": "src/tests/rpu.rs", "rank": 61, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn mode_mel() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular.hevc\");\n\n let expected_rpu = Path::new(\"assets/hevc_tests/regular_rpu_mel.bin\");\n\n\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(\"--mode\")\n\n .arg(\"1\")\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_rpu));\n\n\n\n Ok(())\n\n}\n\n\n\n/// Edit config with specific active area\n", "file_path": "tests/hevc/extract_rpu.rs", "rank": 62, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn generate_full_hdr10plus() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let generate_config = Path::new(\"assets/generator_examples/no_duration.json\");\n\n let hdr10plus_json = Path::new(\"./assets/tests/hdr10plus_metadata.json\");\n\n\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--json\")\n\n .arg(generate_config)\n\n .arg(\"--hdr10plus-json\")\n\n .arg(hdr10plus_json)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n", "file_path": "tests/rpu/generate.rs", "rank": 63, "score": 151960.0056404833 }, 
{ "content": "#[test]\n\nfn empty_dmv1_blocks() -> Result<()> {\n\n let (original_data, mut dovi_rpu) =\n\n _parse_file(PathBuf::from(\"./assets/tests/empty_dmv1_blocks.bin\"))?;\n\n assert!(!dovi_rpu.modified);\n\n assert_eq!(dovi_rpu.dovi_profile, 5);\n\n\n\n let mut parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n let reparsed_rpu = DoviRpu::parse_unspec62_nalu(&parsed_data)?;\n\n assert!(!reparsed_rpu.modified);\n\n assert_eq!(reparsed_rpu.dovi_profile, 5);\n\n\n\n assert_eq!(dovi_rpu.rpu_data_crc32, reparsed_rpu.rpu_data_crc32);\n\n\n\n dovi_rpu.convert_with_mode(3)?;\n\n parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n let reparsed_rpu = DoviRpu::parse_unspec62_nalu(&parsed_data)?;\n\n assert!(!reparsed_rpu.modified);\n\n assert_eq!(reparsed_rpu.dovi_profile, 8);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 64, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn sets_offsets_to_zero() -> Result<()> {\n\n use dolby_vision::rpu::extension_metadata::blocks::ExtMetadataBlock;\n\n\n\n let (_original_data, mut dovi_rpu) = _parse_file(PathBuf::from(\"./assets/tests/fel_orig.bin\"))?;\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n\n\n dovi_rpu.crop()?;\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n let dovi_rpu = DoviRpu::parse_unspec62_nalu(&parsed_data)?;\n\n if let Some(vdr_dm_data) = dovi_rpu.vdr_dm_data {\n\n let block = vdr_dm_data.get_block(5);\n\n\n\n assert!(block.is_some());\n\n\n\n if let Some(ExtMetadataBlock::Level5(b)) = block {\n\n assert_eq!(vec![0, 0, 0, 0], b.get_offsets_vec());\n\n }\n\n } else {\n\n panic!(\"No DM metadata\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 65, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn generate_default_cmv40() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = 
assert_fs::TempDir::new().unwrap();\n\n\n\n let generate_config = Path::new(\"assets/generator_examples/default_cmv40.json\");\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--json\")\n\n .arg(generate_config)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu.assert(predicate::path::is_file());\n\n\n\n let rpus = utilities_dovi::parse_rpu_file(output_rpu.as_ref())?.unwrap();\n", "file_path": "tests/rpu/generate.rs", "rank": 66, "score": 151960.0056404833 }, { "content": "#[test]\n\nfn profile8_001_end_crc32() -> Result<()> {\n\n use utilities_dovi::parse_rpu_file;\n\n\n\n let rpus = parse_rpu_file(&PathBuf::from(\"./assets/tests/p8_001_end_crc32.bin\"))?;\n\n assert!(rpus.is_some());\n\n\n\n let rpus = rpus.unwrap();\n\n assert_eq!(rpus.len(), 3);\n\n\n\n let dovi_rpu = &rpus[0];\n\n assert_eq!(8, dovi_rpu.dovi_profile);\n\n assert_eq!([216, 0, 0, 1], dovi_rpu.rpu_data_crc32.to_be_bytes());\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 67, "score": 151960.00564048326 }, { "content": "#[cfg(target_os = \"linux\")]\n\n#[test]\n\nfn generate_default_cmv29() -> Result<()> {\n\n let args = GenerateArgs {\n\n json_file: Some(PathBuf::from(\n\n \"./assets/generator_examples/default_cmv29.json\",\n\n )),\n\n rpu_out: Some(PathBuf::from(\"/dev/null\")),\n\n hdr10plus_json: None,\n\n xml: None,\n\n canvas_width: None,\n\n canvas_height: None,\n\n madvr_file: None,\n\n use_custom_targets: false,\n\n };\n\n\n\n let mut generator = Generator::from_args(args)?;\n\n generator.execute()?;\n\n\n\n // Get updated config\n\n let config = generator.config.unwrap();\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 68, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn xml_cmv2_9_with_l5() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = 
assert_fs::TempDir::new().unwrap();\n\n\n\n let xml = Path::new(\"assets/tests/cmv2_9.xml\");\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let expected_rpu = Path::new(\"assets/tests/cmv2_9_xml_with_l5_rpu.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--xml\")\n\n .arg(xml)\n\n .arg(\"--canvas-width\")\n\n .arg(\"3840\")\n\n .arg(\"--canvas-height\")\n\n .arg(\"2160\")\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/rpu/generate.rs", "rank": 69, "score": 151960.0056404833 }, { "content": "#[test]\n\nfn generate_default_cmv29() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let generate_config = Path::new(\"assets/generator_examples/default_cmv29.json\");\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--json\")\n\n .arg(generate_config)\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu.assert(predicate::path::is_file());\n\n\n\n let rpus = utilities_dovi::parse_rpu_file(output_rpu.as_ref())?.unwrap();\n", "file_path": "tests/rpu/generate.rs", "rank": 70, "score": 151960.00564048326 }, { "content": "#[test]\n\nfn profile8_unordered_l8_blocks() -> Result<()> {\n\n let (original_data, dovi_rpu) =\n\n _parse_file(PathBuf::from(\"./assets/tests/unordered_l8_blocks.bin\"))?;\n\n assert!(!dovi_rpu.modified);\n\n assert_eq!(dovi_rpu.dovi_profile, 8);\n\n\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n let reparsed_rpu = DoviRpu::parse_unspec62_nalu(&parsed_data)?;\n\n 
assert!(!reparsed_rpu.modified);\n\n assert_eq!(reparsed_rpu.dovi_profile, 8);\n\n\n\n assert_eq!(dovi_rpu.rpu_data_crc32, reparsed_rpu.rpu_data_crc32);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 71, "score": 149177.94594250413 }, { "content": "#[test]\n\nfn mel_variable_l8_length13() -> Result<()> {\n\n let (original_data, dovi_rpu) =\n\n _parse_file(PathBuf::from(\"./assets/tests/mel_variable_l8_length13.bin\"))?;\n\n assert!(!dovi_rpu.modified);\n\n assert_eq!(dovi_rpu.dovi_profile, 7);\n\n\n\n let parsed_data = dovi_rpu.write_hevc_unspec62_nalu()?;\n\n\n\n assert_eq!(&original_data[4..], &parsed_data[2..]);\n\n\n\n let reparsed_rpu = DoviRpu::parse_unspec62_nalu(&parsed_data)?;\n\n assert!(!reparsed_rpu.modified);\n\n assert_eq!(reparsed_rpu.dovi_profile, 7);\n\n\n\n assert_eq!(dovi_rpu.rpu_data_crc32, reparsed_rpu.rpu_data_crc32);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 72, "score": 149177.94594250413 }, { "content": "#[test]\n\nfn inject_no_add_aud() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/no_aud_bl.hevc\");\n\n let input_rpu = Path::new(\"assets/hevc_tests/regular_rpu.bin\");\n\n\n\n let output_file = temp.child(\"injected_output.hevc\");\n\n let expected_bl_rpu = Path::new(\"assets/hevc_tests/no_aud_injected.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--rpu-in\")\n\n .arg(input_rpu)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .arg(\"--no-add-aud\")\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_bl_rpu));\n\n\n\n Ok(())\n\n}\n", "file_path": "tests/hevc/inject_rpu.rs", "rank": 73, "score": 149177.94594250413 }, { "content": "#[test]\n\nfn xml_cmv4_0_2_custom_displays() -> 
Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let xml = Path::new(\"assets/tests/cmv4_0_2_custom_displays.xml\");\n\n let output_rpu = temp.child(\"RPU.bin\");\n\n\n\n let expected_rpu = Path::new(\"assets/tests/cmv4_0_2_custom_displays_xml_rpu.bin\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--xml\")\n\n .arg(xml)\n\n .arg(\"--canvas-width\")\n\n .arg(\"3840\")\n\n .arg(\"--canvas-height\")\n\n .arg(\"2160\")\n\n .arg(\"--rpu-out\")\n\n .arg(output_rpu.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_rpu\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/rpu/generate.rs", "rank": 74, "score": 149177.94594250413 }, { "content": "#[test]\n\nfn cmv40_full_l8_l9_l10() -> Result<()> {\n\n use dolby_vision::rpu::extension_metadata::blocks::*;\n\n use dolby_vision::rpu::generate::GenerateConfig;\n\n use dolby_vision::rpu::generate::VideoShot;\n\n\n\n // Random primaries derived from DCI-P3\n\n let primaries1 = ColorPrimaries::from_array_float(&[\n\n 0.681, 0.322, 0.2653, 0.694, 0.155, 0.066, 0.3127, 0.329,\n\n ]);\n\n\n\n // Random primaries derived from BT.709\n\n let primaries2 = ColorPrimaries::from_array_float(&[\n\n 0.641, 0.332, 0.303, 0.604, 0.155, 0.066, 0.3127, 0.329,\n\n ]);\n\n\n\n let mut level9 = ExtMetadataBlockLevel9 {\n\n length: 17,\n\n source_primary_index: 255,\n\n ..Default::default()\n\n };\n", "file_path": "src/tests/rpu.rs", "rank": 75, "score": 146531.48662356523 }, { "content": "fn parse_hdr10plus_for_l1(hdr10plus_path: &Path, config: &mut GenerateConfig) -> Result<()> {\n\n println!(\"Parsing HDR10+ JSON file...\");\n\n stdout().flush().ok();\n\n\n\n let mut s = String::new();\n\n File::open(hdr10plus_path)?.read_to_string(&mut s)?;\n\n\n\n let hdr10plus: Value = serde_json::from_str(&s)?;\n\n\n\n let 
mut frame_count = 0;\n\n\n\n if let Some(json) = hdr10plus.as_object() {\n\n // Assume a proper JSON for scene info\n\n let scene_summary = json\n\n .get(\"SceneInfoSummary\")\n\n .expect(\"No scene info summary in JSON\")\n\n .as_object()\n\n .unwrap();\n\n\n\n let scene_first_frames: Vec<usize> = scene_summary\n", "file_path": "src/dovi/generator.rs", "rank": 76, "score": 140035.45339885054 }, { "content": "fn _debug_generate(config: &GenerateConfig) -> Result<()> {\n\n let path = PathBuf::from(\"test.bin\");\n\n config.write_rpus(&path)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tests/rpu.rs", "rank": 77, "score": 138960.1552989065 }, { "content": "/// Assumes a list of size 8, otherwise panics\n\npub fn f64_to_integer_primaries(primaries: &[f64]) -> [u16; 8] {\n\n const SCALE: f64 = 1.0 / 32767.0;\n\n\n\n primaries\n\n .iter()\n\n .map(|v| (v / SCALE).round() as u16)\n\n .collect::<Vec<u16>>()\n\n .try_into()\n\n .unwrap()\n\n}\n", "file_path": "dolby_vision/src/rpu/extension_metadata/primaries.rs", "rank": 78, "score": 125416.06154784653 }, { "content": "fn main() -> Result<()> {\n\n let opt = Opt::parse();\n\n\n\n let edit_config = opt\n\n .edit_config\n\n .as_ref()\n\n .map(EditConfig::from_path)\n\n .and_then(Result::ok);\n\n\n\n let mut cli_options = CliOptions {\n\n mode: opt.mode,\n\n crop: opt.crop,\n\n discard_el: false,\n\n drop_hdr10plus: opt.drop_hdr10plus,\n\n edit_config,\n\n };\n\n\n\n // Set mode 0 by default if cropping, otherwise it has no effect\n\n if cli_options.mode.is_none() && cli_options.crop {\n\n cli_options.mode = Some(0);\n", "file_path": "src/main.rs", "rank": 79, "score": 123638.35145598152 }, { "content": "#[test]\n\nfn version() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(\"--version\").assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n Ok(())\n\n}\n", "file_path": "tests/mod.rs", "rank": 80, "score": 123638.35145598152 }, { "content": 
"#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\"dovi_tool [OPTIONS] <SUBCOMMAND>\"));\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/mod.rs", "rank": 81, "score": 123638.35145598152 }, { "content": "pub fn generate_metadata_from_madvr(\n\n madvr_path: &Path,\n\n use_custom_targets: bool,\n\n config: &mut GenerateConfig,\n\n) -> Result<()> {\n\n println!(\"Parsing madVR measurement file...\");\n\n stdout().flush().ok();\n\n\n\n let madvr_info = madvr_parse::MadVRMeasurements::parse_file(madvr_path)?;\n\n\n\n let level6_meta = ExtMetadataBlockLevel6 {\n\n max_content_light_level: madvr_info.header.maxcll as u16,\n\n max_frame_average_light_level: madvr_info.header.maxfall as u16,\n\n ..Default::default()\n\n };\n\n\n\n let frame_count = madvr_info.frames.len();\n\n let mut madvr_shots = Vec::with_capacity(madvr_info.scenes.len());\n\n\n\n for (i, scene) in madvr_info.scenes.iter().enumerate() {\n", "file_path": "src/dovi/generator.rs", "rank": 82, "score": 121713.44113158796 }, { "content": "#[test]\n\nfn copy() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(input_file));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/convert.rs", "rank": 83, "score": 121395.68526011199 }, { "content": "#[test]\n\nfn mux() -> Result<()> {\n\n let mut cmd = 
Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_bl = Path::new(\"assets/hevc_tests/regular_bl_start_code_4.hevc\");\n\n let input_el = Path::new(\"assets/hevc_tests/regular.hevc\");\n\n\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n let expected_bl_el_rpu = Path::new(\"assets/hevc_tests/regular_start_code_4_muxed_el.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--bl\")\n\n .arg(input_bl)\n\n .arg(\"--el\")\n\n .arg(input_el)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n\n output_file\n\n .assert(predicate::path::is_file())\n\n .assert(predicate::path::eq_file(expected_bl_el_rpu));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/mux.rs", "rank": 84, "score": 121395.68526011199 }, { "content": "#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(SUBCOMMAND).arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\n\n \"dovi_tool mux [OPTIONS] --bl <bl> --el <el>\",\n\n ));\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/mux.rs", "rank": 85, "score": 121395.68526011199 }, { "content": "#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(SUBCOMMAND).arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\n\n \"dovi_tool convert [OPTIONS] [input_pos]\",\n\n ));\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/convert.rs", "rank": 86, "score": 121395.68526011199 }, { "content": "#[test]\n\nfn demux() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = 
Path::new(\"assets/hevc_tests/regular_start_code_4_muxed_el.hevc\");\n\n let expected_bl = Path::new(\"assets/hevc_tests/regular_bl_start_code_4.hevc\");\n\n let expected_el = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n\n\n let output_bl = temp.child(\"BL.hevc\");\n\n let output_el = temp.child(\"EL.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--bl-out\")\n\n .arg(output_bl.as_ref())\n\n .arg(\"--el-out\")\n\n .arg(output_el.as_ref())\n\n .assert();\n\n\n", "file_path": "tests/hevc/demux.rs", "rank": 87, "score": 121395.68526011199 }, { "content": "#[test]\n\nfn help() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let assert = cmd.arg(SUBCOMMAND).arg(\"--help\").assert();\n\n\n\n assert\n\n .success()\n\n .stderr(predicate::str::is_empty())\n\n .stdout(predicate::str::contains(\n\n \"dovi_tool demux [OPTIONS] [input_pos]\",\n\n ));\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/hevc/demux.rs", "rank": 88, "score": 121395.68526011199 }, { "content": "fn assert_num_blocks_for_level(blocks: &[ExtMetadataBlock], level: u8, count: usize) {\n\n let filtered = blocks.iter().filter(|b| b.level() == level).count();\n\n\n\n assert_eq!(filtered, count);\n\n}\n\n\n", "file_path": "dolby_vision/src/xml/tests.rs", "rank": 89, "score": 121286.35834863757 }, { "content": "#[test]\n\nfn no_add_aud() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_bl = Path::new(\"assets/hevc_tests/no_aud_bl.hevc\");\n\n let input_el = Path::new(\"assets/hevc_tests/no_aud_injected.hevc\");\n\n\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n let expected_bl_el_rpu = Path::new(\"assets/hevc_tests/no_aud_muxed.hevc\");\n\n\n\n let assert = cmd\n\n .arg(SUBCOMMAND)\n\n .arg(\"--bl\")\n\n .arg(input_bl)\n\n .arg(\"--el\")\n\n .arg(input_el)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n 
.arg(\"--no-add-aud\")\n\n .assert();\n", "file_path": "tests/hevc/mux.rs", "rank": 90, "score": 119270.95104786582 }, { "content": "#[test]\n\nfn edit_config() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular_start_code_4.hevc\");\n\n let edit_config = Path::new(\"assets/editor_examples/active_area_all.json\");\n\n\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n\n\n let assert = cmd\n\n .arg(\"--edit-config\")\n\n .arg(edit_config)\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n\n .assert();\n\n\n\n assert.success().stderr(predicate::str::is_empty());\n\n\n", "file_path": "tests/hevc/convert.rs", "rank": 91, "score": 119270.95104786582 }, { "content": "#[test]\n\nfn edit_config() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_bl = Path::new(\"assets/hevc_tests/regular_bl_start_code_4.hevc\");\n\n let input_el = Path::new(\"assets/hevc_tests/regular.hevc\");\n\n let edit_config = Path::new(\"assets/editor_examples/active_area_all.json\");\n\n\n\n let output_file = temp.child(\"BL_EL_RPU.hevc\");\n\n\n\n let assert = cmd\n\n .arg(\"--edit-config\")\n\n .arg(edit_config)\n\n .arg(SUBCOMMAND)\n\n .arg(\"--bl\")\n\n .arg(input_bl)\n\n .arg(\"--el\")\n\n .arg(input_el)\n\n .arg(\"--output\")\n\n .arg(output_file.as_ref())\n", "file_path": "tests/hevc/mux.rs", "rank": 92, "score": 119270.95104786582 }, { "content": "#[test]\n\nfn edit_config() -> Result<()> {\n\n let mut cmd = Command::cargo_bin(env!(\"CARGO_PKG_NAME\"))?;\n\n let temp = assert_fs::TempDir::new().unwrap();\n\n\n\n let input_file = Path::new(\"assets/hevc_tests/regular_start_code_4_muxed_el.hevc\");\n\n let edit_config = Path::new(\"assets/editor_examples/active_area_all.json\");\n\n\n\n let output_bl 
= temp.child(\"BL.hevc\");\n\n let output_el = temp.child(\"EL.hevc\");\n\n\n\n let assert = cmd\n\n .arg(\"--edit-config\")\n\n .arg(edit_config)\n\n .arg(SUBCOMMAND)\n\n .arg(input_file)\n\n .arg(\"--bl-out\")\n\n .arg(output_bl.as_ref())\n\n .arg(\"--el-out\")\n\n .arg(output_el.as_ref())\n\n .arg(\"--el-only\")\n", "file_path": "tests/hevc/demux.rs", "rank": 93, "score": 119270.95104786582 }, { "content": "#[test]\n\nfn parse_cmv4_0_2() -> Result<()> {\n\n let lib_path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n let assets_path = lib_path.parent().unwrap();\n\n\n\n let opts = XmlParserOpts::default();\n\n let parser = CmXmlParser::parse_file(&assets_path.join(\"assets/tests/cmv4_0_2.xml\"), opts)?;\n\n\n\n let config = parser.config;\n\n\n\n assert_eq!(config.cm_version, CmVersion::V40);\n\n assert_eq!(config.length, 259);\n\n assert_eq!(config.shots.len(), 3);\n\n\n\n // L5\n\n assert_eq!(config.level5.get_offsets(), (0, 0, 0, 0));\n\n\n\n // L6\n\n assert_eq!(config.level6.max_display_mastering_luminance, 1000);\n\n assert_eq!(config.level6.min_display_mastering_luminance, 1);\n\n assert_eq!(config.level6.max_content_light_level, 3948);\n", "file_path": "dolby_vision/src/xml/tests.rs", "rank": 94, "score": 115339.92833238828 }, { "content": "#[test]\n\nfn parse_cmv2_9() -> Result<()> {\n\n let lib_path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n let assets_path = lib_path.parent().unwrap();\n\n\n\n let opts = XmlParserOpts {\n\n canvas_width: Some(3840),\n\n canvas_height: Some(2160),\n\n };\n\n let parser = CmXmlParser::parse_file(&assets_path.join(\"assets/tests/cmv2_9.xml\"), opts)?;\n\n\n\n let config = parser.config;\n\n\n\n assert_eq!(config.cm_version, CmVersion::V29);\n\n assert_eq!(config.length, 108);\n\n assert_eq!(config.shots.len(), 2);\n\n\n\n // L5\n\n assert_eq!(config.level5.get_offsets(), (0, 0, 276, 276));\n\n\n\n // L6\n", "file_path": "dolby_vision/src/xml/tests.rs", "rank": 95, "score": 115339.92833238828 }, { "content": 
"#[test]\n\nfn parse_cmv4_0_2_with_l5() -> Result<()> {\n\n let lib_path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n let assets_path = lib_path.parent().unwrap();\n\n\n\n let opts = XmlParserOpts {\n\n canvas_width: Some(3840),\n\n canvas_height: Some(2160),\n\n };\n\n\n\n let parser = CmXmlParser::parse_file(&assets_path.join(\"assets/tests/cmv4_0_2.xml\"), opts)?;\n\n\n\n let config = parser.config;\n\n\n\n assert_eq!(config.cm_version, CmVersion::V40);\n\n\n\n // L5\n\n assert_eq!(config.level5.get_offsets(), (480, 480, 0, 0));\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "dolby_vision/src/xml/tests.rs", "rank": 96, "score": 113518.11846601809 }, { "content": "#[test]\n\nfn parse_cmv4_2_xml_510() -> Result<()> {\n\n let lib_path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n let assets_path = lib_path.parent().unwrap();\n\n\n\n let opts = XmlParserOpts::default();\n\n let parser =\n\n CmXmlParser::parse_file(&assets_path.join(\"assets/tests/cmv4_2_xml_510.xml\"), opts)?;\n\n\n\n // Only HOME targets\n\n assert_eq!(parser.target_displays.len(), 3);\n\n\n\n let config = parser.config;\n\n\n\n assert_eq!(config.cm_version, CmVersion::V40);\n\n let rpus = config.generate_rpu_list()?;\n\n\n\n let rpu = &rpus[0];\n\n let vdr_dm_data = rpu.vdr_dm_data.as_ref().unwrap();\n\n\n\n // L1, L5, L6 in DMv1\n", "file_path": "dolby_vision/src/xml/tests.rs", "rank": 97, "score": 113518.11846601809 }, { "content": "#[test]\n\nfn parse_cmv4_0_2_custom_displays() -> Result<()> {\n\n let lib_path = PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n let assets_path = lib_path.parent().unwrap();\n\n\n\n let opts = XmlParserOpts::default();\n\n let parser = CmXmlParser::parse_file(\n\n &assets_path.join(\"assets/tests/cmv4_0_2_custom_displays.xml\"),\n\n opts,\n\n )?;\n\n\n\n let config = parser.config;\n\n\n\n assert_eq!(config.cm_version, CmVersion::V40);\n\n let rpus = config.generate_rpu_list()?;\n\n\n\n let rpu = &rpus[0];\n\n let vdr_dm_data = 
rpu.vdr_dm_data.as_ref().unwrap();\n\n\n\n // L1, L5, L6 in DMv1\n\n assert_eq!(vdr_dm_data.metadata_blocks(1).unwrap().len(), 3);\n", "file_path": "dolby_vision/src/xml/tests.rs", "rank": 98, "score": 111782.99289928761 }, { "content": "#[inline(always)]\n\npub fn nits_to_pq(nits: u32) -> f64 {\n\n let y = nits as f64 / ST2084_Y_MAX;\n\n\n\n ((ST2084_C1 + ST2084_C2 * y.powf(ST2084_M1)) / (1.0 + ST2084_C3 * y.powf(ST2084_M1)))\n\n .powf(ST2084_M2)\n\n}\n", "file_path": "madvr_parse/src/utils.rs", "rank": 99, "score": 104823.12509005284 } ]
Rust
xdr-codec/src/record.rs
Voronar/rust-xdr
a1fa344e6cd0fce72585388b9b5145aa21d569fc
use std::io::{self, BufRead, Read, Write}; use std::cmp::min; use crate::error::*; use super::{Error, pack, unpack}; const LAST_REC: u32 = 1u32 << 31; fn mapioerr(xdrerr: Error) -> io::Error { match xdrerr { Error(ErrorKind::IOError(ioerr), _) => ioerr, other => io::Error::new(io::ErrorKind::Other, other), } } #[derive(Debug)] pub struct XdrRecordReader<R: BufRead> { size: usize, consumed: usize, eor: bool, reader: R, } impl<R: BufRead> XdrRecordReader<R> { pub fn new(rd: R) -> XdrRecordReader<R> { XdrRecordReader { size: 0, consumed: 0, eor: false, reader: rd, } } fn nextrec(&mut self) -> io::Result<bool> { assert_eq!(self.consumed, self.size); let rechdr: u32 = match unpack(&mut self.reader) { Ok(v) => v, Err(Error(ErrorKind::IOError(ref err), _)) if err.kind() == io::ErrorKind::UnexpectedEof => return Ok(true), Err(e) => return Err(mapioerr(e)), }; self.size = (rechdr & !LAST_REC) as usize; self.consumed = 0; self.eor = (rechdr & LAST_REC) != 0; Ok(false) } fn totremains(&self) -> usize { self.size - self.consumed } pub fn eor(&self) -> bool { self.eor } } impl<R: BufRead> Read for XdrRecordReader<R> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let nread = { let data = self.fill_buf()?; let len = min(buf.len(), data.len()); (&data[..len]).read(buf)? }; self.consume(nread); Ok(nread) } } impl<R: BufRead> BufRead for XdrRecordReader<R> { fn fill_buf(&mut self) -> io::Result<&[u8]> { while self.totremains() == 0 { if self.nextrec()? 
{ return Ok(&[]); } } let remains = self.totremains(); let data = self.reader.fill_buf()?; Ok(&data[..min(data.len(), remains)]) } fn consume(&mut self, sz: usize) { assert!(sz <= self.totremains()); self.consumed += sz; self.reader.consume(sz); } } impl<R: BufRead> IntoIterator for XdrRecordReader<R> { type Item = io::Result<Vec<u8>>; type IntoIter = XdrRecordReaderIter<R>; fn into_iter(self) -> Self::IntoIter { XdrRecordReaderIter(Some(self)) } } #[derive(Debug)] pub struct XdrRecordReaderIter<R: BufRead>(Option<XdrRecordReader<R>>); impl<R: BufRead> Iterator for XdrRecordReaderIter<R> { type Item = io::Result<Vec<u8>>; fn next(&mut self) -> Option<Self::Item> { if let Some(mut rr) = self.0.take() { let mut buf = Vec::new(); loop { if rr.totremains() == 0 { match rr.nextrec() { Err(e) => return Some(Err(e)), Ok(true) => return None, Ok(false) => (), } } let remains = rr.totremains(); let eor = rr.eor(); match rr.by_ref().take(remains as u64).read_to_end(&mut buf) { Ok(sz) if sz == remains => (), Ok(_) => return None, Err(e) => return Some(Err(e)), }; if eor { break; } } self.0 = Some(rr); Some(Ok(buf)) } else { None } } } const WRBUF: usize = 65536; pub struct XdrRecordWriter<W: Write> { buf: Vec<u8>, bufsz: usize, eor: bool, writer: W, } impl<W: Write> XdrRecordWriter<W> { pub fn new(w: W) -> XdrRecordWriter<W> { XdrRecordWriter::with_buffer(w, WRBUF) } pub fn with_buffer(w: W, bufsz: usize) -> XdrRecordWriter<W> { if bufsz == 0 { panic!("bufsz must be non-zero") } XdrRecordWriter { buf: Vec::with_capacity(bufsz), bufsz: bufsz, eor: false, writer: w, } } pub fn flush_eor(&mut self, eor: bool) -> io::Result<()> { if !eor && self.buf.len() == 0 { return Ok(()); } let rechdr = self.buf.len() as u32 | (if eor { LAST_REC } else { 0 }); pack(&rechdr, &mut self.writer).map_err(mapioerr)?; let _ = self.writer.write_all(&self.buf).map(|_| ())?; self.buf.truncate(0); self.eor = eor; self.writer.flush() } } impl<W: Write> Drop for XdrRecordWriter<W> { fn drop(&mut self) { 
if self.buf.len() > 0 || !self.eor { let _ = self.flush_eor(true); } } } impl<W: Write> Write for XdrRecordWriter<W> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { let mut off = 0; while off < buf.len() { let chunk = &buf[off..off + min(buf.len() - off, self.bufsz)]; if self.buf.len() + chunk.len() > self.bufsz { self.flush()?; } self.buf.extend(chunk); off += chunk.len(); } Ok(off) } fn flush(&mut self) -> io::Result<()> { self.flush_eor(false) } }
use std::io::{self, BufRead, Read, Write}; use std::cmp::min; use crate::error::*; use super::{Error, pack, unpack}; const LAST_REC: u32 = 1u32 << 31; fn mapioerr(xdrerr: Error) -> io::Error { match xdrerr { Error(ErrorKind::IOError(ioerr), _) => ioerr, other => io::Error::new(io::ErrorKind::Other, other), } } #[derive(Debug)] pub struct XdrRecordReader<R: BufRead> { size: usize, consumed: usize, eor: bool, reader: R, } impl<R: BufRead> XdrRecordReader<R> { pub fn new(rd: R) -> XdrRecordReader<R> { XdrRecordReader { size: 0, consumed: 0, eor: false, reader: rd, } } fn nextrec(&mut self) -> io::Result<bool> { assert_eq!(self.consumed, self.size); let rechdr: u32 = match unpack(&mut self.reader) { Ok(v) => v, Err(Error(ErrorKind::IOError(ref err), _)) if err.kind() == io::ErrorKind::UnexpectedEof => return Ok(true), Err(e) => return Err(mapioerr(e)), }; self.size = (rechdr & !LAST_REC) as usize; self.consumed = 0; self.eor = (rechdr & LAST_REC) != 0; Ok(false) } fn totremains(&self) -> usize { self.size - self.consumed } pub fn eor(&self) -> bool { self.eor } } impl<R: BufRead> Read for XdrRecordReader<R> { fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { let nread = { let data = self.fill_buf()?; let len = min(buf.len(), data.len()); (&data[..len]).read(buf)? }; self.consume(nread); Ok(nread) } } impl<R: BufRead> BufRead for XdrRecordReader<R> { fn fill_buf(&mut self) -> io::Result<&[u8]> { while self.totremains() == 0 { if self.nextrec()? 
{ return Ok(&[]); } } let remains = self.totremains(); let data = self.reader.fill_buf()?; Ok(&data[..min(data.len(), remains)]) } fn consume(&mut self, sz: usize) { assert!(sz <= self.totremains()); self.consumed += sz; self.reader.consume(sz); } } impl<R: BufRead> IntoIterator for XdrRecordReader<R> { type Item = io::Result<Vec<u8>>; type IntoIter = XdrRecordReaderIter<R>; fn into_iter(self) -> Self::IntoIter { XdrRecordReaderIter(Some(self)) } } #[derive(Debug)] pub struct XdrRecordReaderIter<R: BufRead>(Option<XdrRecordReader<R>>); impl<R: BufRead> Iterator for XdrRecordReaderIter<R> { type Item = io::Result<Vec<u8>>; fn next(&mut self) -> Option<Self::Item> { if let Some(mut rr) = self.0.take() { let mut buf = Vec::new(); loop { if rr.totremains() == 0 { match rr.nextrec() { Err(e) => return Some(Err(e)), Ok(true) => return None, Ok(false) => (), } } let remains = rr.totremains(); let eor = rr.eor(); match rr.by_ref().take(remains as u64).read_to_end(&mut buf) { Ok(sz) if sz == remains => (), Ok(_) => return None, Err(e) => return Some(Err(e)), }; if eor { brea
} const WRBUF: usize = 65536; pub struct XdrRecordWriter<W: Write> { buf: Vec<u8>, bufsz: usize, eor: bool, writer: W, } impl<W: Write> XdrRecordWriter<W> { pub fn new(w: W) -> XdrRecordWriter<W> { XdrRecordWriter::with_buffer(w, WRBUF) } pub fn with_buffer(w: W, bufsz: usize) -> XdrRecordWriter<W> { if bufsz == 0 { panic!("bufsz must be non-zero") } XdrRecordWriter { buf: Vec::with_capacity(bufsz), bufsz: bufsz, eor: false, writer: w, } } pub fn flush_eor(&mut self, eor: bool) -> io::Result<()> { if !eor && self.buf.len() == 0 { return Ok(()); } let rechdr = self.buf.len() as u32 | (if eor { LAST_REC } else { 0 }); pack(&rechdr, &mut self.writer).map_err(mapioerr)?; let _ = self.writer.write_all(&self.buf).map(|_| ())?; self.buf.truncate(0); self.eor = eor; self.writer.flush() } } impl<W: Write> Drop for XdrRecordWriter<W> { fn drop(&mut self) { if self.buf.len() > 0 || !self.eor { let _ = self.flush_eor(true); } } } impl<W: Write> Write for XdrRecordWriter<W> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { let mut off = 0; while off < buf.len() { let chunk = &buf[off..off + min(buf.len() - off, self.bufsz)]; if self.buf.len() + chunk.len() > self.bufsz { self.flush()?; } self.buf.extend(chunk); off += chunk.len(); } Ok(off) } fn flush(&mut self) -> io::Result<()> { self.flush_eor(false) } }
k; } } self.0 = Some(rr); Some(Ok(buf)) } else { None } }
function_block-function_prefixed
[ { "content": "/// Pack a fixed-size byte array\n\n///\n\n/// As size is fixed, it doesn't need to be encoded. `sz` is in bytes (and array elements, which are u8)\n\n/// If the array is too large, it is truncated; if its too small its padded with `0x00`.\n\npub fn pack_opaque_array<Out: Write>(val: &[u8], sz: usize, out: &mut Out) -> Result<usize> {\n\n let mut vsz;\n\n let val = &val[..min(sz, val.len())];\n\n\n\n vsz = val.len();\n\n out.write_all(val)?;\n\n\n\n let p = padding(sz);\n\n for _ in val.len()..(sz + p.len()) {\n\n out.write_u8(0)?;\n\n vsz += 1;\n\n }\n\n\n\n Ok(vsz)\n\n}\n\n\n\n/// Pack a dynamically sized array, with size limit check.\n\n///\n\n/// This packs an array of packable objects, and also applies an optional size limit.\n", "file_path": "xdr-codec/src/lib.rs", "rank": 0, "score": 239943.20330265007 }, { "content": "#[inline]\n\npub fn padding(sz: usize) -> &'static [u8] {\n\n &PADDING[..(4 - (sz % 4)) % 4]\n\n}\n\n\n\n/// Wrapper for XDR opaque data.\n\n///\n\n/// In XDR terms, \"opaque data\" is a plain array of bytes, packed as tightly as possible, and then\n\n/// padded to a 4 byte offset. This is different from an array of bytes, where each byte would be\n\n/// padded to 4 bytes when emitted into the array.\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]\n\npub struct Opaque<'a>(pub Cow<'a, [u8]>);\n\n\n\nimpl<'a> Opaque<'a> {\n\n pub fn owned(v: Vec<u8>) -> Opaque<'a> {\n\n Opaque(Cow::Owned(v))\n\n }\n\n pub fn borrowed(v: &'a [u8]) -> Opaque<'a> {\n\n Opaque(Cow::Borrowed(v))\n\n }\n\n}\n", "file_path": "xdr-codec/src/lib.rs", "rank": 1, "score": 186333.52240721844 }, { "content": "/// Deserialization (unpacking) helper function\n\n///\n\n/// This function will read encoded bytes from `input` (a `Read`\n\n/// implementation) and return a fully constructed type (or an\n\n/// error). 
This relies on type inference to determine which type is\n\n/// to be unpacked, so its up to the calling envionment to clarify\n\n/// this. (Generally it falls out quite naturally.)\n\npub fn unpack<In: Read, T: Unpack<In>>(input: &mut In) -> Result<T> {\n\n T::unpack(input).map(|(v, _)| v)\n\n}\n\n\n", "file_path": "xdr-codec/src/lib.rs", "rank": 2, "score": 186292.8740021041 }, { "content": "/// Serialization (packing) helper.\n\n///\n\n/// Helper to serialize any type implementing `Pack` into an implementation of `std::io::Write`.\n\npub fn pack<Out: Write, T: Pack<Out>>(val: &T, out: &mut Out) -> Result<()> {\n\n val.pack(out).map(|_| ())\n\n}\n\n\n", "file_path": "xdr-codec/src/lib.rs", "rank": 3, "score": 183375.45619674894 }, { "content": "/// Unpack (perhaps) length-limited string\n\npub fn unpack_string<In: Read>(input: &mut In, maxsz: Option<usize>) -> Result<(String, usize)> {\n\n let (v, sz) = unpack_opaque_flex(input, maxsz)?;\n\n\n\n String::from_utf8(v).map_err(Error::from).map(|s| (s, sz))\n\n}\n\n\n", "file_path": "xdr-codec/src/lib.rs", "rank": 4, "score": 173719.70535070138 }, { "content": "#[inline]\n\npub fn pack_string<Out: Write>(val: &str, maxsz: Option<usize>, out: &mut Out) -> Result<usize> {\n\n pack_opaque_flex(val.as_bytes(), maxsz, out)\n\n}\n\n\n", "file_path": "xdr-codec/src/lib.rs", "rank": 5, "score": 171793.666070178 }, { "content": "fn check_array(arraysz: usize, rxsize: usize, data: Vec<u32>, defl: Option<u32>) -> bool {\n\n let mut buf = Vec::new();\n\n\n\n // pack data we have into the array\n\n let tsz = match pack_array(&data[..], arraysz, &mut buf, defl.as_ref()) {\n\n Ok(tsz) if data.len() >= arraysz || defl.is_some() => tsz,\n\n e @ Err(Error(ErrorKind::InvalidLen(_), _)) => {\n\n let pass = defl.is_none() && data.len() < arraysz;\n\n if !pass {\n\n println!(\n\n \"pack_array failed {:?}, defl {:?} data.len {} arraysz {}\",\n\n e,\n\n defl,\n\n data.len(),\n\n arraysz\n\n )\n\n }\n\n return pass;\n\n }\n\n Err(e) => {\n", 
"file_path": "xdr-codec/tests/quickcheck.rs", "rank": 6, "score": 170564.15433362118 }, { "content": "fn check_opaque(arraysz: usize, rxsize: usize, data: Vec<u8>) -> bool {\n\n let mut buf = Vec::new();\n\n\n\n // pack data we have into the array\n\n let tsz = pack_opaque_array(&data[..], arraysz, &mut buf).expect(\"pack_array failed\");\n\n if tsz != arraysz + padding(arraysz).len() {\n\n println!(\n\n \"tsz {} arraysz+pad {}\",\n\n tsz,\n\n arraysz + padding(arraysz).len()\n\n );\n\n return false;\n\n }\n\n if buf.len() != tsz {\n\n println!(\"buf.len {} tsz {}\", buf.len(), tsz);\n\n return false;\n\n }\n\n\n\n // if data is shorter than array, then serialized is padded with zero\n\n if data.len() < arraysz {\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 7, "score": 170172.88591819105 }, { "content": "#[inline]\n\npub fn pack_flex<Out: Write, T: Pack<Out>>(\n\n val: &[T],\n\n maxsz: Option<usize>,\n\n out: &mut Out,\n\n) -> Result<usize> {\n\n if maxsz.map_or(false, |m| val.len() > m) {\n\n bail!(ErrorKind::InvalidLen(maxsz.unwrap()));\n\n }\n\n\n\n val.pack(out)\n\n}\n\n\n\n/// Pack a dynamically sized opaque array, with size limit check.\n\n///\n\n/// This packs an array of packable objects, and also applies an optional size limit.\n", "file_path": "xdr-codec/src/lib.rs", "rank": 8, "score": 167196.97104961934 }, { "content": "/// Unpack a (perhaps) length-limited array\n\npub fn unpack_flex<In: Read, T: Unpack<In>>(\n\n input: &mut In,\n\n maxsz: Option<usize>,\n\n) -> Result<(Vec<T>, usize)> {\n\n let (elems, mut sz) = Unpack::unpack(input)?;\n\n\n\n if maxsz.map_or(false, |m| elems > m) {\n\n bail!(ErrorKind::InvalidLen(maxsz.unwrap()));\n\n }\n\n // TODO_THINK_ABOUT: One can cause allocation maximum exceeding in case\n\n // of XDR protocol missmatch (different XDR-files or invalid input data).\n\n // let mut out = Vec::with_capacity(elems);\n\n let mut out = vec![];\n\n\n\n for _ in 0..elems {\n\n let (e, esz) = Unpack::unpack(input)?;\n\n 
out.push(e);\n\n sz += esz;\n\n }\n\n\n\n let p = padding(sz);\n\n for _ in 0..p.len() {\n\n let _ = input.read_u8()?;\n\n }\n\n sz += p.len();\n\n\n\n Ok((out, sz))\n\n}\n\n\n", "file_path": "xdr-codec/src/lib.rs", "rank": 9, "score": 167194.88524377692 }, { "content": "/// Basic unpacking trait\n\n///\n\n/// This trait is used to unpack a type from an XDR encoded byte\n\n/// stream (encoded with `Pack`). It returns the decoded instance and\n\n/// the number of bytes consumed from the input.\n\n///\n\n/// This crate provides implementations for all the basic XDR types,\n\n/// as well as for arrays.\n\npub trait Unpack<In: Read>: Sized {\n\n fn unpack(input: &mut In) -> Result<(Self, usize)>;\n\n}\n\n\n\n#[cfg(feature = \"bytecodec\")]\n\nimpl<In: Read> Unpack<In> for u8 {\n\n #[inline]\n\n fn unpack(input: &mut In) -> Result<(Self, usize)> {\n\n input.read_u32::<BigEndian>().map_err(Error::from).map(\n\n |v| {\n\n (v as u8, 4)\n\n },\n\n )\n\n }\n\n}\n\n\n\n#[cfg(feature = \"bytecodec\")]\n\nimpl<In: Read> Unpack<In> for i8 {\n\n #[inline]\n\n fn unpack(input: &mut In) -> Result<(Self, usize)> {\n", "file_path": "xdr-codec/src/lib.rs", "rank": 10, "score": 167060.77948277068 }, { "content": "/// Pack a fixed-size array.\n\n///\n\n/// As the size is fixed, it doesn't need to be encoded. `sz` is in units of array elements.\n\n/// If the `val` is too large, it is truncated; it is too small, then the array is padded out with\n\n/// default values (if provided). 
If the array is too small and there's no pad/default value, then it fails\n\n/// with `Error::InvalidLen`.\n\npub fn pack_array<Out, T>(val: &[T], sz: usize, out: &mut Out, defl: Option<&T>) -> Result<usize>\n\nwhere\n\n Out: Write,\n\n T: Pack<Out>,\n\n{\n\n let mut vsz = 0;\n\n let val = &val[..min(sz, val.len())];\n\n\n\n for v in val {\n\n vsz += v.pack(out)?;\n\n }\n\n assert!(vsz % 4 == 0);\n\n\n\n if val.len() < sz {\n\n if let Some(defl) = defl {\n\n for _ in val.len()..sz {\n\n vsz += defl.pack(out)?;\n\n }\n\n } else {\n\n bail!(ErrorKind::InvalidLen(sz));\n\n }\n\n }\n\n Ok(vsz)\n\n}\n\n\n", "file_path": "xdr-codec/src/lib.rs", "rank": 11, "score": 161667.47909621536 }, { "content": "#[inline]\n\npub fn pack_opaque_flex<Out: Write>(\n\n val: &[u8],\n\n maxsz: Option<usize>,\n\n out: &mut Out,\n\n) -> Result<usize> {\n\n if maxsz.map_or(false, |m| val.len() > m) {\n\n bail!(ErrorKind::InvalidLen(maxsz.unwrap()));\n\n }\n\n\n\n Opaque::borrowed(val).pack(out)\n\n}\n\n\n\n/// Pack a string with size limit check.\n", "file_path": "xdr-codec/src/lib.rs", "rank": 12, "score": 153739.61216486082 }, { "content": "/// Unpack a fixed-sized opaque array\n\n///\n\n/// Unpack a fixed-size array of raw bytes. The results are placed in `bytes`, but the actual wire-size of\n\n/// the array is `bytesz`. If the supplied `bytes` is too large, the remainer is filled in with 0x00;\n\n/// if it is too small, the excess elements are discarded.\n\n///\n\n/// All the bytes in `bytes` will be initialized after a successful call.\n\npub fn unpack_opaque_array<In: Read>(\n\n input: &mut In,\n\n bytes: &mut [u8],\n\n bytesz: usize,\n\n) -> Result<usize> {\n\n let sz = min(bytesz, bytes.len());\n\n let mut rsz = 0;\n\n\n\n while rsz < sz {\n\n let r = input.read(&mut bytes[rsz..])?;\n\n rsz += r;\n\n }\n\n\n\n // Fill in excess\n\n if sz < bytes.len() {\n\n for b in &mut bytes[sz..] 
{\n\n *b = 0;\n\n }\n\n }\n\n\n", "file_path": "xdr-codec/src/lib.rs", "rank": 13, "score": 153715.88126394927 }, { "content": "/// Unpack a (perhaps) length-limited opaque array\n\n///\n\n/// Unpack an XDR encoded array of bytes, with an optional maximum length.\n\npub fn unpack_opaque_flex<In: Read>(\n\n input: &mut In,\n\n maxsz: Option<usize>,\n\n) -> Result<(Vec<u8>, usize)> {\n\n let (elems, mut sz): (usize, _) = Unpack::unpack(input)?;\n\n\n\n if maxsz.map_or(false, |m| elems > m) {\n\n bail!(ErrorKind::InvalidLen(maxsz.unwrap()));\n\n }\n\n // TODO_THINK_ABOUT: same as unpack_flex\n\n // let mut out = Vec::with_capacity(elems);\n\n let mut out = vec![];\n\n\n\n sz += input.take(elems as u64).read_to_end(&mut out)?;\n\n\n\n let p = padding(sz);\n\n for _ in 0..p.len() {\n\n let _ = input.read_u8()?;\n\n }\n\n sz += p.len();\n\n\n\n Ok((out, sz))\n\n}\n\n\n", "file_path": "xdr-codec/src/lib.rs", "rank": 14, "score": 153710.9552962451 }, { "content": "// Make sure XdrRecordWriter writes the right stuff\n\nfn check_writerec(bufsz: usize, eor: bool, ref bytes: Vec<u8>) -> TestResult {\n\n const EOR: u32 = 1 << 31;\n\n\n\n if bufsz == 0 {\n\n return TestResult::discard();\n\n }\n\n\n\n // Make an expected serialization into fragments\n\n let mut expected = Vec::new();\n\n let nchunks = (bytes.len() + bufsz - 1) / bufsz;\n\n\n\n for (idx, c) in bytes.chunks(bufsz).enumerate() {\n\n let mut len = c.len() as u32;\n\n if nchunks - 1 == idx && eor {\n\n len |= EOR;\n\n }\n\n\n\n if let Err(e) = len.pack(&mut expected) {\n\n return TestResult::error(format!(\"pack failed: {:?}\", e));\n\n }\n", "file_path": "xdr-codec/tests/qc-record.rs", "rank": 15, "score": 148492.1261025571 }, { "content": "/// Basic packing trait.\n\n///\n\n/// This trait is used to implement XDR packing any Rust type into a\n\n/// `Write` stream. 
It returns the number of bytes the encoding took.\n\n///\n\n/// This crate provides a number of implementations for all the basic\n\n/// XDR types, and generated code will generally compose them to pack\n\n/// structures, unions, etc.\n\n///\n\n/// Streams generated by `Pack` can be consumed by `Unpack`.\n\npub trait Pack<Out: Write> {\n\n fn pack(&self, out: &mut Out) -> Result<usize>;\n\n}\n\n\n\n#[cfg(feature = \"bytecodec\")]\n\nimpl<Out: Write> Pack<Out> for u8 {\n\n #[inline]\n\n fn pack(&self, out: &mut Out) -> Result<usize> {\n\n out.write_u32::<BigEndian>(*self as u32)\n\n .map_err(Error::from)\n\n .map(|_| 4)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"bytecodec\")]\n\nimpl<Out: Write> Pack<Out> for i8 {\n\n #[inline]\n\n fn pack(&self, out: &mut Out) -> Result<usize> {\n\n out.write_i32::<BigEndian>(*self as i32)\n\n .map_err(Error::from)\n", "file_path": "xdr-codec/src/lib.rs", "rank": 16, "score": 148313.04030810678 }, { "content": "// Output of packing is a multiple of 4\n\nfn pack<T>(v: T) -> bool\n\nwhere\n\n T: PartialEq + Pack<Cursor<Vec<u8>>>,\n\n{\n\n let mut data = Cursor::new(Vec::new());\n\n\n\n let sz = v.pack(&mut data).expect(\"pack failed\");\n\n sz % 4 == 0\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 17, "score": 148117.32712082154 }, { "content": "fn digit<F: Fn(u8) -> bool>(input: &[u8], isdigit: F) -> IResult<&[u8], &[u8]> {\n\n for (idx, item) in input.iter().enumerate() {\n\n if !isdigit(*item) {\n\n if idx == 0 {\n\n return Error(Err::Position(ErrorKind::Digit, input));\n\n } else {\n\n return Done(&input[idx..], &input[0..idx]);\n\n }\n\n }\n\n }\n\n Incomplete(Needed::Unknown)\n\n}\n\n\n\nnamed!(lbrace, preceded!(spaces, apply!(ctag, \"{\")));\n\nnamed!(rbrace, preceded!(spaces, apply!(ctag, \"}\")));\n\nnamed!(lbrack, preceded!(spaces, apply!(ctag, \"[\")));\n\nnamed!(rbrack, preceded!(spaces, apply!(ctag, \"]\")));\n\nnamed!(lparen, preceded!(spaces, apply!(ctag, \"(\")));\n\nnamed!(rparen, preceded!(spaces, 
apply!(ctag, \")\")));\n\nnamed!(lt, preceded!(spaces, apply!(ctag, \"<\")));\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 19, "score": 136723.47998966233 }, { "content": "fn is_hexdigit(ch: u8) -> bool {\n\n match ch as char {\n\n '0'..='9' | 'A'..='F' | 'a'..='f' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 20, "score": 128411.84240987065 }, { "content": "fn is_octdigit(ch: u8) -> bool {\n\n match ch as char {\n\n '0'..='7' => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 21, "score": 128411.84240987065 }, { "content": "// Packing something then unpacking returns the same value\n\nfn short_unpack<T>(v: T) -> bool\n\nwhere\n\n T: PartialEq + Pack<Cursor<Vec<u8>>> + Unpack<Cursor<Vec<u8>>>,\n\n{\n\n let mut data = Cursor::new(Vec::new());\n\n\n\n let psz = v.pack(&mut data).expect(\"pack failed\");\n\n\n\n // truncate data to make sure unpacking fails\n\n let data = data.into_inner();\n\n assert_eq!(psz, data.len());\n\n let data = Vec::from(&data[..data.len() - 1]);\n\n\n\n let mut data = Cursor::new(data);\n\n match T::unpack(&mut data) {\n\n Err(Error(ErrorKind::IOError(_), _)) => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 22, "score": 124809.58252797043 }, { "content": "/// Unpack a fixed-sized array\n\n///\n\n/// Unpack a fixed-size array of elements. The results are placed in `array`, but the actual wire-size of\n\n/// the array is `arraysz`. 
If the supplied `array` is too large, the remainer is filled in with the\n\n/// default value (if provided); if it is too small, the excess elements are discarded.\n\n///\n\n/// If the provided array is too large and there is no default, then decoding fails with an `InvalidLen` error.\n\n/// All the elements in `array` will be initialized after a successful return.\n\npub fn unpack_array<In, T>(\n\n input: &mut In,\n\n array: &mut [T],\n\n arraysz: usize,\n\n defl: Option<&T>,\n\n) -> Result<usize>\n\nwhere\n\n In: Read,\n\n T: Unpack<In> + Clone,\n\n{\n\n #[inline]\n\n fn set<T>(p: &mut T, v: T) { *p = v }\n\n #[inline]\n\n fn drop<T>(_: &mut T) { }\n\n\n\n unpack_array_with(input, array, arraysz, set, drop, defl)\n\n}\n\n\n\n/// Specialized variant of `unpack_array` which initializes the element via a callback. This is primarily\n\n/// so that the array can be uninitialized, and we initialize it element at a time with `ptr::write()`.\n", "file_path": "xdr-codec/src/lib.rs", "rank": 23, "score": 108922.15369781846 }, { "content": "#[inline]\n\npub fn unpack_array_with<In, T>(\n\n input: &mut In,\n\n array: &mut [T],\n\n arraysz: usize,\n\n set: fn (&mut T, T),\n\n drop: fn(&mut T),\n\n defl: Option<&T>,\n\n) -> Result<usize>\n\nwhere\n\n In: Read,\n\n T: Unpack<In> + Clone,\n\n{\n\n let mut rsz = 0;\n\n let sz = min(arraysz, array.len());\n\n \n\n // If we fail part way through then return the error and the index we got up to\n\n // so we can clean up the entries we did initialize.\n\n let res = (|| {\n\n for (idx, elem) in (&mut array[..sz]).into_iter().enumerate() {\n\n let (v, sz) = match Unpack::unpack(input) {\n", "file_path": "xdr-codec/src/lib.rs", "rank": 24, "score": 108908.66103901545 }, { "content": "pub fn exclude_definition_line(line: &str, exclude_defs: &[&str]) -> bool {\n\n exclude_defs.iter().fold(false, |acc, v| {\n\n acc || line.contains(&format!(\"const {}\", v))\n\n || line.contains(&format!(\"struct {}\", v))\n\n || 
line.contains(&format!(\"enum {}\", v))\n\n || line.contains(&format!(\"for {}\", v))\n\n })\n\n}\n\n\n", "file_path": "xdrgen/src/lib.rs", "rank": 25, "score": 107881.73154541841 }, { "content": "// Packing something then unpacking returns the same value\n\nfn codec<T>(v: T) -> bool\n\nwhere\n\n T: PartialEq + Pack<Cursor<Vec<u8>>> + Unpack<Cursor<Vec<u8>>>,\n\n{\n\n let mut data = Cursor::new(Vec::new());\n\n\n\n let psz = v.pack(&mut data).expect(\"pack failed\");\n\n\n\n let mut data = Cursor::new(data.into_inner());\n\n let (uv, usz) = T::unpack(&mut data).expect(\"unpack failed\");\n\n\n\n psz == usz && v == uv\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 26, "score": 101097.7938692261 }, { "content": "fn token(input: &[u8]) -> IResult<&[u8], &[u8]> {\n\n let input = ws(input);\n\n\n\n for (idx, item) in input.iter().enumerate() {\n\n match *item as char {\n\n 'a'..='z' | 'A'..='Z' | '_' => continue,\n\n '0'..='9' if idx > 0 => continue,\n\n _ => {\n\n if idx > 0 {\n\n return Done(&input[idx..], &input[0..idx]);\n\n } else {\n\n return Error(Err::Position(ErrorKind::AlphaNumeric, input));\n\n }\n\n }\n\n }\n\n }\n\n Incomplete(Needed::Unknown)\n\n}\n\n\n\nmacro_rules! 
kw {\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 27, "score": 100616.72289380155 }, { "content": "fn ws(input: &[u8]) -> &[u8] {\n\n match spaces(input) {\n\n Done(rest, _) => rest,\n\n _ => input,\n\n }\n\n}\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 28, "score": 98568.0456855236 }, { "content": "fn eof(input: &[u8]) -> IResult<&[u8], ()> {\n\n if input.len() == 0 {\n\n IResult::Done(input, ())\n\n } else {\n\n IResult::Error(Err::Position(ErrorKind::Eof, input))\n\n }\n\n}\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 29, "score": 94548.16197709745 }, { "content": "// Complete tag\n\nfn ctag<T: AsRef<[u8]>>(input: &[u8], tag: T) -> IResult<&[u8], &[u8]> {\n\n complete!(input, tag!(tag.as_ref()))\n\n}\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 30, "score": 93666.10114014623 }, { "content": "fn ident(input: &[u8]) -> IResult<&[u8], &str> {\n\n // Grab an identifier and make sure it isn't a keyword\n\n match token(input) {\n\n Done(rest, val) => {\n\n match keyword(input) {\n\n Done(..) => Error(Err::Position(ErrorKind::Custom(1), val)),\n\n Error(..) | Incomplete(..) => Done(rest, str::from_utf8(val).unwrap()),\n\n }\n\n }\n\n Error(e) => Error(e),\n\n Incomplete(need) => Incomplete(need),\n\n }\n\n}\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 31, "score": 90886.55344184529 }, { "content": "/// Generate Rust code from an RFC4506 XDR specification\n\n///\n\n/// `infile` is simply a string used in error messages; it may be empty. 
`input` is a read stream of\n\n/// the specification, and `output` is where the generated code is sent.\n\n/// `exclude_defs` is list of not generated type definitions.\n\npub fn generate<In, Out>(\n\n infile: &str,\n\n mut input: In,\n\n mut output: Out,\n\n exclude_defs: &[&str],\n\n) -> Result<()>\n\nwhere\n\n In: Read,\n\n Out: Write,\n\n{\n\n let mut source = String::new();\n\n\n\n input.read_to_string(&mut source)?;\n\n\n\n let xdr = match spec::specification(&source) {\n\n Ok(defns) => Symtab::new(&defns),\n\n Err(e) => return Err(xdr::Error::from(format!(\"parse error: {}\", e))),\n\n };\n\n\n\n let xdr = xdr;\n", "file_path": "xdrgen/src/lib.rs", "rank": 32, "score": 88983.88621007936 }, { "content": "// Make sure record structure survives a round trip\n\nfn check_codec(bufsz: usize, ref records: Vec<Vec<u8>>) -> TestResult {\n\n if bufsz == 0 {\n\n return TestResult::discard();\n\n }\n\n\n\n let mut buf = Vec::new();\n\n\n\n for rec in records {\n\n let mut xw = XdrRecordWriter::with_buffer(&mut buf, bufsz);\n\n\n\n if let Err(e) = xw.write(rec) {\n\n return TestResult::error(format!(\"xw write failed: {:?}\", e));\n\n }\n\n }\n\n\n\n {\n\n let cur = Cursor::new(buf);\n\n let xr = XdrRecordReader::new(cur);\n\n\n\n for (res, orig) in xr.into_iter().zip(records) {\n", "file_path": "xdr-codec/tests/qc-record.rs", "rank": 33, "score": 82076.17278655872 }, { "content": "#[test]\n\nfn consts() {\n\n let name = \"consts\";\n\n let spec = r#\"\n\n const FOO = 1;\n\n const BAR = -1;\n\n \"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 34, "score": 73916.54684215687 }, { "content": "#[test]\n\nfn recursive_type() {\n\n let name = \"recursive_type\";\n\n let spec = r#\"\n", "file_path": "xdrgen/tests/lib.rs", "rank": 35, "score": 71233.27625929433 }, { "content": "#[test]\n\nfn inline_struct() {\n\n let spec = r#\"\n\n struct thing {\n\n struct { int a; 
int b; } thing;\n\n };\n\n\"#;\n\n let s = specification(spec);\n\n\n\n println!(\"spec {:?}\", s);\n\n assert!(s.is_ok());\n\n\n\n let g = generate(\"\", Cursor::new(spec.as_bytes()), Vec::new(), &[]);\n\n assert!(g.is_err());\n\n}\n\n\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 36, "score": 68829.73783292048 }, { "content": "#[test]\n\nfn case_type() {\n\n let specs = vec![\n\n \"enum Foo { A, B, C }; union Bar switch (Foo x) { case A: void; case B: void; case C: void; };\",\n\n \"union Bar switch (int x) { case 1: void; case 2: void; case 3: void; };\",\n\n ];\n\n\n\n for sp in specs {\n\n let s = specification(sp);\n\n println!(\"spec sp \\\"{}\\\" => {:?}\", sp, s);\n\n assert!(s.is_ok());\n\n\n\n let g = generate(\"\", Cursor::new(sp.as_bytes()), Vec::new(), &[]);\n\n assert!(g.is_ok());\n\n }\n\n}\n\n\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 37, "score": 68811.44116038033 }, { "content": "#[test]\n\nfn basic_bool() {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(true.pack(&mut out).unwrap(), 4);\n\n assert_eq!(false.pack(&mut out).unwrap(), 4);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 8);\n\n assert_eq!(v, vec![0, 0, 0, 1, 0, 0, 0, 0]);\n\n\n\n let mut input = Cursor::new(v);\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (true, 4));\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (false, 4));\n\n\n\n let bad = vec![0, 0, 0, 2];\n\n let mut input = Cursor::new(bad);\n\n match bool::unpack(&mut input) {\n\n Err(Error(ErrorKind::InvalidEnum(_), _)) => (),\n\n res => panic!(\"bad result {:?}\", res),\n\n }\n\n}\n\n\n", "file_path": "xdr-codec/src/test.rs", "rank": 38, "score": 68684.31590574057 }, { "content": "/// Simplest possible way to generate Rust code from an XDR specification.\n\n///\n\n/// It is intended for use in a build.rs script:\n\n///\n\n/// ```ignore\n\n/// extern crate xdrgen;\n\n///\n\n/// fn main() {\n\n/// xdrgen::compile(\"src/simple.x\").unwrap();\n\n/// }\n\n/// 
```\n\n///\n\n/// Output is put into OUT_DIR, and can be included:\n\n///\n\n/// ```ignore\n\n/// mod simple {\n\n/// use xdr_codec;\n\n///\n\n/// include!(concat!(env!(\"OUT_DIR\"), \"/simple_xdr.rs\"));\n\n/// }\n\n/// ```\n\n///\n\n/// If your specification uses types which are not within the specification, you can provide your\n\n/// own implementations of `Pack` and `Unpack` for them.\n\npub fn compile<P>(infile: P, exclude_defs: &[&str]) -> Result<()>\n\nwhere\n\n P: AsRef<Path> + Display,\n\n{\n\n let input = File::open(&infile)?;\n\n\n\n let mut outdir = PathBuf::from(env::var(\"OUT_DIR\").unwrap_or(String::from(\".\")));\n\n let outfile = PathBuf::from(infile.as_ref())\n\n .file_stem()\n\n .unwrap()\n\n .to_owned()\n\n .into_string()\n\n .unwrap()\n\n .replace(\"-\", \"_\");\n\n\n\n outdir.push(&format!(\"{}_xdr.rs\", outfile));\n\n\n\n let output = File::create(outdir)?;\n\n\n\n generate(\n\n infile.as_ref().as_os_str().to_str().unwrap_or(\"<unknown>\"),\n\n input,\n\n output,\n\n exclude_defs,\n\n )\n\n}\n", "file_path": "xdrgen/src/lib.rs", "rank": 39, "score": 67877.19542396785 }, { "content": "#[test]\n\nfn recread_iter() {\n\n let inbuf = vec![\n\n 0,\n\n 0,\n\n 0,\n\n 5,\n\n 0,\n\n 1,\n\n 2,\n\n 3,\n\n 4,\n\n 128,\n\n 0,\n\n 0,\n\n 5,\n\n 5,\n\n 6,\n\n 7,\n\n 8,\n\n 9,\n", "file_path": "xdr-codec/tests/test-record.rs", "rank": 40, "score": 66638.70371304978 }, { "content": "#[test]\n\nfn test_const() {\n\n assert_eq!(const_def(&b\"const foo = 123;\"[..]), Done(&b\"\"[..], Defn::constant(\"foo\", 123)));\n\n}\n\n\n\nnamed!(type_def<Defn>,\n\n alt!(\n\n do_parse!(kw_typedef >> decl: nonvoid_declaration >> semi >>\n\n ({\n\n match decl.clone() {\n\n Decl::Named(name, ty) => {\n\n if ty.is_syn() {\n\n Defn::typesyn(name, ty)\n\n } else {\n\n Defn::typespec(name, ty)\n\n }\n\n },\n\n Decl::Void => panic!(\"void non-void declaration?\"),\n\n }\n\n })\n\n )\n\n | do_parse!(kw_enum >> id:ident >> e:enum_body >> semi >> (Defn::typespec(id, 
Type::Enum(e))))\n\n | do_parse!(kw_struct >> id:ident >> s:struct_body >> semi >> (Defn::typespec(id, Type::Struct(s))))\n\n | do_parse!(kw_union >> id:ident >> u:union_body >> semi >> (Defn::typespec(id, Type::union(u))))\n\n )\n\n);\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 41, "score": 66619.5543512677 }, { "content": "#[test]\n\nfn test_type() {\n\n assert_eq!(type_spec(&b\"int \"[..]), Done(&b\" \"[..], Type::Int));\n\n assert_eq!(type_spec(&b\"unsigned int \"[..]), Done(&b\" \"[..], Type::UInt));\n\n assert_eq!(type_spec(&b\"unsigned\\nint \"[..]), Done(&b\" \"[..], Type::UInt));\n\n assert_eq!(type_spec(&b\"unsigned/* foo */int \"[..]), Done(&b\" \"[..], Type::UInt));\n\n assert_eq!(type_spec(&b\"unsigned//\\nint \"[..]), Done(&b\" \"[..], Type::UInt));\n\n\n\n assert_eq!(type_spec(&b\"unsigned hyper \"[..]), Done(&b\" \"[..], Type::UHyper));\n\n\n\n assert_eq!(type_spec(&b\"unsigned char \"[..]), Done(&b\" \"[..],\n\n Type::Ident(\"u8\".into(), Some(Derives::COPY | Derives::CLONE | Derives::EQ | Derives::PARTIALEQ | Derives::DEBUG))));\n\n assert_eq!(type_spec(&b\"unsigned short \"[..]), Done(&b\" \"[..], Type::UInt));\n\n\n\n assert_eq!(type_spec(&b\" hyper \"[..]), Done(&b\" \"[..], Type::Hyper));\n\n assert_eq!(type_spec(&b\" double \"[..]), Done(&b\" \"[..], Type::Double));\n\n assert_eq!(type_spec(&b\"// thing\\nquadruple \"[..]), Done(&b\" \"[..], Type::Quadruple));\n\n assert_eq!(type_spec(&b\"// thing\\n bool \"[..]), Done(&b\" \"[..], Type::Bool));\n\n\n\n assert_eq!(type_spec(&b\"char \"[..]), Done(&b\" \"[..],\n\n Type::Ident(\"i8\".into(), Some(Derives::COPY | Derives::CLONE | Derives::EQ | Derives::PARTIALEQ | Derives::DEBUG))));\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 42, "score": 66584.06268431433 }, { "content": "#[test]\n\nfn case_type_mismatch() {\n\n let specs = vec![\n\n \"enum Foo { A, B, C}; union Bar switch (Foo x) { case 1: void; case 2: void; case 3: void; };\",\n\n \"enum Foo { A, B, C}; union Bar 
switch (int x) { case A: void; case B: void; case C: void; };\",\n\n ];\n\n\n\n for sp in specs {\n\n let s = specification(sp);\n\n println!(\"spec sp \\\"{}\\\" => {:?}\", sp, s);\n\n assert!(s.is_ok());\n\n\n\n let g = generate(\"\", Cursor::new(sp.as_bytes()), Vec::new(), &[]);\n\n assert!(g.is_err());\n\n }\n\n}\n\n\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 43, "score": 66584.06268431433 }, { "content": "#[test]\n\nfn read_zerorec() {\n\n let inbuf = vec![0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0];\n\n\n\n let cur = Cursor::new(inbuf);\n\n let mut recread = XdrRecordReader::new(cur);\n\n\n\n let mut buf = [0; 100];\n\n assert_eq!(recread.read(&mut buf).unwrap(), 0);\n\n assert!(recread.eor());\n\n}\n\n\n", "file_path": "xdr-codec/tests/test-record.rs", "rank": 44, "score": 66531.87193902506 }, { "content": "#[test]\n\nfn quickcheck_pack_float() {\n\n quickcheck_pack_t::<f32>();\n\n quickcheck_pack_t::<f64>();\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 45, "score": 66444.46783685357 }, { "content": "#[test]\n\nfn quickcheck_pack_iu64() {\n\n quickcheck_pack_t::<i64>();\n\n quickcheck_pack_t::<u64>();\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 46, "score": 66444.46783685357 }, { "content": "#[test]\n\nfn quickcheck_pack_ui32() {\n\n quickcheck_pack_t::<i32>();\n\n quickcheck_pack_t::<u32>();\n\n quickcheck_pack_t::<usize>();\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 47, "score": 66444.46783685357 }, { "content": "pub fn specification(input: &str) -> Result<Vec<Defn>, String> {\n\n match spec(input.as_bytes()) {\n\n Done(_, spec) => Ok(spec),\n\n Error(Err::Position(kind, input)) => {\n\n Err(format!(\n\n \"{:?}: {}\",\n\n kind,\n\n String::from(str::from_utf8(input).unwrap())\n\n ))\n\n }\n\n Error(err) => Err(format!(\"Error: {:?}\", err)),\n\n Incomplete(need) => Err(format!(\"Incomplete {:?}\", need)),\n\n }\n\n}\n\n\n\nnamed!(spec< Vec<Defn> >,\n\n do_parse!(\n\n opt!(directive) 
>>\n\n defns: many0!(definition) >>\n\n spaces >> eof >>\n\n (defns))\n\n);\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 48, "score": 66441.11350880565 }, { "content": "fn quickcheck_pack_t<T>()\n\nwhere\n\n T: PartialEq + Pack<Cursor<Vec<u8>>> + Unpack<Cursor<Vec<u8>>> + Arbitrary + Debug,\n\n{\n\n quickcheck(pack as fn(T) -> bool);\n\n quickcheck(pack as fn(Vec<T>) -> bool);\n\n quickcheck(pack as fn(Option<T>) -> bool);\n\n quickcheck(pack as fn(Vec<Option<T>>) -> bool);\n\n quickcheck(pack as fn(Option<Vec<T>>) -> bool);\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 49, "score": 65169.71168698951 }, { "content": "#[test]\n\nfn quickcheck_short_unpack_ui32() {\n\n quickcheck_short_unpack_t::<i32>();\n\n quickcheck_short_unpack_t::<u32>();\n\n quickcheck_short_unpack_t::<usize>();\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 50, "score": 64377.470710482856 }, { "content": "#[test]\n\nfn quickcheck_short_unpack_iu64() {\n\n quickcheck_short_unpack_t::<i64>();\n\n quickcheck_short_unpack_t::<u64>();\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 51, "score": 64377.470710482856 }, { "content": "#[test]\n\nfn quickcheck_short_unpack_float() {\n\n quickcheck_short_unpack_t::<f32>();\n\n quickcheck_short_unpack_t::<f64>();\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 52, "score": 64377.470710482856 }, { "content": "fn quickcheck_short_unpack_t<T>()\n\nwhere\n\n T: PartialEq + Pack<Cursor<Vec<u8>>> + Unpack<Cursor<Vec<u8>>> + Arbitrary + Debug,\n\n{\n\n quickcheck(short_unpack as fn(T) -> bool);\n\n quickcheck(short_unpack as fn(Vec<T>) -> bool);\n\n quickcheck(short_unpack as fn(Option<T>) -> bool);\n\n quickcheck(short_unpack as fn(Vec<Option<T>>) -> bool);\n\n quickcheck(short_unpack as fn(Option<Vec<T>>) -> bool);\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 53, "score": 63102.7145606188 }, { "content": "struct bar {\n\n int a;\n\n int 
b;\n\n};\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 54, "score": 58154.525370104966 }, { "content": "#[inline]\n\nfn ignore<T>(_: T) -> () {\n\n ()\n\n}\n\n\n", "file_path": "xdrgen/src/spec/xdr_nom.rs", "rank": 55, "score": 56740.165301046465 }, { "content": "pub trait Emit {\n\n fn define(&self, symtab: &Symtab) -> Result<TokenStream>;\n\n}\n\n\n", "file_path": "xdrgen/src/spec/mod.rs", "rank": 56, "score": 54026.91097649271 }, { "content": "struct list { list *next; };\n\n\"#;\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 57, "score": 53066.17386736478 }, { "content": "pub trait Emitpack: Emit {\n\n fn pack(&self, symtab: &Symtab) -> Result<Option<TokenStream>>;\n\n fn unpack(&self, symtab: &Symtab) -> Result<Option<TokenStream>>;\n\n}\n\n\n\nimpl Emit for Const {\n\n fn define(&self, _: &Symtab) -> Result<TokenStream> {\n\n let name = quote_ident(&self.0);\n\n let val = &self.1;\n\n\n\n Ok(quote!(pub const #name: i64 = #val;))\n\n }\n\n}\n\n\n\nimpl Emit for Typesyn {\n\n fn define(&self, symtab: &Symtab) -> Result<TokenStream> {\n\n let ty = &self.1;\n\n let name = quote_ident(&self.0);\n\n let tok = ty.as_token(symtab)?;\n\n Ok(quote!(pub type #name = #tok;))\n", "file_path": "xdrgen/src/spec/mod.rs", "rank": 58, "score": 50662.01875521848 }, { "content": "fn main() {\n\n let _ = env_logger::init();\n\n\n\n let matches = App::new(\"XDR code generator\")\n\n .version(env!(\"CARGO_PKG_VERSION\"))\n\n .arg_from_usage(\"[FILE] 'Set .x file'\")\n\n .get_matches();\n\n\n\n let output = stdout();\n\n let mut err = stderr();\n\n\n\n let res = if let Some(fname) = matches.value_of(\"FILE\") {\n\n let f = match File::open(fname) {\n\n Ok(f) => f,\n\n Err(e) => {\n\n let _ = writeln!(&mut err, \"Failed to open {}: {}\", fname, e);\n\n std::process::exit(1);\n\n }\n\n };\n\n generate(fname, BufReader::new(f), output, &[])\n\n } else {\n\n 
generate(\"stdin\", BufReader::new(stdin()), output, &[])\n\n };\n\n\n\n if let Err(e) = res {\n\n let _ = writeln!(&mut err, \"Failed: {}\", e);\n\n }\n\n}\n", "file_path": "xdrgen/src/xdrgen.rs", "rank": 59, "score": 42242.62791512338 }, { "content": "#[test]\n\nfn arrays() {\n\n let name = \"arrays\";\n\n let spec = r#\"\n\n struct a { opaque data[15]; };\n\n struct b { int things[10]; };\n\n struct c { string decitweet[14]; };\n\n struct d { c tweetses[10]; };\n\n struct big { c tweetses[100]; };\n\n \"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 60, "score": 42242.62791512338 }, { "content": "#[test]\n\nfn simple() {\n\n let name = \"simple\";\n\n let specs = vec![\n\n \"struct foo { int bar; unsigned int blat; hyper foo; unsigned hyper hyperfoo; };\",\n\n \"const blop = 123;\",\n\n \"typedef opaque Ioaddr<>;\",\n\n ];\n\n\n\n for (i, spec) in specs.into_iter().enumerate() {\n\n let name = format!(\"{}_{}\", name, i);\n\n\n\n if let Err(e) = build_test(&name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 61, "score": 42242.62791512338 }, { "content": "#[test]\n\nfn enums() {\n\n let name = \"enums\";\n\n let spec = r#\"\n\n enum Foo {\n\n A = 0,\n\n B = -1\n\n };\n\n struct Bar { Foo x; };\n\n \"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 62, "score": 42242.62791512338 }, { "content": "fn main() {{}}\n\n\"#,\n\n testfile.as_os_str().to_string_lossy()\n\n );\n\n\n\n {\n\n let mut main = File::create(&mainfile)?;\n\n main.write_all(template.as_bytes())?;\n\n }\n\n\n\n {\n\n let mut cargo = File::create(&cargotoml)?;\n\n cargo.write_all(toml.as_bytes())?;\n\n }\n\n\n\n let _ = create_dir_all(&cargohome);\n\n\n\n {\n\n let test = File::create(&testfile)?;\n\n 
generate(name, Cursor::new(xdr_spec.as_bytes()), test, &[])?;\n", "file_path": "xdrgen/tests/lib.rs", "rank": 63, "score": 42242.62791512338 }, { "content": "fn main() {\n\n println!(\"cargo:rerun-if-changed=src/simple.x\");\n\n xdrgen::compile(\"src/simple.x\", &[]).unwrap();\n\n}\n", "file_path": "xdrgen/example/build.rs", "rank": 64, "score": 42242.62791512338 }, { "content": "#[test]\n\nfn unions() {\n\n let name = \"unions\";\n\n let spec = r#\"\n\n enum Foo {\n\n A = 0,\n\n B = -1\n\n };\n\n union foo switch (Foo bar) {\n\n case A: int val;\n\n case B: void;\n\n default: int other;\n\n };\n\n union foo2 switch (Foo bar) {\n\n case A: void;\n\n case B: int a;\n\n default: int other;\n\n };\n\n \"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 65, "score": 42242.62791512338 }, { "content": "#[test]\n\nfn rfc4506() {\n\n let name = \"rfc4506\";\n\n let spec = r#\"\n\n\n\n const MAXUSERNAME = 32; /* max length of a user name */\n\n const MAXFILELEN = 65535; /* max length of a file */\n\n const MAXNAMELEN = 255; /* max length of a file name */\n\n\n\n /*\n\n * Types of files:\n\n */\n\n enum filekind {\n\n TEXT = 0, /* ascii data */\n\n DATA = 1, /* raw data */\n\n EXEC = 2 /* executable */\n\n };\n\n\n\n /*\n\n * File information, per kind of file:\n\n */\n", "file_path": "xdrgen/tests/lib.rs", "rank": 66, "score": 42242.62791512338 }, { "content": "#[test]\n\nfn flex() {\n\n let name = \"flex\";\n\n let spec = r#\"\n\n struct a { opaque data<>; opaque limdata<15>; };\n\n struct b { string s<>; string limstr<32>; };\n\n struct c { a athing<>; a alim<10>; };\n\n \"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 67, "score": 42242.62791512338 }, { "content": "#[test]\n\nfn kwishnames() {\n\n let kws = vec![\n\n \"bool\",\n\n \"case\",\n\n 
\"const\",\n\n \"default\",\n\n \"double\",\n\n \"enum\",\n\n \"float\",\n\n \"hyper\",\n\n \"int\",\n\n \"opaque\",\n\n \"quadruple\",\n\n \"string\",\n\n \"struct\",\n\n \"switch\",\n\n \"typedef\",\n\n \"union\",\n\n \"unsigned\",\n\n \"void\",\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 68, "score": 40980.56017327675 }, { "content": "#[test]\n\nfn basic_32() {\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(0u32.pack(&mut out).unwrap(), 4);\n\n assert_eq!(1000u32.pack(&mut out).unwrap(), 4);\n\n assert_eq!(823987423u32.pack(&mut out).unwrap(), 4);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 12);\n\n assert_eq!(v, vec![0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x03, 0xe8,\n\n 0x31, 0x1d, 0x0c, 0xdf, ]);\n\n\n\n let mut input = Cursor::new(v);\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (0u32, 4));\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (1000u32, 4));\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (823987423u32, 4));\n\n }\n", "file_path": "xdr-codec/src/test.rs", "rank": 69, "score": 40980.56017327675 }, { "content": "#[cfg(feature = \"bytecodec\")]\n\n#[test]\n\nfn basic_8() {\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(0u8.pack(&mut out).unwrap(), 4);\n\n assert_eq!(100u8.pack(&mut out).unwrap(), 4);\n\n assert_eq!((-1i8).pack(&mut out).unwrap(), 4);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 12);\n\n assert_eq!(v, vec![0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x64,\n\n 0xff, 0xff, 0xff, 0xff, ]);\n\n\n\n let mut input = Cursor::new(v);\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (0u8, 4));\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (100u8, 4));\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (-1i8, 4));\n\n }\n", "file_path": "xdr-codec/src/test.rs", "rank": 70, "score": 40980.56017327675 }, { "content": "#[test]\n\nfn kwnames() {\n\n let kws = vec![\n\n \"bool\",\n\n \"case\",\n\n \"const\",\n\n \"default\",\n\n 
\"double\",\n\n \"enum\",\n\n \"float\",\n\n \"hyper\",\n\n \"int\",\n\n \"opaque\",\n\n \"quadruple\",\n\n \"string\",\n\n \"struct\",\n\n \"switch\",\n\n \"typedef\",\n\n \"union\",\n\n \"unsigned\",\n\n \"void\",\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 71, "score": 40980.56017327675 }, { "content": "#[test]\n\nfn constants() {\n\n let specs = vec![\n\n \"const A = 0;\",\n\n \"const A = 0x0;\",\n\n \"const A = 00;\",\n\n \"const A = -0;\",\n\n \"const A = 0x123;\",\n\n \"const A = 0123;\",\n\n \"const A = -0123;\",\n\n \"const A = 123;\",\n\n \"const A = -123;\",\n\n ];\n\n\n\n for sp in specs {\n\n let s = specification(sp);\n\n println!(\"spec sp \\\"{}\\\" => {:?}\", sp, s);\n\n assert!(s.is_ok());\n\n\n\n let g = generate(\"\", Cursor::new(sp.as_bytes()), Vec::new(), &[]);\n\n assert!(g.is_ok());\n\n }\n\n}\n\n\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 72, "score": 40980.56017327675 }, { "content": "#[test]\n\nfn union_with_default() {\n\n let name = \"union_with_default\";\n\n let spec = r#\"\n\nunion foo switch (int bar) {\n\ncase 1:\n\n int val;\n\ndefault:\n\n void;\n\n};\n\n\"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 73, "score": 40980.56017327675 }, { "content": "#[test]\n\nfn derive_float() {\n\n let name = \"derive_float\";\n\n let spec = r#\"\n\n struct a { float a; double b; };\n\n \"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n", "file_path": "xdrgen/tests/lib.rs", "rank": 74, "score": 40980.56017327675 }, { "content": "#[test]\n\nfn typedef_arrays() {\n\n let name = \"typedef_arrays\";\n\n let spec = r#\"\n\ntypedef opaque buf1<20>;\n\ntypedef opaque buf2[10];\n\ntypedef opaque buf3<>;\n\n\"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 
75, "score": 40980.56017327675 }, { "content": "#[test]\n\nfn basic_64() {\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(0u64.pack(&mut out).unwrap(), 8);\n\n assert_eq!(0x0011223344556677u64.pack(&mut out).unwrap(), 8);\n\n assert_eq!(0xff00ff00ff00ff00u64.pack(&mut out).unwrap(), 8);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 24);\n\n assert_eq!(v, vec![0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x11, 0x22, 0x33, 0x44, 0x55, 0x66, 0x77,\n\n 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00 ]);\n\n\n\n let mut input = Cursor::new(v);\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (0u64, 8));\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (4822678189205111u64, 8));\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (18374966859414961920u64, 8));\n\n }\n", "file_path": "xdr-codec/src/test.rs", "rank": 76, "score": 40980.56017327675 }, { "content": "fn main() {\n\n let foo = simple::Foo {\n\n a: 1, b: 2, c: 3,\n\n bar: vec![simple::Bar { data: vec![1,2,3] }],\n\n barish: None,\n\n name: String::from(\"foox\"),\n\n thing: simple::Things::C,\n\n type_: 123,\n\n };\n\n // \"derive_serde\" feature makes this working\n\n // println!(\"Serialized JSON: {}\", serde_json::to_string(&foo).unwrap());\n\n\n\n let mut buf = Vec::new();\n\n\n\n pack(&foo, &mut buf).unwrap();\n\n println!(\"foo={:?}\", foo);\n\n println!(\"buf={:?} len={}\", buf, buf.len());\n\n\n\n let mut cur = Cursor::new(buf);\n\n \n\n let foo2 = unpack(&mut cur).unwrap();\n\n\n\n println!(\"foo={:?}\", foo);\n\n println!(\"foo2={:?}\", foo2);\n\n assert_eq!(foo, foo2);\n\n}\n", "file_path": "xdrgen/example/src/simple.rs", "rank": 77, "score": 40980.56017327675 }, { "content": "#[test]\n\nfn bounded_string() {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(String::from(\"hello, world\").pack(&mut out).unwrap(), 16);\n\n\n\n let v = out.into_inner();\n\n\n\n {\n\n let mut input = Cursor::new(v.clone());\n\n 
assert_eq!(unpack_string(&mut input, Some(16)).expect(\"unpack_string failed\"),\n\n (String::from(\"hello, world\"), 16));\n\n }\n\n {\n\n let mut input = Cursor::new(v.clone());\n\n match unpack_string(&mut input, Some(5)) {\n\n Result::Err(Error(ErrorKind::InvalidLen(_), _)) => (),\n\n e => panic!(\"Unexpected {:?}\", e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "xdr-codec/src/test.rs", "rank": 78, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn union_simple() {\n\n let s = specification(\n\n r#\"\n\nunion foo switch (int x) {\n\ncase 0:\n\n int val;\n\n};\n\n\"#,\n\n );\n\n println!(\"spec {:?}\", s);\n\n assert!(s.is_ok())\n\n}\n\n\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 79, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn basic_flex() {\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(vec![0x11u32, 0x22, 0x33, 0x44].pack(&mut out).unwrap(), 4*4 + 4);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 4*4 + 4);\n\n assert_eq!(v, vec![0x00, 0x00, 0x00, 0x04,\n\n 0x00, 0x00, 0x00, 0x11, 0x00, 0x00, 0x00, 0x22,\n\n 0x00, 0x00, 0x00, 0x33, 0x00, 0x00, 0x00, 0x44]);\n\n\n\n let mut input = Cursor::new(v);\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (vec![0x11u32, 0x22, 0x33, 0x44], 4*4+4));\n\n }\n\n\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n", "file_path": "xdr-codec/src/test.rs", "rank": 80, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn quickcheck_opaque() {\n\n quickcheck(check_opaque as fn(usize, usize, Vec<u8>) -> bool);\n\n}\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 81, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn basic_string() {\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(\"foo!\".pack(&mut out).unwrap(), 8);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 8);\n\n assert_eq!(v, vec![0x00, 0x00, 0x00, 0x04, 0x66, 0x6f, 0x6f, 0x21]);\n\n\n\n let mut input = Cursor::new(v);\n\n 
assert_eq!(Unpack::unpack(&mut input).unwrap(), (String::from(\"foo!\"), 8));\n\n }\n\n\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(\"foo\".pack(&mut out).unwrap(), 8);\n\n\n", "file_path": "xdr-codec/src/test.rs", "rank": 82, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn largerec() {\n\n let mut buf = Vec::new();\n\n\n\n {\n\n let mut xw = XdrRecordWriter::with_buffer(&mut buf, 3);\n\n\n\n assert_eq!(write!(xw, \"hello\").unwrap(), ());\n\n }\n\n\n\n assert_eq!(buf, vec![0, 0, 0, 3, 104, 101, 108, 128, 0, 0, 2, 108, 111])\n\n}\n\n\n", "file_path": "xdr-codec/tests/test-record.rs", "rank": 83, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn union_default() {\n\n let s = specification(\n\n r#\"\n\nunion foo switch (int x) {\n\ncase 0:\n\n int val;\n\ndefault:\n\n void;\n\n};\n\n\"#,\n\n );\n\n println!(\"spec {:?}\", s);\n\n assert!(s.is_ok())\n\n}\n\n\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 84, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn bounded_flex() {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(vec![0x11u32, 0x22, 0x33, 0x44, 0x55].pack(&mut out).unwrap(), 4*5+4);\n\n\n\n let v = out.into_inner();\n\n\n\n {\n\n let mut input = Cursor::new(v.clone());\n\n assert_eq!(unpack_flex(&mut input, Some(10)).unwrap(), (vec![0x11u32, 0x22, 0x33, 0x44, 0x55], 5*4+4));\n\n }\n\n {\n\n let mut input = Cursor::new(v.clone());\n\n match unpack_flex::<_, Vec<u32>>(&mut input, Some(4)) {\n\n Result::Err(Error(ErrorKind::InvalidLen(_), _)) => (),\n\n e => panic!(\"Unexpected {:?}\", e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "xdr-codec/src/test.rs", "rank": 85, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn quickcheck_array() {\n\n quickcheck(\n\n check_array as fn(usize, usize, Vec<u32>, Option<u32>) -> bool,\n\n );\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 86, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn inline_union() {\n\n let spec = 
r#\"\n\n struct thing {\n\n union switch(int x) { case 0: int a; case 1: int b; } thing;\n\n };\n\n\"#;\n\n let s = specification(spec);\n\n\n\n println!(\"spec {:?}\", s);\n\n assert!(s.is_ok());\n\n\n\n let g = generate(\"\", Cursor::new(spec.as_bytes()), Vec::new(), &[]);\n\n assert!(g.is_err());\n\n}\n\n\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 87, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn smallrec() {\n\n let mut buf = Vec::new();\n\n\n\n {\n\n let mut xw = XdrRecordWriter::new(&mut buf);\n\n\n\n assert_eq!(write!(xw, \"hello\").unwrap(), ());\n\n }\n\n\n\n assert_eq!(buf, vec![128, 0, 0, 5, 104, 101, 108, 108, 111])\n\n}\n\n\n", "file_path": "xdr-codec/tests/test-record.rs", "rank": 88, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn fallthrough_case() {\n\n let s = specification(\n\n r#\"\n\nunion foo switch (int x) {\n\n case 0:\n\n case 1:\n\n int val;\n\n case 2:\n\n void;\n\n};\n\n\"#,\n\n );\n\n println!(\"spec {:?}\", s);\n\n assert!(s.is_ok())\n\n}\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 89, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn union_default_nonempty() {\n\n let name = \"union_default_nonempty\";\n\n let spec = r#\"\n\nunion foo switch (int bar) {\n\ncase 1:\n\n int val;\n\ndefault:\n\n opaque buf<>;\n\n};\n\n\"#;\n\n\n\n if let Err(e) = build_test(name, spec) {\n\n panic!(\"test {} failed: {}\", name, e);\n\n }\n\n}\n\n\n", "file_path": "xdrgen/tests/lib.rs", "rank": 90, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn basic_option() {\n\n let mut out = Cursor::new(Vec::new());\n\n let none: Option<u32> = None;\n\n let some: Option<u32> = Some(0x11223344_u32);\n\n\n\n assert_eq!(none.pack(&mut out).unwrap(), 4);\n\n assert_eq!(some.pack(&mut out).unwrap(), 8);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 12);\n\n assert_eq!(v, vec![0x00, 0x00, 0x00, 0x00,\n\n 0x00, 0x00, 0x00, 0x01, 0x11, 0x22, 0x33, 0x44,]);\n\n\n\n let mut input = Cursor::new(v);\n\n 
assert_eq!(Option::<u32>::unpack(&mut input).unwrap(), (None, 4));\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (Some(0x11223344_u32), 8));\n\n\n\n let bad = vec![0, 0, 0, 2];\n\n let mut input = Cursor::new(bad);\n\n\n\n match Option::<u32>::unpack(&mut input) {\n\n Err(Error(ErrorKind::InvalidEnum(_), _)) => (),\n\n res => panic!(\"bad result {:?}\", res),\n\n }\n\n}\n", "file_path": "xdr-codec/src/test.rs", "rank": 91, "score": 39824.06617069889 }, { "content": "#[test]\n\n#[should_panic(expected = \"must be non-zero\")]\n\nfn zerosz() {\n\n let buf = Vec::new();\n\n let _ = XdrRecordWriter::with_buffer(buf, 0);\n\n}\n\n\n", "file_path": "xdr-codec/tests/test-record.rs", "rank": 92, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn typedef_void() {\n\n let s = specification(\n\n r#\"\n\ntypedef void; /* syntactically defined, semantically meaningless */\n\n\"#,\n\n );\n\n\n\n println!(\"spec {:?}\", s);\n\n assert!(s.is_err())\n\n}\n\n\n", "file_path": "xdrgen/src/spec/test.rs", "rank": 93, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn basic_array() {\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n let a = [0x11u32, 0x22, 0x33];\n\n\n\n\n\n assert_eq!(pack_array(&a, a.len(), &mut out, Some(&0)).unwrap(), 3*4);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 3*4);\n\n assert_eq!(v, vec![0x00, 0x00, 0x00, 0x11,\n\n 0x00, 0x00, 0x00, 0x22,\n\n 0x00, 0x00, 0x00, 0x33]);\n\n\n\n let mut input = Cursor::new(v);\n\n let mut b = [0u32; 3];\n\n let bsz = unpack_array(&mut input, &mut b[..], 3, Some(&0)).expect(\"unpack failed\");\n\n assert_eq!(bsz, 4*3);\n\n assert_eq!(&a[..], &b[..]);\n", "file_path": "xdr-codec/src/test.rs", "rank": 94, "score": 39824.06617069889 }, { "content": "#[test]\n\nfn quickcheck_codec_ui32() {\n\n quickcheck_codec_t::<i32>();\n\n quickcheck_codec_t::<u32>();\n\n quickcheck_codec_t::<usize>();\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 95, "score": 38760.43064481339 }, { 
"content": "#[test]\n\nfn quickcheck_codec_iu64() {\n\n quickcheck_codec_t::<i64>();\n\n quickcheck_codec_t::<u64>();\n\n}\n\n\n", "file_path": "xdr-codec/tests/quickcheck.rs", "rank": 96, "score": 38760.43064481339 }, { "content": "#[test]\n\nfn basic_opaque_array() {\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n let a = [0x11u8, 0x22, 0x33];\n\n\n\n\n\n assert_eq!(pack_opaque_array(&a, a.len(), &mut out).unwrap(), 4);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 4);\n\n assert_eq!(v, vec![0x11, 0x22, 0x33, 0x00]);\n\n\n\n let mut input = Cursor::new(v);\n\n let mut b = [0u8; 3];\n\n let bsz = unpack_opaque_array(&mut input, &mut b[..], 3).expect(\"unpack opaque failed\");\n\n assert_eq!(bsz, 4);\n\n assert_eq!(&a[..], &b[..]);\n\n }\n\n\n", "file_path": "xdr-codec/src/test.rs", "rank": 97, "score": 38760.43064481339 }, { "content": "#[test]\n\nfn basic_opaque_flex() {\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(Opaque::borrowed(&vec![0x11u8, 0x22, 0x33, 0x44]).pack(&mut out).unwrap(), 8);\n\n\n\n let v = out.into_inner();\n\n\n\n assert_eq!(v.len(), 8);\n\n assert_eq!(v, vec![0x00, 0x00, 0x00, 0x04, 0x11, 0x22, 0x33, 0x44]);\n\n\n\n let mut input = Cursor::new(v);\n\n assert_eq!(Unpack::unpack(&mut input).unwrap(), (Opaque::borrowed(&vec![0x11u8, 0x22, 0x33, 0x44]), 8));\n\n }\n\n\n\n {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(Opaque::borrowed(&vec![0x11u8, 0x22]).pack(&mut out).unwrap(), 8);\n\n\n", "file_path": "xdr-codec/src/test.rs", "rank": 98, "score": 38760.43064481339 }, { "content": "#[test]\n\nfn bounded_opaque_flex() {\n\n let mut out = Cursor::new(Vec::new());\n\n\n\n assert_eq!(Opaque::borrowed(&vec![0x11u8, 0x22, 0x33, 0x44, 0x55]).pack(&mut out).unwrap(), 12);\n\n\n\n let v = out.into_inner();\n\n\n\n {\n\n let mut input = Cursor::new(v.clone());\n\n assert_eq!(unpack_opaque_flex(&mut input, Some(10)).unwrap(), (vec![0x11u8, 0x22, 0x33, 0x44, 0x55], 12));\n\n }\n\n {\n\n let mut 
input = Cursor::new(v.clone());\n\n match unpack_opaque_flex(&mut input, Some(4)) {\n\n Result::Err(Error(ErrorKind::InvalidLen(_), _)) => (),\n\n e => panic!(\"Unexpected {:?}\", e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "xdr-codec/src/test.rs", "rank": 99, "score": 38760.43064481339 } ]
Rust
pac/atsam4sd32c/src/spi/ier.rs
haata/atsam4-pac
849dd8fcf3be0074d98b8fc65e4fb03fdfd4b6b1
#[doc = "Writer for register IER"] pub type W = crate::W<u32, super::IER>; #[doc = "Write proxy for field `RDRF`"] pub struct RDRF_W<'a> { w: &'a mut W, } impl<'a> RDRF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Write proxy for field `TDRE`"] pub struct TDRE_W<'a> { w: &'a mut W, } impl<'a> TDRE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `MODF`"] pub struct MODF_W<'a> { w: &'a mut W, } impl<'a> MODF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `OVRES`"] pub struct OVRES_W<'a> { w: &'a mut W, } impl<'a> OVRES_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits 
= (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `ENDRX`"] pub struct ENDRX_W<'a> { w: &'a mut W, } impl<'a> ENDRX_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `ENDTX`"] pub struct ENDTX_W<'a> { w: &'a mut W, } impl<'a> ENDTX_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `RXBUFF`"] pub struct RXBUFF_W<'a> { w: &'a mut W, } impl<'a> RXBUFF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Write proxy for field `TXBUFE`"] pub struct TXBUFE_W<'a> { w: &'a mut W, } impl<'a> TXBUFE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: 
bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Write proxy for field `NSSR`"] pub struct NSSR_W<'a> { w: &'a mut W, } impl<'a> NSSR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "Write proxy for field `TXEMPTY`"] pub struct TXEMPTY_W<'a> { w: &'a mut W, } impl<'a> TXEMPTY_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "Write proxy for field `UNDES`"] pub struct UNDES_W<'a> { w: &'a mut W, } impl<'a> UNDES_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } impl W { #[doc = "Bit 0 - Receive Data Register Full Interrupt Enable"] #[inline(always)] pub fn rdrf(&mut self) -> RDRF_W { RDRF_W { w: self } } #[doc = "Bit 1 - SPI Transmit Data Register Empty Interrupt Enable"] #[inline(always)] pub fn tdre(&mut self) -> TDRE_W { TDRE_W { w: self } } #[doc = "Bit 2 - Mode Fault Error Interrupt Enable"] #[inline(always)] pub fn 
modf(&mut self) -> MODF_W { MODF_W { w: self } } #[doc = "Bit 3 - Overrun Error Interrupt Enable"] #[inline(always)] pub fn ovres(&mut self) -> OVRES_W { OVRES_W { w: self } } #[doc = "Bit 4 - End of Receive Buffer Interrupt Enable"] #[inline(always)] pub fn endrx(&mut self) -> ENDRX_W { ENDRX_W { w: self } } #[doc = "Bit 5 - End of Transmit Buffer Interrupt Enable"] #[inline(always)] pub fn endtx(&mut self) -> ENDTX_W { ENDTX_W { w: self } } #[doc = "Bit 6 - Receive Buffer Full Interrupt Enable"] #[inline(always)] pub fn rxbuff(&mut self) -> RXBUFF_W { RXBUFF_W { w: self } } #[doc = "Bit 7 - Transmit Buffer Empty Interrupt Enable"] #[inline(always)] pub fn txbufe(&mut self) -> TXBUFE_W { TXBUFE_W { w: self } } #[doc = "Bit 8 - NSS Rising Interrupt Enable"] #[inline(always)] pub fn nssr(&mut self) -> NSSR_W { NSSR_W { w: self } } #[doc = "Bit 9 - Transmission Registers Empty Enable"] #[inline(always)] pub fn txempty(&mut self) -> TXEMPTY_W { TXEMPTY_W { w: self } } #[doc = "Bit 10 - Underrun Error Interrupt Enable"] #[inline(always)] pub fn undes(&mut self) -> UNDES_W { UNDES_W { w: self } } }
#[doc = "Writer for register IER"] pub type W = crate::W<u32, super::IER>; #[doc = "Write proxy for field `RDRF`"] pub struct RDRF_W<'a> { w: &'a mut W, } impl<'a> RDRF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01); self.w } } #[doc = "Write proxy for field `TDRE`"] pub struct TDRE_W<'a> { w: &'a mut W, } impl<'a> TDRE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1); self.w } } #[doc = "Write proxy for field `MODF`"] pub struct MODF_W<'a> { w: &'a mut W, } impl<'a> MODF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2); self.w } } #[doc = "Write proxy for field `OVRES`"] pub struct OVRES_W<'a> { w: &'a mut W, } impl<'a> OVRES_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r
#[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10); self.w } } impl W { #[doc = "Bit 0 - Receive Data Register Full Interrupt Enable"] #[inline(always)] pub fn rdrf(&mut self) -> RDRF_W { RDRF_W { w: self } } #[doc = "Bit 1 - SPI Transmit Data Register Empty Interrupt Enable"] #[inline(always)] pub fn tdre(&mut self) -> TDRE_W { TDRE_W { w: self } } #[doc = "Bit 2 - Mode Fault Error Interrupt Enable"] #[inline(always)] pub fn modf(&mut self) -> MODF_W { MODF_W { w: self } } #[doc = "Bit 3 - Overrun Error Interrupt Enable"] #[inline(always)] pub fn ovres(&mut self) -> OVRES_W { OVRES_W { w: self } } #[doc = "Bit 4 - End of Receive Buffer Interrupt Enable"] #[inline(always)] pub fn endrx(&mut self) -> ENDRX_W { ENDRX_W { w: self } } #[doc = "Bit 5 - End of Transmit Buffer Interrupt Enable"] #[inline(always)] pub fn endtx(&mut self) -> ENDTX_W { ENDTX_W { w: self } } #[doc = "Bit 6 - Receive Buffer Full Interrupt Enable"] #[inline(always)] pub fn rxbuff(&mut self) -> RXBUFF_W { RXBUFF_W { w: self } } #[doc = "Bit 7 - Transmit Buffer Empty Interrupt Enable"] #[inline(always)] pub fn txbufe(&mut self) -> TXBUFE_W { TXBUFE_W { w: self } } #[doc = "Bit 8 - NSS Rising Interrupt Enable"] #[inline(always)] pub fn nssr(&mut self) -> NSSR_W { NSSR_W { w: self } } #[doc = "Bit 9 - Transmission Registers Empty Enable"] #[inline(always)] pub fn txempty(&mut self) -> TXEMPTY_W { TXEMPTY_W { w: self } } #[doc = "Bit 10 - Underrun Error Interrupt Enable"] #[inline(always)] pub fn undes(&mut self) -> UNDES_W { UNDES_W { w: self } } }
"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3); self.w } } #[doc = "Write proxy for field `ENDRX`"] pub struct ENDRX_W<'a> { w: &'a mut W, } impl<'a> ENDRX_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4); self.w } } #[doc = "Write proxy for field `ENDTX`"] pub struct ENDTX_W<'a> { w: &'a mut W, } impl<'a> ENDTX_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5); self.w } } #[doc = "Write proxy for field `RXBUFF`"] pub struct RXBUFF_W<'a> { w: &'a mut W, } impl<'a> RXBUFF_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6); self.w } } #[doc = "Write proxy for field `TXBUFE`"] pub struct TXBUFE_W<'a> { w: &'a mut W, } impl<'a> TXBUFE_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut 
W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7); self.w } } #[doc = "Write proxy for field `NSSR`"] pub struct NSSR_W<'a> { w: &'a mut W, } impl<'a> NSSR_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 8)) | (((value as u32) & 0x01) << 8); self.w } } #[doc = "Write proxy for field `TXEMPTY`"] pub struct TXEMPTY_W<'a> { w: &'a mut W, } impl<'a> TXEMPTY_W<'a> { #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 9)) | (((value as u32) & 0x01) << 9); self.w } } #[doc = "Write proxy for field `UNDES`"] pub struct UNDES_W<'a> { w: &'a mut W, } impl<'a> UNDES_W<'a> {
random
[]
Rust
graphdb/rust/src/generated.rs
jordan-rash/actor-interfaces
e4679488148ff4f1e11a0c7b87a8d9c8b32e24e8
extern crate rmp_serde as rmps; use rmps::{Deserializer, Serializer}; use serde::{Deserialize, Serialize}; use std::io::Cursor; extern crate log; #[cfg(feature = "guest")] extern crate wapc_guest as guest; #[cfg(feature = "guest")] use guest::prelude::*; #[cfg(feature = "guest")] use lazy_static::lazy_static; #[cfg(feature = "guest")] use std::sync::RwLock; #[cfg(feature = "guest")] pub struct Host { binding: String, } #[cfg(feature = "guest")] impl Default for Host { fn default() -> Self { Host { binding: "default".to_string(), } } } #[cfg(feature = "guest")] pub fn host(binding: &str) -> Host { Host { binding: binding.to_string(), } } #[cfg(feature = "guest")] pub fn default() -> Host { Host::default() } #[cfg(feature = "guest")] impl Host { pub(crate) fn query(&self, graphName: String, query: String) -> HandlerResult<QueryResponse> { let input_args = QueryGraphArgs { graph_name: graphName, query: query, }; host_call( &self.binding, "wasmcloud:graphdb", "QueryGraph", &serialize(input_args)?, ) .map(|vec| { let resp = deserialize::<QueryResponse>(vec.as_ref()).unwrap(); resp }) .map_err(|e| e.into()) } pub fn delete_graph(&self, graphName: String) -> HandlerResult<DeleteResponse> { let input_args = DeleteGraphArgs { graph_name: graphName, }; host_call( &self.binding, "wasmcloud:graphdb", "DeleteGraph", &serialize(input_args)?, ) .map(|vec| { let resp = deserialize::<DeleteResponse>(vec.as_ref()).unwrap(); resp }) .map_err(|e| e.into()) } } #[cfg(feature = "guest")] pub struct Handlers {} #[cfg(feature = "guest")] impl Handlers { pub fn register_query_graph(f: fn(String, String) -> HandlerResult<QueryResponse>) { *QUERY_GRAPH.write().unwrap() = Some(f); register_function(&"QueryGraph", query_graph_wrapper); } pub fn register_delete_graph(f: fn(String) -> HandlerResult<DeleteResponse>) { *DELETE_GRAPH.write().unwrap() = Some(f); register_function(&"DeleteGraph", delete_graph_wrapper); } } #[cfg(feature = "guest")] lazy_static! 
{ static ref QUERY_GRAPH: RwLock<Option<fn(String, String) -> HandlerResult<QueryResponse>>> = RwLock::new(None); static ref DELETE_GRAPH: RwLock<Option<fn(String) -> HandlerResult<DeleteResponse>>> = RwLock::new(None); } #[cfg(feature = "guest")] fn query_graph_wrapper(input_payload: &[u8]) -> CallResult { let input = deserialize::<QueryGraphArgs>(input_payload)?; let lock = QUERY_GRAPH.read().unwrap().unwrap(); let result = lock(input.graph_name, input.query)?; Ok(serialize(result)?) } #[cfg(feature = "guest")] fn delete_graph_wrapper(input_payload: &[u8]) -> CallResult { let input = deserialize::<DeleteGraphArgs>(input_payload)?; let lock = DELETE_GRAPH.read().unwrap().unwrap(); let result = lock(input.graph_name)?; Ok(serialize(result)?) } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct QueryGraphArgs { #[serde(rename = "graphName")] pub graph_name: String, #[serde(rename = "query")] pub query: String, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct DeleteGraphArgs { #[serde(rename = "graphName")] pub graph_name: String, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct QueryResponse { #[serde(rename = "resultSet")] pub result_set: ResultSet, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct DeleteResponse { #[serde(rename = "success")] pub success: bool, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct ResultSet { #[serde(rename = "columns")] pub columns: Vec<Column>, #[serde(rename = "statistics")] pub statistics: Vec<String>, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct Column { #[serde(rename = "scalars")] pub scalars: Option<Vec<Scalar>>, #[serde(rename = "nodes")] pub nodes: Option<Vec<Node>>, #[serde(rename = "relations")] pub relations: Option<Vec<Relation>>, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct Scalar { #[serde(rename 
= "boolValue")] pub bool_value: Option<bool>, #[serde(rename = "intValue")] pub int_value: Option<i64>, #[serde(rename = "doubleValue")] pub double_value: Option<f64>, #[serde(rename = "stringValue")] pub string_value: Option<String>, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct Node { #[serde(rename = "labels")] pub labels: Vec<String>, #[serde(rename = "properties")] pub properties: std::collections::HashMap<String, Scalar>, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct Relation { #[serde(rename = "typeName")] pub type_name: String, #[serde(rename = "properties")] pub properties: std::collections::HashMap<String, Scalar>, } pub fn serialize<T>( item: T, ) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>> where T: Serialize, { let mut buf = Vec::new(); item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?; Ok(buf) } pub fn deserialize<'de, T: Deserialize<'de>>( buf: &[u8], ) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> { let mut de = Deserializer::new(Cursor::new(buf)); match Deserialize::deserialize(&mut de) { Ok(t) => Ok(t), Err(e) => Err(format!("Failed to de-serialize: {}", e).into()), } }
extern crate rmp_serde as rmps; use rmps::{Deserializer, Serializer}; use serde::{Deserialize, Serialize}; use std::io::Cursor; extern crate log; #[cfg(feature = "guest")] extern crate wapc_guest as guest; #[cfg(feature = "guest")] use guest::prelude::*; #[cfg(feature = "guest")] use lazy_static::lazy_static; #[cfg(feature = "guest")] use std::sync::RwLock; #[cfg(feature = "guest")] pub struct Host { binding: String, } #[cfg(feature = "guest")] impl Default for Host { fn default() -> Self { Host { binding: "default".to_string(), } } } #[cfg(feature = "guest")] pub fn host(binding: &str) -> Host { Host { binding: binding.to_string(), } } #[cfg(feature = "guest")] pub fn default() -> Host { Host::default() } #[cfg(feature = "guest")] impl Host { pub(crate) fn query(&self, graphName: String, query: String) -> HandlerResult<QueryResponse> { let input_args = QueryGraphArgs { graph_name: graphName, query: query, }; host_call( &self.binding, "wasmcloud:graphdb", "QueryGraph", &serialize(input_args)?, ) .map(|vec| { let resp = deserialize::<QueryResponse>(vec.as_ref()).unwrap(); resp })
erialize<'de>>( buf: &[u8], ) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> { let mut de = Deserializer::new(Cursor::new(buf)); match Deserialize::deserialize(&mut de) { Ok(t) => Ok(t), Err(e) => Err(format!("Failed to de-serialize: {}", e).into()), } }
.map_err(|e| e.into()) } pub fn delete_graph(&self, graphName: String) -> HandlerResult<DeleteResponse> { let input_args = DeleteGraphArgs { graph_name: graphName, }; host_call( &self.binding, "wasmcloud:graphdb", "DeleteGraph", &serialize(input_args)?, ) .map(|vec| { let resp = deserialize::<DeleteResponse>(vec.as_ref()).unwrap(); resp }) .map_err(|e| e.into()) } } #[cfg(feature = "guest")] pub struct Handlers {} #[cfg(feature = "guest")] impl Handlers { pub fn register_query_graph(f: fn(String, String) -> HandlerResult<QueryResponse>) { *QUERY_GRAPH.write().unwrap() = Some(f); register_function(&"QueryGraph", query_graph_wrapper); } pub fn register_delete_graph(f: fn(String) -> HandlerResult<DeleteResponse>) { *DELETE_GRAPH.write().unwrap() = Some(f); register_function(&"DeleteGraph", delete_graph_wrapper); } } #[cfg(feature = "guest")] lazy_static! { static ref QUERY_GRAPH: RwLock<Option<fn(String, String) -> HandlerResult<QueryResponse>>> = RwLock::new(None); static ref DELETE_GRAPH: RwLock<Option<fn(String) -> HandlerResult<DeleteResponse>>> = RwLock::new(None); } #[cfg(feature = "guest")] fn query_graph_wrapper(input_payload: &[u8]) -> CallResult { let input = deserialize::<QueryGraphArgs>(input_payload)?; let lock = QUERY_GRAPH.read().unwrap().unwrap(); let result = lock(input.graph_name, input.query)?; Ok(serialize(result)?) } #[cfg(feature = "guest")] fn delete_graph_wrapper(input_payload: &[u8]) -> CallResult { let input = deserialize::<DeleteGraphArgs>(input_payload)?; let lock = DELETE_GRAPH.read().unwrap().unwrap(); let result = lock(input.graph_name)?; Ok(serialize(result)?) 
} #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct QueryGraphArgs { #[serde(rename = "graphName")] pub graph_name: String, #[serde(rename = "query")] pub query: String, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct DeleteGraphArgs { #[serde(rename = "graphName")] pub graph_name: String, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct QueryResponse { #[serde(rename = "resultSet")] pub result_set: ResultSet, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct DeleteResponse { #[serde(rename = "success")] pub success: bool, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct ResultSet { #[serde(rename = "columns")] pub columns: Vec<Column>, #[serde(rename = "statistics")] pub statistics: Vec<String>, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct Column { #[serde(rename = "scalars")] pub scalars: Option<Vec<Scalar>>, #[serde(rename = "nodes")] pub nodes: Option<Vec<Node>>, #[serde(rename = "relations")] pub relations: Option<Vec<Relation>>, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct Scalar { #[serde(rename = "boolValue")] pub bool_value: Option<bool>, #[serde(rename = "intValue")] pub int_value: Option<i64>, #[serde(rename = "doubleValue")] pub double_value: Option<f64>, #[serde(rename = "stringValue")] pub string_value: Option<String>, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct Node { #[serde(rename = "labels")] pub labels: Vec<String>, #[serde(rename = "properties")] pub properties: std::collections::HashMap<String, Scalar>, } #[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)] pub struct Relation { #[serde(rename = "typeName")] pub type_name: String, #[serde(rename = "properties")] pub properties: std::collections::HashMap<String, Scalar>, } pub fn serialize<T>( item: T, ) -> 
::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>> where T: Serialize, { let mut buf = Vec::new(); item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?; Ok(buf) } pub fn deserialize<'de, T: Des
random
[ { "content": "#[cfg(feature = \"guest\")]\n\npub fn host(binding: &str) -> Host {\n\n set_binding(binding);\n\n Host {}\n\n}\n\n\n\n/// Creates the default host binding\n", "file_path": "logging/rust/src/generated.rs", "rank": 0, "score": 288951.3958429548 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n pub(crate) fn _write_log(\n\n &self,\n\n target: String,\n\n level: String,\n\n text: String,\n\n ) -> HandlerResult<()> {\n\n let input_args = WriteLogArgs {\n\n target,\n\n level,\n\n text,\n\n };\n\n host_call(\n\n &CURRENT_BINDING.read().unwrap(),\n\n \"wasmcloud:logging\",\n", "file_path": "logging/rust/src/generated.rs", "rank": 1, "score": 261279.74525929155 }, { "content": "/// Creates a named host binding for the event streams capability\n\npub fn host(binding: &str) -> Host {\n\n Host {\n\n binding: binding.to_string(),\n\n }\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n", "file_path": "eventstreams/rust/src/generated.rs", "rank": 2, "score": 253056.52857425815 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn host(binding: &str) -> Host {\n\n Host {\n\n binding: binding.to_string(),\n\n }\n\n}\n\n\n\n/// Creates the default host binding for the key-value store capability\n", "file_path": "keyvalue/rust/src/generated.rs", "rank": 3, "score": 253053.2678600485 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn host(binding: &str) -> Host {\n\n Host {\n\n binding: binding.to_string(),\n\n }\n\n}\n\n\n\n/// Creates the default host binding for the extras capability\n", "file_path": "extras/rust/src/generated.rs", "rank": 5, "score": 253053.2678600485 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn host(binding: &str) -> Host {\n\n Host {\n\n binding: binding.to_string(),\n\n }\n\n}\n\n\n\n/// Creates the default host binding\n", "file_path": "messaging/rust/src/generated.rs", "rank": 6, "score": 253053.2678600485 }, { "content": 
"#[cfg(feature = \"guest\")]\n\npub fn host(binding: &str) -> Host {\n\n Host {\n\n binding: binding.to_string(),\n\n }\n\n}\n\n\n\n/// Creates the default host binding for the telnet capability\n", "file_path": "telnet/rust/src/generated.rs", "rank": 7, "score": 253053.2678600485 }, { "content": "/// Creates a reference a blob store capability provider with the given link name\n\npub fn host(binding: &str) -> Host {\n\n Host {\n\n binding: binding.to_string(),\n\n }\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n", "file_path": "blobstore/rust/src/generated.rs", "rank": 8, "score": 253049.5298872005 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn host(binding: &str) -> Host {\n\n Host {\n\n binding: binding.to_string(),\n\n }\n\n}\n\n\n\n/// Creates the default host binding\n", "file_path": "http-client/rust/src/generated.rs", "rank": 9, "score": 248232.2838182869 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn host(binding: &str) -> Host {\n\n Host {\n\n binding: binding.to_string(),\n\n }\n\n}\n\n\n\n/// Requests the default host abstraction\n", "file_path": "http-server/rust/src/generated.rs", "rank": 10, "score": 248232.2838182869 }, { "content": "/// Creates the default host binding for the event streams capability\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n /// Writes a map of key-value pairs to the given event stream\n\n pub fn write_event(\n\n &self,\n\n stream_id: String,\n\n values: std::collections::HashMap<String, String>,\n\n ) -> HandlerResult<EventAck> {\n\n let input_args = WriteEventArgs {\n\n stream_id: stream_id,\n\n values: values,\n\n };\n\n host_call(\n\n &self.binding,\n\n \"wasmcloud:eventstreams\",\n\n \"WriteEvent\",\n", "file_path": "eventstreams/rust/src/generated.rs", "rank": 11, "score": 223224.3090679471 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n /// 
Retrieves a value stored in a given key\n\n pub fn get(&self, key: String) -> HandlerResult<GetResponse> {\n\n let input_args = GetArgs { key: key };\n\n host_call(\n\n &self.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"Get\",\n\n &serialize(input_args)?,\n\n )\n\n .map(|vec| {\n\n let resp = deserialize::<GetResponse>(vec.as_ref()).unwrap();\n\n resp\n\n })\n\n .map_err(|e| e.into())\n", "file_path": "keyvalue/rust/src/generated.rs", "rank": 12, "score": 223217.51610509315 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n /// Publishes a message on a given subject with an optional reply subject\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `subject` - Message subject\n\n /// * `reply_to` - Subject to receive message replies. Can be left blank for no reply subject\n\n /// * `body` - Message payload\n\n ///\n\n /// # Example\n\n /// ```rust\n\n /// extern crate wasmcloud_actor_messaging as messaging;\n\n /// fn send_message() {\n\n /// let subject = \"first.app\".to_string();\n\n /// let reply_to = \"\".to_string();\n", "file_path": "messaging/rust/src/generated.rs", "rank": 13, "score": 223217.51610509318 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n /// Sends a string of text to a given session. The provider is not responsible for\n\n /// indicating if this is a valid session or not. 
The telnet provider will not automatically\n\n /// add newlines or carriage returns.\n\n pub fn send_text(&self, session: String, text: String) -> HandlerResult<bool> {\n\n let input_args = SendTextArgs {\n\n session: session,\n\n text: text,\n\n };\n\n host_call(\n\n &self.binding,\n\n \"wasmcloud:telnet\",\n\n OP_SEND_TEXT,\n\n &serialize(input_args)?,\n\n )\n", "file_path": "telnet/rust/src/generated.rs", "rank": 14, "score": 223217.51610509318 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n /// Requests a UUID/GUID from the host. If, for any reason, the host is unable to produce a UUID, this will return `None`\n\n pub fn request_guid(&self) -> HandlerResult<Option<String>> {\n\n self.request_guid_raw(GeneratorRequest {\n\n guid: true,\n\n sequence: false,\n\n random: false,\n\n min: 0,\n\n max: 0,\n\n })\n\n }\n\n\n\n fn request_guid_raw(&self, req: GeneratorRequest) -> HandlerResult<Option<String>> {\n\n host_call(\n\n &self.binding,\n", "file_path": "extras/rust/src/generated.rs", "rank": 15, "score": 223217.51610509318 }, { "content": "/// Creates a reference to the default blob store capability provider\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n /// Creates a new container with the given ID\n\n pub fn create_container(&self, id: String) -> HandlerResult<Container> {\n\n let input_args = CreateContainerArgs { id: id };\n\n host_call(\n\n &self.binding,\n\n \"wasmcloud:blobstore\",\n\n \"CreateContainer\",\n\n &serialize(input_args)?,\n\n )\n\n .map(|vec| {\n\n let resp = deserialize::<Container>(vec.as_ref()).unwrap();\n\n resp\n\n })\n\n .map_err(|e| e.into())\n", "file_path": "blobstore/rust/src/generated.rs", "rank": 17, "score": 223217.31038088942 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = 
\"guest\")]\n\nimpl Host {\n\n /// Make an HTTP request with specified method, headers and body to url.\n\n /// This request must be carried out by an appropriately bound capability\n\n /// provider implementing `wasmcloud:httpclient`, the request is not\n\n /// made directly by the actor.\n\n pub fn request(\n\n &self,\n\n method: String,\n\n url: String,\n\n headers: std::collections::HashMap<String, String>,\n\n body: Vec<u8>,\n\n ) -> HandlerResult<Response> {\n\n let input_args = RequestArgs {\n\n method,\n\n url,\n", "file_path": "http-client/rust/src/generated.rs", "rank": 18, "score": 218773.47419063392 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn default() -> Host {\n\n Host::default()\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n pub fn handle_request(&self, request: Request) -> HandlerResult<Response> {\n\n host_call(\n\n &self.binding,\n\n \"wasmcloud:httpserver\",\n\n \"HandleRequest\",\n\n &serialize(request)?,\n\n )\n\n .map(|vec| {\n\n let resp = deserialize::<Response>(vec.as_ref()).unwrap();\n\n resp\n\n })\n\n .map_err(|e| e.into())\n\n }\n\n}\n", "file_path": "http-server/rust/src/generated.rs", "rank": 19, "score": 218773.4741906339 }, { "content": "#[cfg(feature = \"guest\")]\n\nfn set_binding(binding: &str) {\n\n *CURRENT_BINDING.write().unwrap() = binding.to_string();\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl log::Log for Host {\n\n fn enabled(&self, _metadata: &Metadata) -> bool {\n\n true\n\n }\n\n\n\n fn log(&self, record: &Record) {\n\n use log::Level::*;\n\n let level = match record.level() {\n\n Error => \"error\",\n\n Warn => \"warn\",\n\n Info => \"info\",\n\n Debug => \"debug\",\n\n Trace => \"trace\",\n\n };\n\n let _ = self._write_log(\n\n record.target().to_string(),\n\n level.to_string(),\n\n format!(\"{}\", record.args()),\n\n );\n\n }\n\n\n\n fn flush(&self) {}\n\n}\n", "file_path": "logging/rust/src/lib.rs", "rank": 20, "score": 194515.05303517115 }, { "content": "/// The standard function for 
serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "logging/rust/src/generated.rs", "rank": 21, "score": 187624.37872983844 }, { "content": "#[cfg(feature = \"guest\")]\n\npub fn enable_macros() {\n\n if log::set_logger(&LOGGER).is_ok() {};\n\n log::set_max_level(log::LevelFilter::Trace);\n\n}\n\n\n", "file_path": "logging/rust/src/lib.rs", "rank": 22, "score": 156290.10849423026 }, { "content": "/// The standard function for serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "telnet/rust/src/generated.rs", "rank": 24, "score": 149562.14957564007 }, { "content": "/// The standard function for serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. 
Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "eventstreams/rust/src/generated.rs", "rank": 25, "score": 149562.14957564007 }, { "content": "/// The standard function for serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "blobstore/rust/src/generated.rs", "rank": 26, "score": 149562.14957564007 }, { "content": "/// The standard function for serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "messaging/rust/src/generated.rs", "rank": 27, "score": 149562.14957564007 }, { "content": "/// The standard function for serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. 
Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "keyvalue/rust/src/generated.rs", "rank": 28, "score": 149562.14957564007 }, { "content": "#[doc(hidden)]\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n\n/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n", "file_path": "extras/rust/src/generated.rs", "rank": 29, "score": 149549.0783823957 }, { "content": "/// The standard function for serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "http-server/rust/src/generated.rs", "rank": 30, "score": 146225.78339520114 }, { "content": "/// The standard function for serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. 
Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "http-client/rust/src/generated.rs", "rank": 31, "score": 146225.78339520117 }, { "content": "/// The standard function for serializing codec structs into a format that can be\n\n/// used for message exchange between actor and host. Use of any other function to\n\n/// serialize could result in breaking incompatibilities.\n\npub fn serialize<T>(\n\n item: T,\n\n) -> ::std::result::Result<Vec<u8>, Box<dyn std::error::Error + Send + Sync>>\n\nwhere\n\n T: Serialize,\n\n{\n\n let mut buf = Vec::new();\n\n item.serialize(&mut Serializer::new(&mut buf).with_struct_map())?;\n\n Ok(buf)\n\n}\n\n\n", "file_path": "actor-core/rust/wasmcloud-actor-core/src/generated.rs", "rank": 32, "score": 137293.62633482853 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. 
Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "logging/rust/src/generated.rs", "rank": 33, "score": 128365.29223457383 }, { "content": "type Host struct {\n\n\tbinding string\n", "file_path": "logging/go/logging.go", "rank": 34, "score": 117437.37787563661 }, { "content": "/// Performs an actor-to-actor call, with the target actor identified by a reference string. This\n\n/// reference can be an OCI image URL, a 56-character public key (subject), or, if one is defined,\n\n/// a developer-friendly call alias\n\npub fn call_actor<'de, T: Serialize, U: Deserialize<'de>>(\n\n actor_ref: &str,\n\n operation: &str,\n\n msg: &T,\n\n) -> wapc_guest::HandlerResult<U> {\n\n let res = wapc_guest::host_call(\"default\", actor_ref, operation, &generated::serialize(msg)?)?;\n\n let res = generated::deserialize(&res)?;\n\n Ok(res)\n\n}\n\n\n\nimpl HealthCheckResponse {\n\n pub fn healthy() -> HealthCheckResponse {\n\n HealthCheckResponse {\n\n healthy: true,\n\n message: \"\".to_string(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\npub use wasmcloud_actor_core_derive::init;\n", "file_path": "actor-core/rust/wasmcloud-actor-core/src/lib.rs", "rank": 35, "score": 107243.75051239908 }, { "content": "/// Returns the number of rows in the result set.\n\npub fn num_rows(rs: &ResultSet) -> usize {\n\n if let Some(col) = rs.columns.get(0) {\n\n col.scalars.as_ref().unwrap_or(&vec![]).len()\n\n + col.relations.as_ref().unwrap_or(&vec![]).len()\n\n + col.nodes.as_ref().unwrap_or(&vec![]).len()\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "graphdb/rust/src/results.rs", 
"rank": 36, "score": 94502.7130313534 }, { "content": "/// Returns the number of columns in the result set.\n\npub fn num_columns(rs: &ResultSet) -> usize {\n\n rs.columns.len()\n\n}\n\n\n", "file_path": "graphdb/rust/src/results.rs", "rank": 37, "score": 94502.7130313534 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "eventstreams/rust/src/generated.rs", "rank": 38, "score": 93459.48268658711 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "blobstore/rust/src/generated.rs", "rank": 39, "score": 93459.48268658711 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. 
Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "messaging/rust/src/generated.rs", "rank": 41, "score": 93459.48268658711 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "telnet/rust/src/generated.rs", "rank": 42, "score": 93459.48268658711 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. 
Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "keyvalue/rust/src/generated.rs", "rank": 43, "score": 93459.48268658711 }, { "content": "#[doc(hidden)]\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "extras/rust/src/generated.rs", "rank": 44, "score": 93448.08737996436 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "http-client/rust/src/generated.rs", "rank": 45, "score": 91588.868284144 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. 
Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "http-server/rust/src/generated.rs", "rank": 46, "score": 91588.868284144 }, { "content": "/// The standard function for de-serializing codec structs from a format suitable\n\n/// for message exchange between actor and host. Use of any other function to\n\n/// deserialize could result in breaking incompatibilities.\n\npub fn deserialize<'de, T: Deserialize<'de>>(\n\n buf: &[u8],\n\n) -> ::std::result::Result<T, Box<dyn std::error::Error + Send + Sync>> {\n\n let mut de = Deserializer::new(Cursor::new(buf));\n\n match Deserialize::deserialize(&mut de) {\n\n Ok(t) => Ok(t),\n\n Err(e) => Err(format!(\"Failed to de-serialize: {}\", e).into()),\n\n }\n\n}\n", "file_path": "actor-core/rust/wasmcloud-actor-core/src/generated.rs", "rank": 47, "score": 86535.75077529502 }, { "content": "func (h *Host) WriteLog(target string, level string, text string) error {\n\n\tinputArgs := WriteLogArgs{\n\n\t\tTarget: target,\n\n\t\tLevel: level,\n\n\t\tText: text,\n\n\t}\n\n\tinputBytes, err := msgpack.ToBytes(&inputArgs)\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\t_, err = wapc.HostCall(\n\n\t\th.binding,\n\n\t\t\"wasmcloud:logging\",\n\n\t\t\"WriteLog\",\n\n\t\tinputBytes,\n\n\t)\n\n\treturn err\n", "file_path": "logging/go/logging.go", "rank": 48, "score": 84709.84450527938 }, { "content": "\tbinding string\n", "file_path": "logging/go/logging.go", "rank": 49, "score": 82981.9831438699 }, { "content": "func NewHost(binding string) *Host {\n\n\treturn &Host{\n\n\t\tbinding: binding,\n\n\t}\n", "file_path": "logging/go/logging.go", 
"rank": 51, "score": 80813.94202715193 }, { "content": "#[allow(clippy::needless_doctest_main)]\n\n#[proc_macro_attribute]\n\npub fn init(_: TokenStream, item: TokenStream) -> TokenStream {\n\n let mut input = syn::parse_macro_input!(item as syn::ItemFn);\n\n let attrs = &input.attrs;\n\n\n\n let sig = &mut input.sig;\n\n let body = &input.block;\n\n\n\n if sig.asyncness.is_some() {\n\n return syn::Error::new_spanned(\n\n sig.fn_token,\n\n \"the async keyword cannot be used within actors\",\n\n )\n\n .to_compile_error()\n\n .into();\n\n }\n\n\n\n sig.asyncness = None;\n\n\n\n (quote! {\n\n #[doc(hidden)]\n", "file_path": "actor-core/rust/wasmcloud-actor-core-derive/src/lib.rs", "rank": 52, "score": 78504.36335967894 }, { "content": "/// Returns the relation at the given position.\n\n///\n\n/// Returns an error if the value at the given position is not a relation\n\n/// or if the position is out of bounds.\n\npub fn get_relation(rs: &ResultSet, row_idx: usize, column_idx: usize) -> GraphResult<Relation> {\n\n match rs.columns.get(column_idx) {\n\n Some(column) => match column.relations.as_ref().unwrap_or(&vec![]).get(row_idx) {\n\n Some(relation) => Ok(relation.clone()),\n\n None => client_type_error!(\n\n \"failed to get relation: row index out of bounds: the len is {:?} but the index is {:?}\", column.relations.as_ref().unwrap_or(&vec![]).len(), row_idx,\n\n ),\n\n },\n\n None => client_type_error!(\n\n \"failed to get relation: column index out of bounds: the len is {:?} but the index is {:?}\", rs.columns.len(), column_idx,\n\n ),\n\n }\n\n}\n\n\n\nimpl FromTable for ResultSet {\n\n fn from_table(result_set: &ResultSet) -> GraphResult<Self> {\n\n Ok(result_set.clone())\n\n }\n\n}\n\n\n", "file_path": "graphdb/rust/src/results.rs", "rank": 53, "score": 72334.40351986396 }, { "content": "/// Returns the scalar at the given position.\n\n///\n\n/// Returns an error if the value at the given position is not a scalar\n\n/// or if the position is out of bounds.\n\npub 
fn get_scalar(rs: &ResultSet, row_idx: usize, column_idx: usize) -> GraphResult<Scalar> {\n\n match rs.columns.get(column_idx) {\n\n Some(column) => match column.scalars.as_ref().unwrap_or(&vec![]).get(row_idx) {\n\n Some(scalar) => Ok(scalar.clone()),\n\n None => client_type_error!(\n\n \"failed to get scalar: row index out of bounds: the len is {:?} but the index is {:?}\", column.scalars.as_ref().unwrap_or(&vec![]).len(), row_idx,\n\n ),\n\n },\n\n None => client_type_error!(\n\n \"failed to get scalar: column index out of bounds: the len is {:?} but the index is {:?}\", rs.columns.len(), column_idx,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "graphdb/rust/src/results.rs", "rank": 54, "score": 72334.40351986396 }, { "content": "/// Returns the node at the given position.\n\n///\n\n/// Returns an error if the value at the given position is not a node\n\n/// or if the position is out of bounds.\n\npub fn get_node(rs: &ResultSet, row_idx: usize, column_idx: usize) -> GraphResult<Node> {\n\n match rs.columns.get(column_idx) {\n\n Some(column) => match column.nodes.as_ref().unwrap_or(&vec![]).get(row_idx) {\n\n Some(node) => Ok(node.clone()),\n\n None => client_type_error!(\n\n \"failed to get node: row index out of bounds: the len is {:?} but the index is {:?}\", column.nodes.as_ref().unwrap_or(&vec![]).len(), row_idx,\n\n ),\n\n },\n\n None => client_type_error!(\n\n \"failed to get node: column index out of bounds: the len is {:?} but the index is {:?}\", rs.columns.len(), column_idx,\n\n ),\n\n }\n\n}\n\n\n", "file_path": "graphdb/rust/src/results.rs", "rank": 55, "score": 72334.40351986396 }, { "content": "type Host struct {\n\n\tbinding string\n", "file_path": "blobstore/go/blobstore.go", "rank": 56, "score": 70471.26984246838 }, { "content": "export class Host {\n\n binding: string;\n\n\n\n constructor(binding: string = \"default\") {\n\n this.binding = binding;\n\n }\n\n\n\n Get(key: string): GetResponse {\n\n const inputArgs = new GetArgs();\n\n inputArgs.key = 
key;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"Get\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return GetResponse.decode(decoder);\n\n }\n\n\n\n Add(key: string, value: i32): AddResponse {\n\n const inputArgs = new AddArgs();\n\n inputArgs.key = key;\n\n inputArgs.value = value;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"Add\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return AddResponse.decode(decoder);\n\n }\n\n\n\n Set(key: string, value: string, expires: i32): SetResponse {\n\n const inputArgs = new SetArgs();\n\n inputArgs.key = key;\n\n inputArgs.value = value;\n\n inputArgs.expires = expires;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"Set\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return SetResponse.decode(decoder);\n\n }\n\n\n\n Del(key: string): DelResponse {\n\n const inputArgs = new DelArgs();\n\n inputArgs.key = key;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"Del\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return DelResponse.decode(decoder);\n\n }\n\n\n\n Clear(key: string): DelResponse {\n\n const inputArgs = new ClearArgs();\n\n inputArgs.key = key;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"Clear\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return DelResponse.decode(decoder);\n\n }\n\n\n\n Range(key: string, start: i32, stop: i32): ListRangeResponse {\n\n const inputArgs = new RangeArgs();\n\n inputArgs.key = key;\n\n inputArgs.start = start;\n\n inputArgs.stop = stop;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"Range\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return 
ListRangeResponse.decode(decoder);\n\n }\n\n\n\n Push(key: string, value: string): ListResponse {\n\n const inputArgs = new PushArgs();\n\n inputArgs.key = key;\n\n inputArgs.value = value;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"Push\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return ListResponse.decode(decoder);\n\n }\n\n\n\n ListItemDelete(key: string, value: string): ListResponse {\n\n const inputArgs = new ListItemDeleteArgs();\n\n inputArgs.key = key;\n\n inputArgs.value = value;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"ListItemDelete\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return ListResponse.decode(decoder);\n\n }\n\n\n\n SetAdd(key: string, value: string): SetOperationResponse {\n\n const inputArgs = new SetAddArgs();\n\n inputArgs.key = key;\n\n inputArgs.value = value;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"SetAdd\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return SetOperationResponse.decode(decoder);\n\n }\n\n\n\n SetRemove(key: string, value: string): SetOperationResponse {\n\n const inputArgs = new SetRemoveArgs();\n\n inputArgs.key = key;\n\n inputArgs.value = value;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"SetRemove\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return SetOperationResponse.decode(decoder);\n\n }\n\n\n\n SetUnion(keys: Array<string>): SetQueryResponse {\n\n const inputArgs = new SetUnionArgs();\n\n inputArgs.keys = keys;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"SetUnion\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return SetQueryResponse.decode(decoder);\n\n }\n\n\n\n SetIntersection(keys: Array<string>): SetQueryResponse {\n\n const inputArgs = 
new SetIntersectionArgs();\n\n inputArgs.keys = keys;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"SetIntersection\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return SetQueryResponse.decode(decoder);\n\n }\n\n\n\n SetQuery(key: string): SetQueryResponse {\n\n const inputArgs = new SetQueryArgs();\n\n inputArgs.key = key;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"SetQuery\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return SetQueryResponse.decode(decoder);\n\n }\n\n\n\n KeyExists(key: string): GetResponse {\n\n const inputArgs = new KeyExistsArgs();\n\n inputArgs.key = key;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"KeyExists\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return GetResponse.decode(decoder);\n\n }\n", "file_path": "keyvalue/assemblyscript/assembly/module.ts", "rank": 57, "score": 69484.65150046525 }, { "content": "export class Host {\n\n binding: string;\n\n\n\n constructor(binding: string) {\n\n this.binding = binding;\n\n }\n\n\n\n CreateContainer(id: string): Container {\n\n const inputArgs = new CreateContainerArgs();\n\n inputArgs.id = id;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"CreateContainer\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return Container.decode(decoder);\n\n }\n\n\n\n RemoveContainer(id: string): BlobstoreResult {\n\n const inputArgs = new RemoveContainerArgs();\n\n inputArgs.id = id;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"RemoveContainer\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return BlobstoreResult.decode(decoder);\n\n }\n\n\n\n RemoveObject(id: string, container_id: string): BlobstoreResult {\n\n const inputArgs = new RemoveObjectArgs();\n\n 
inputArgs.id = id;\n\n inputArgs.container_id = container_id;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"RemoveObject\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return BlobstoreResult.decode(decoder);\n\n }\n\n\n\n ListObjects(container_id: string): BlobList {\n\n const inputArgs = new ListObjectsArgs();\n\n inputArgs.container_id = container_id;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"ListObjects\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return BlobList.decode(decoder);\n\n }\n\n\n\n UploadChunk(chunk: FileChunk): void {\n\n hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"UploadChunk\",\n\n chunk.toBuffer()\n\n );\n\n }\n\n\n\n StartDownload(\n\n blob_id: string,\n\n container_id: string,\n\n chunk_size: u64,\n\n context: Value<string> | null\n\n ): BlobstoreResult {\n\n const inputArgs = new StartDownloadArgs();\n\n inputArgs.blob_id = blob_id;\n\n inputArgs.container_id = container_id;\n\n inputArgs.chunk_size = chunk_size;\n\n inputArgs.context = context;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"StartDownload\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return BlobstoreResult.decode(decoder);\n\n }\n\n\n\n StartUpload(blob: FileChunk): BlobstoreResult {\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"StartUpload\",\n\n blob.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return BlobstoreResult.decode(decoder);\n\n }\n\n\n\n GetObjectInfo(blob_id: string, container_id: string): Blob {\n\n const inputArgs = new GetObjectInfoArgs();\n\n inputArgs.blob_id = blob_id;\n\n inputArgs.container_id = container_id;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"GetObjectInfo\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new 
Decoder(payload);\n\n return Blob.decode(decoder);\n\n }\n\n\n\n ReceiveChunk(chunk: FileChunk): void {\n\n hostCall(\n\n this.binding,\n\n \"wasmcloud:blobstore\",\n\n \"ReceiveChunk\",\n\n chunk.toBuffer()\n\n );\n\n }\n", "file_path": "blobstore/assemblyscript/assembly/module.ts", "rank": 58, "score": 69484.65150046525 }, { "content": "export class Host {\n\n binding: string;\n\n\n\n constructor(binding: string = \"default\") {\n\n this.binding = binding;\n\n }\n\n\n\n HandleRequest(request: Request): Response {\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:httpserver\",\n\n \"HandleRequest\",\n\n request.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return Response.decode(decoder);\n\n }\n", "file_path": "http-server/assemblyscript/assembly/module.ts", "rank": 59, "score": 68551.1113759448 }, { "content": "export class Host {\n\n binding: string;\n\n\n\n constructor(binding: string = \"default\") {\n\n this.binding = binding;\n\n }\n\n\n\n HealthRequest(request: HealthCheckRequest): HealthCheckResponse {\n\n const payload = hostCall(\n\n this.binding,\n\n \"core\",\n\n \"HealthRequest\",\n\n request.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return HealthCheckResponse.decode(decoder);\n\n }\n", "file_path": "actor-core/assemblyscript/assembly/module.ts", "rank": 60, "score": 68551.1113759448 }, { "content": "\tQueryString string\n", "file_path": "http-server/go/httpserver.go", "rank": 61, "score": 68162.21009265039 }, { "content": " SetQuery(key: string): SetQueryResponse {\n\n const inputArgs = new SetQueryArgs();\n\n inputArgs.key = key;\n\n const payload = hostCall(\n\n this.binding,\n\n \"wasmcloud:keyvalue\",\n\n \"SetQuery\",\n\n inputArgs.toBuffer()\n\n );\n\n const decoder = new Decoder(payload);\n\n return SetQueryResponse.decode(decoder);\n", "file_path": "keyvalue/assemblyscript/assembly/module.ts", "rank": 62, "score": 66018.78564763571 }, { "content": " withQueryString(queryString: string): 
RequestBuilder {\n\n this.instance.queryString = queryString;\n\n return this;\n", "file_path": "http-server/assemblyscript/assembly/module.ts", "rank": 63, "score": 63072.44140565674 }, { "content": "/// Implemented by types that can be constructed from a cell in a [`ResultSet`](../result_set/struct.ResultSet.html).\n\npub trait FromCell: Sized {\n\n fn from_cell(result_set: &ResultSet, row_idx: usize, column_idx: usize) -> GraphResult<Self>;\n\n}\n\n\n\n// Macro generates generic \"From\" implementations to allow\n\n// tuples/vecs-of-tuples to be extracted from various types\n\n//\n\n// Altered version of https://github.com/mitsuhiko/redis-rs/blob/master/src/types.rs#L1080\n\nmacro_rules! impl_row_for_tuple {\n\n () => ();\n\n ($($name:ident,)+) => (\n\n #[doc(hidden)]\n\n impl<$($name: FromCell),*> FromRow for ($($name,)*) {\n\n // we have local variables named T1 as dummies and those\n\n // variables are unused.\n\n #[allow(non_snake_case, unused_variables, clippy::eval_order_dependence)]\n\n fn from_row(result_set: &ResultSet, row_idx: usize) -> GraphResult<($($name,)*)> {\n\n // hacky way to count the tuple size\n\n let mut n = 0;\n\n $(let $name = (); n += 1;)*\n", "file_path": "graphdb/rust/src/results.rs", "rank": 64, "score": 55712.06887122481 }, { "content": "/// Implemented by types that can be constructed from a row in a [`ResultSet`](../result_set/struct.ResultSet.html).\n\npub trait FromRow: Sized {\n\n fn from_row(result_set: &ResultSet, row_idx: usize) -> GraphResult<Self>;\n\n}\n\n\n", "file_path": "graphdb/rust/src/results.rs", "rank": 65, "score": 55712.06887122481 }, { "content": "pub trait FromTable: Sized {\n\n fn from_table(result_set: &ResultSet) -> GraphResult<Self>;\n\n}\n\n\n", "file_path": "graphdb/rust/src/results.rs", "rank": 66, "score": 55708.82243694388 }, { "content": "package logging\n\n\n\nimport (\n\n\tmsgpack \"github.com/wapc/tinygo-msgpack\"\n\n\twapc \"github.com/wapc/wapc-guest-tinygo\"\n\n)\n\n\n\ntype Host struct 
{\n\n\tbinding string\n\n}\n\n\n\nfunc NewHost(binding string) *Host {\n\n\treturn &Host{\n\n\t\tbinding: binding,\n\n\t}\n\n}\n\n\n\nfunc (h *Host) WriteLog(target string, level string, text string) error {\n\n\tinputArgs := WriteLogArgs{\n\n\t\tTarget: target,\n\n\t\tLevel: level,\n\n\t\tText: text,\n\n\t}\n\n\tinputBytes, err := msgpack.ToBytes(&inputArgs)\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\t_, err = wapc.HostCall(\n\n\t\th.binding,\n\n\t\t\"wasmcloud:logging\",\n\n\t\t\"WriteLog\",\n\n\t\tinputBytes,\n\n\t)\n\n\treturn err\n\n}\n\n\n\ntype Handlers struct {\n\n\tWriteLog func(target string, level string, text string) error\n\n}\n\n\n\nfunc (h Handlers) Register() {\n\n\tif h.WriteLog != nil {\n\n\t\tWriteLogHandler = h.WriteLog\n\n\t\twapc.RegisterFunction(\"WriteLog\", WriteLogWrapper)\n\n\t}\n\n}\n\n\n\nvar (\n\n\tWriteLogHandler func(target string, level string, text string) error\n\n)\n\n\n\nfunc WriteLogWrapper(payload []byte) ([]byte, error) {\n\n\tdecoder := msgpack.NewDecoder(payload)\n\n\tvar inputArgs WriteLogArgs\n\n\tinputArgs.Decode(&decoder)\n\n\terr := WriteLogHandler(inputArgs.Target, inputArgs.Level, inputArgs.Text)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn []byte{}, nil\n\n}\n\n\n\ntype WriteLogArgs struct {\n\n\tTarget string\n\n\tLevel string\n\n\tText string\n\n}\n\n\n\nfunc DecodeWriteLogArgsNullable(decoder *msgpack.Decoder) (*WriteLogArgs, error) {\n\n\tif isNil, err := decoder.IsNextNil(); isNil || err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tdecoded, err := DecodeWriteLogArgs(decoder)\n\n\treturn &decoded, err\n\n}\n\n\n\nfunc DecodeWriteLogArgs(decoder *msgpack.Decoder) (WriteLogArgs, error) {\n\n\tvar o WriteLogArgs\n\n\terr := o.Decode(decoder)\n\n\treturn o, err\n\n}\n\n\n\nfunc (o *WriteLogArgs) Decode(decoder *msgpack.Decoder) error {\n\n\tnumFields, err := decoder.ReadMapSize()\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\tfor numFields > 0 {\n\n\t\tnumFields--\n\n\t\tfield, err := 
decoder.ReadString()\n\n\t\tif err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\t\tswitch field {\n\n\t\tcase \"target\":\n\n\t\t\to.Target, err = decoder.ReadString()\n\n\t\tcase \"level\":\n\n\t\t\to.Level, err = decoder.ReadString()\n\n\t\tcase \"text\":\n\n\t\t\to.Text, err = decoder.ReadString()\n\n\t\tdefault:\n\n\t\t\terr = decoder.Skip()\n\n\t\t}\n\n\t\tif err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\t}\n\n\n\n\treturn nil\n\n}\n\n\n\nfunc (o *WriteLogArgs) Encode(encoder msgpack.Writer) error {\n\n\tif o == nil {\n\n\t\tencoder.WriteNil()\n\n\t\treturn nil\n\n\t}\n\n\tencoder.WriteMapSize(3)\n\n\tencoder.WriteString(\"target\")\n\n\tencoder.WriteString(o.Target)\n\n\tencoder.WriteString(\"level\")\n\n\tencoder.WriteString(o.Level)\n\n\tencoder.WriteString(\"text\")\n\n\tencoder.WriteString(o.Text)\n\n\n\n\treturn nil\n\n}\n", "file_path": "logging/go/logging.go", "rank": 67, "score": 52345.79645969623 }, { "content": "\tWriteLog func(target string, level string, text string) error\n", "file_path": "logging/go/logging.go", "rank": 68, "score": 51632.34577812825 }, { "content": "type WriteLogArgs struct {\n\n\tTarget string\n\n\tLevel string\n\n\tText string\n", "file_path": "logging/go/logging.go", "rank": 69, "score": 50938.08164376759 }, { "content": "\tWriteLogHandler func(target string, level string, text string) error\n", "file_path": "logging/go/logging.go", "rank": 70, "score": 50938.08164376759 }, { "content": "func WriteLogWrapper(payload []byte) ([]byte, error) {\n\n\tdecoder := msgpack.NewDecoder(payload)\n\n\tvar inputArgs WriteLogArgs\n\n\tinputArgs.Decode(&decoder)\n\n\terr := WriteLogHandler(inputArgs.Target, inputArgs.Level, inputArgs.Text)\n\n\tif err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\treturn []byte{}, nil\n", "file_path": "logging/go/logging.go", "rank": 71, "score": 50938.08164376759 }, { "content": "func DecodeWriteLogArgs(decoder *msgpack.Decoder) (WriteLogArgs, error) {\n\n\tvar o WriteLogArgs\n\n\terr := 
o.Decode(decoder)\n\n\treturn o, err\n", "file_path": "logging/go/logging.go", "rank": 72, "score": 50262.240361147764 }, { "content": "func (o *WriteLogArgs) Decode(decoder *msgpack.Decoder) error {\n\n\tnumFields, err := decoder.ReadMapSize()\n\n\tif err != nil {\n\n\t\treturn err\n\n\t}\n\n\n\n\tfor numFields > 0 {\n\n\t\tnumFields--\n\n\t\tfield, err := decoder.ReadString()\n\n\t\tif err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\t\tswitch field {\n\n\t\tcase \"target\":\n\n\t\t\to.Target, err = decoder.ReadString()\n\n\t\tcase \"level\":\n\n\t\t\to.Level, err = decoder.ReadString()\n\n\t\tcase \"text\":\n\n\t\t\to.Text, err = decoder.ReadString()\n\n\t\tdefault:\n\n\t\t\terr = decoder.Skip()\n\n\t\t}\n\n\t\tif err != nil {\n\n\t\t\treturn err\n\n\t\t}\n\n\t}\n\n\n\n\treturn nil\n", "file_path": "logging/go/logging.go", "rank": 73, "score": 50262.240361147764 }, { "content": "func (o *WriteLogArgs) Encode(encoder msgpack.Writer) error {\n\n\tif o == nil {\n\n\t\tencoder.WriteNil()\n\n\t\treturn nil\n\n\t}\n\n\tencoder.WriteMapSize(3)\n\n\tencoder.WriteString(\"target\")\n\n\tencoder.WriteString(o.Target)\n\n\tencoder.WriteString(\"level\")\n\n\tencoder.WriteString(o.Level)\n\n\tencoder.WriteString(\"text\")\n\n\tencoder.WriteString(o.Text)\n\n\n\n\treturn nil\n", "file_path": "logging/go/logging.go", "rank": 74, "score": 50262.240361147764 }, { "content": "func DecodeWriteLogArgsNullable(decoder *msgpack.Decoder) (*WriteLogArgs, error) {\n\n\tif isNil, err := decoder.IsNextNil(); isNil || err != nil {\n\n\t\treturn nil, err\n\n\t}\n\n\tdecoded, err := DecodeWriteLogArgs(decoder)\n\n\treturn &decoded, err\n", "file_path": "logging/go/logging.go", "rank": 75, "score": 49604.09823462366 }, { "content": "#[cfg(feature = \"guest\")]\n\nfn receive_chunk_wrapper(input_payload: &[u8]) -> CallResult {\n\n let input = deserialize::<FileChunk>(input_payload)?;\n\n let lock = RECEIVE_CHUNK.read().unwrap().unwrap();\n\n let result = lock(input)?;\n\n 
Ok(serialize(result)?)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct CreateContainerArgs {\n\n #[serde(rename = \"id\")]\n\n pub id: String,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct RemoveContainerArgs {\n\n #[serde(rename = \"id\")]\n\n pub id: String,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n", "file_path": "blobstore/rust/src/generated.rs", "rank": 76, "score": 48212.49085360368 }, { "content": "#[cfg(feature = \"guest\")]\n\nfn receive_text_wrapper(input_payload: &[u8]) -> CallResult {\n\n let input = deserialize::<ReceiveTextArgs>(input_payload)?;\n\n let lock = RECEIVE_TEXT.read().unwrap().unwrap();\n\n let result = lock(input.session, input.text)?;\n\n Ok(serialize(result)?)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct SessionStartedArgs {\n\n #[serde(rename = \"session\")]\n\n pub session: String,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct ReceiveTextArgs {\n\n #[serde(rename = \"session\")]\n\n pub session: String,\n\n #[serde(rename = \"text\")]\n\n pub text: String,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct SendTextArgs {\n\n #[serde(rename = \"session\")]\n\n pub session: String,\n\n #[serde(rename = \"text\")]\n\n pub text: String,\n\n}\n\n\n", "file_path": "telnet/rust/src/generated.rs", "rank": 77, "score": 48212.49085360368 }, { "content": "#[cfg(feature = \"guest\")]\n\nfn handle_message_wrapper(input_payload: &[u8]) -> CallResult {\n\n let input = deserialize::<BrokerMessage>(input_payload)?;\n\n let lock = HANDLE_MESSAGE.read().unwrap().unwrap();\n\n let result = lock(input)?;\n\n Ok(serialize(result)?)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct PublishArgs {\n\n #[serde(rename = \"subject\")]\n\n pub 
subject: String,\n\n #[serde(rename = \"replyTo\")]\n\n pub reply_to: String,\n\n #[serde(with = \"serde_bytes\")]\n\n #[serde(rename = \"body\")]\n\n pub body: Vec<u8>,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct RequestArgs {\n", "file_path": "messaging/rust/src/generated.rs", "rank": 79, "score": 48212.49085360368 }, { "content": "#[cfg(feature = \"guest\")]\n\nfn session_started_wrapper(input_payload: &[u8]) -> CallResult {\n\n let input = deserialize::<SessionStartedArgs>(input_payload)?;\n\n let lock = SESSION_STARTED.read().unwrap().unwrap();\n\n let result = lock(input.session)?;\n\n Ok(serialize(result)?)\n\n}\n\n\n", "file_path": "telnet/rust/src/generated.rs", "rank": 80, "score": 48212.49085360368 }, { "content": "#[cfg(feature = \"guest\")]\n\nfn handle_request_wrapper(input_payload: &[u8]) -> CallResult {\n\n let input = deserialize::<Request>(input_payload)?;\n\n let lock = HANDLE_REQUEST.read().unwrap().unwrap();\n\n let result = lock(input)?;\n\n Ok(serialize(result)?)\n\n}\n\n\n\n/// Represents an HTTP request received by the capability provider and delivered to the actor\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct Request {\n\n #[serde(rename = \"method\")]\n\n pub method: String,\n\n #[serde(rename = \"path\")]\n\n pub path: String,\n\n #[serde(rename = \"queryString\")]\n\n pub query_string: String,\n\n #[serde(rename = \"header\")]\n\n pub header: std::collections::HashMap<String, String>,\n\n #[serde(with = \"serde_bytes\")]\n\n #[serde(rename = \"body\")]\n", "file_path": "http-server/rust/src/generated.rs", "rank": 81, "score": 47371.86165605279 }, { "content": "\tText string\n", "file_path": "logging/go/logging.go", "rank": 82, "score": 46966.10803316823 }, { "content": "\tTarget string\n", "file_path": "logging/go/logging.go", "rank": 83, "score": 46966.10803316823 }, { "content": "type Handlers struct {\n\n\tWriteLog func(target string, level 
string, text string) error\n", "file_path": "logging/go/logging.go", "rank": 84, "score": 46966.10803316823 }, { "content": "\tLevel string\n", "file_path": "logging/go/logging.go", "rank": 85, "score": 46966.10803316823 }, { "content": "func (h Handlers) Register() {\n\n\tif h.WriteLog != nil {\n\n\t\tWriteLogHandler = h.WriteLog\n\n\t\twapc.RegisterFunction(\"WriteLog\", WriteLogWrapper)\n\n\t}\n", "file_path": "logging/go/logging.go", "rank": 86, "score": 46108.6390411197 }, { "content": "#[cfg(feature = \"guest\")]\n\nfn health_request_wrapper(input_payload: &[u8]) -> CallResult {\n\n let input = deserialize::<HealthCheckRequest>(input_payload)?;\n\n let lock = HEALTH_REQUEST.read().unwrap().unwrap();\n\n let result = lock(input)?;\n\n Ok(serialize(result)?)\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct CapabilityConfiguration {\n\n #[serde(rename = \"module\")]\n\n pub module: String,\n\n #[serde(rename = \"values\")]\n\n pub values: std::collections::HashMap<String, String>,\n\n}\n\n\n\n/// A request sent to the actor by the host itself in order to determine\n\n/// health status\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct HealthCheckRequest {\n\n #[serde(rename = \"placeholder\")]\n", "file_path": "actor-core/rust/wasmcloud-actor-core/src/generated.rs", "rank": 87, "score": 45089.15147345222 }, { "content": "extern crate rmp_serde as rmps;\n\nuse rmps::{Deserializer, Serializer};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::io::Cursor;\n\n\n\n#[cfg(feature = \"guest\")]\n\nextern crate wapc_guest as guest;\n\n#[cfg(feature = \"guest\")]\n\nuse crate::{set_binding, CURRENT_BINDING};\n\n#[cfg(feature = \"guest\")]\n\nuse guest::prelude::*;\n\n\n\n#[cfg(feature = \"guest\")]\n\npub struct Host {}\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Default for Host {\n\n fn default() -> Self {\n\n set_binding(\"default\");\n\n Host {}\n\n }\n\n}\n\n\n\n/// Creates a named host 
binding\n\n#[cfg(feature = \"guest\")]\n", "file_path": "logging/rust/src/generated.rs", "rank": 88, "score": 41877.094295341056 }, { "content": "mod generated;\n\n#[allow(unused_imports)]\n\npub use generated::*;\n\n\n\n// The operation used to request writing a log\n\npub const OP_LOG: &str = \"WriteLog\";\n\n\n\n#[cfg(feature = \"guest\")]\n\n#[doc(hidden)]\n\nstatic LOG_LEVELS: [&str; 5] = [\"error\", \"warn\", \"info\", \"debug\", \"trace\"];\n\n\n\n#[cfg(feature = \"guest\")]\n\nimpl Host {\n\n /// Writes a log message to specified target and level\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `target` - Used to filter logs to a specific target, e.g. actor name. Can be left blank\n\n /// * `level` - Log level, accepts `error`, `warn`, `info`, `debug`, `trace`. Defaults to `info`\n\n /// * `text` - Text to log\n", "file_path": "logging/rust/src/lib.rs", "rank": 89, "score": 41865.227211237885 }, { "content": " ///\n\n pub fn write_log(&self, target: &str, level: &str, text: &str) -> HandlerResult<()> {\n\n let log_level = if LOG_LEVELS.contains(&level.to_ascii_lowercase().as_str()) {\n\n level\n\n } else {\n\n \"info\"\n\n };\n\n self._write_log(target.to_string(), log_level.to_string(), text.to_string())\n\n }\n\n}\n\n\n\n// Begin implementation of automatic log macro interception\n\n\n\n#[cfg(feature = \"guest\")]\n\nuse lazy_static::lazy_static;\n\n#[cfg(feature = \"guest\")]\n\nuse log::{Metadata, Record};\n\n#[cfg(feature = \"guest\")]\n\nuse std::sync::{Arc, RwLock};\n\n#[cfg(feature = \"guest\")]\n", "file_path": "logging/rust/src/lib.rs", "rank": 90, "score": 41864.694319663045 }, { "content": "use wapc_guest::HandlerResult;\n\n\n\n#[cfg(feature = \"guest\")]\n\nlazy_static! 
{\n\n static ref CURRENT_BINDING: Arc<RwLock<String>> = Arc::new(RwLock::new(\"default\".to_string()));\n\n}\n\n\n\n#[cfg(feature = \"guest\")]\n\nstatic LOGGER: Host = Host {};\n\n\n\n/// Initializes the logger to use standard log macros\n\n///\n\n/// This function must be called before attempting to use log macros\n\n/// such as `info!` or `debug!` or the logs will not be written by the logger\n\n#[cfg(feature = \"guest\")]\n", "file_path": "logging/rust/src/lib.rs", "rank": 91, "score": 41863.2466104013 }, { "content": " \"WriteLog\",\n\n &serialize(input_args)?,\n\n )\n\n .map(|_vec| ())\n\n .map_err(|e| e.into())\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq, Deserialize, Serialize, Default, Clone)]\n\npub struct WriteLogArgs {\n\n #[serde(rename = \"target\")]\n\n pub target: String,\n\n #[serde(rename = \"level\")]\n\n pub level: String,\n\n #[serde(rename = \"text\")]\n\n pub text: String,\n\n}\n\n\n", "file_path": "logging/rust/src/generated.rs", "rank": 92, "score": 41861.63942704283 }, { "content": "#![doc(html_logo_url = \"https://avatars2.githubusercontent.com/u/52050279?s=200&v=4\")]\n\n//! # wasmCloud Logging Actor Interface\n\n//!\n\n//! This crate provides an abstraction over the `wasmcloud:logging` contract. This\n\n//! allows actors to use normal log macros (like `info!`, `warn!`, `error!`, etc)\n\n//! to write logs from within the actor.\n\n//!\n\n//! Example:\n\n//! ```rust\n\n//! extern crate wasmcloud_actor_http_server as http;\n\n//! extern crate wasmcloud_actor_logging as logging;\n\n//! use wapc_guest::HandlerResult;\n\n//! use http::{Request, Response, Handlers};\n\n//! use log::{info, warn, error, trace, debug};\n\n//!\n\n//! #[no_mangle]\n\n//! pub fn wapc_init() {\n\n//! http::Handlers::register_handle_request(method_logger);\n\n//! /// Initialize the logger to enable log macros\n\n//! logging::enable_macros();\n", "file_path": "logging/rust/src/lib.rs", "rank": 93, "score": 41861.10261904085 }, { "content": "//! }\n\n//!\n\n//! 
/// Actor must be signed with `wasmcloud:logging` to log messages\n\n//! fn method_logger(msg: http::Request) -> HandlerResult<http::Response> {\n\n//! /// Logs can be directly written via `write_log`\n\n//! logging::default().write_log(\"\", \"trace\", \"Coercing Rust String to str\");\n\n//! \n\n//! /// After initialization, logs can be directly written from the actor using macros\n\n//! match &*msg.method {\n\n//! \"GET\" => info!(\"Received a GET request\"),\n\n//! \"POST\" => info!(\"Received a POST request\"),\n\n//! \"PUT\" => info!(\"Received a PUT request\"),\n\n//! \"DELETE\" => warn!(\"Received a DELETE request\"),\n\n//! req => error!(\"Received an unsupported HTTP Request: {}\", req),\n\n//! };\n\n//! debug!(\"Finished matching HTTP method, returning OK\");\n\n//! Ok(http::Response::ok())\n\n//! }\n\n//! ```\n\n\n", "file_path": "logging/rust/src/lib.rs", "rank": 94, "score": 41854.374307042424 }, { "content": "\tbinding string\n", "file_path": "blobstore/go/blobstore.go", "rank": 95, "score": 36015.87511070168 }, { "content": "func NewHost(binding string) *Host {\n\n\treturn &Host{\n\n\t\tbinding: binding,\n\n\t}\n", "file_path": "blobstore/go/blobstore.go", "rank": 96, "score": 34705.30298603224 }, { "content": "func (h *Host) RemoveObject(id string, container_id string) (BlobstoreResult, error) {\n\n\tinputArgs := RemoveObjectArgs{\n\n\t\tID: id,\n\n\t\tContainer_id: container_id,\n\n\t}\n\n\tinputBytes, err := msgpack.ToBytes(&inputArgs)\n\n\tif err != nil {\n\n\t\treturn BlobstoreResult{}, err\n\n\t}\n\n\tpayload, err := wapc.HostCall(\n\n\t\th.binding,\n\n\t\t\"wasmcloud:blobstore\",\n\n\t\t\"RemoveObject\",\n\n\t\tinputBytes,\n\n\t)\n\n\tif err != nil {\n\n\t\treturn BlobstoreResult{}, err\n\n\t}\n\n\tdecoder := msgpack.NewDecoder(payload)\n\n\treturn DecodeBlobstoreResult(&decoder)\n", "file_path": "blobstore/go/blobstore.go", "rank": 97, "score": 33771.7628615118 }, { "content": "func (h *Host) CreateContainer(id string) (Container, error) 
{\n\n\tinputArgs := CreateContainerArgs{\n\n\t\tID: id,\n\n\t}\n\n\tinputBytes, err := msgpack.ToBytes(&inputArgs)\n\n\tif err != nil {\n\n\t\treturn Container{}, err\n\n\t}\n\n\tpayload, err := wapc.HostCall(\n\n\t\th.binding,\n\n\t\t\"wasmcloud:blobstore\",\n\n\t\t\"CreateContainer\",\n\n\t\tinputBytes,\n\n\t)\n\n\tif err != nil {\n\n\t\treturn Container{}, err\n\n\t}\n\n\tdecoder := msgpack.NewDecoder(payload)\n\n\treturn DecodeContainer(&decoder)\n", "file_path": "blobstore/go/blobstore.go", "rank": 98, "score": 33771.7628615118 }, { "content": "func (h *Host) RemoveContainer(id string) (BlobstoreResult, error) {\n\n\tinputArgs := RemoveContainerArgs{\n\n\t\tID: id,\n\n\t}\n\n\tinputBytes, err := msgpack.ToBytes(&inputArgs)\n\n\tif err != nil {\n\n\t\treturn BlobstoreResult{}, err\n\n\t}\n\n\tpayload, err := wapc.HostCall(\n\n\t\th.binding,\n\n\t\t\"wasmcloud:blobstore\",\n\n\t\t\"RemoveContainer\",\n\n\t\tinputBytes,\n\n\t)\n\n\tif err != nil {\n\n\t\treturn BlobstoreResult{}, err\n\n\t}\n\n\tdecoder := msgpack.NewDecoder(payload)\n\n\treturn DecodeBlobstoreResult(&decoder)\n", "file_path": "blobstore/go/blobstore.go", "rank": 99, "score": 33771.7628615118 } ]